separate import lines from mercurial and general python modules
Simon Heimberg
r8312:b87a50b7 default

The requested changes are too big and the content below was truncated.
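Each file shown below gets the same treatment: an import line that mixed Mercurial's own modules with Python standard-library modules is split so that each import statement draws from only one of the two. A minimal sketch of the convention, using the lines from archival.py below:

    # before: util (a Mercurial module) hides among stdlib modules
    import cStringIO, os, stat, tarfile, time, util, zipfile

    # after: Mercurial imports and stdlib imports on separate lines
    import util
    import cStringIO, os, stat, tarfile, time, zipfile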

mercurial/archival.py
@@ -1,225 +1,226 @@
 # archival.py - revision archival for mercurial
 #
 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
 #
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2, incorporated herein by reference.
 
 from i18n import _
 from node import hex
-import cStringIO, os, stat, tarfile, time, util, zipfile
+import util
+import cStringIO, os, stat, tarfile, time, zipfile
 import zlib, gzip
 
 def tidyprefix(dest, prefix, suffixes):
     '''choose prefix to use for names in archive. make sure prefix is
     safe for consumers.'''
 
     if prefix:
         prefix = util.normpath(prefix)
     else:
         if not isinstance(dest, str):
             raise ValueError('dest must be string if no prefix')
         prefix = os.path.basename(dest)
         lower = prefix.lower()
         for sfx in suffixes:
             if lower.endswith(sfx):
                 prefix = prefix[:-len(sfx)]
                 break
     lpfx = os.path.normpath(util.localpath(prefix))
     prefix = util.pconvert(lpfx)
     if not prefix.endswith('/'):
         prefix += '/'
     if prefix.startswith('../') or os.path.isabs(lpfx) or '/../' in prefix:
         raise util.Abort(_('archive prefix contains illegal components'))
     return prefix
 
 class tarit:
     '''write archive to tar file or stream. can write uncompressed,
     or compress with gzip or bzip2.'''
 
     class GzipFileWithTime(gzip.GzipFile):
 
         def __init__(self, *args, **kw):
             timestamp = None
             if 'timestamp' in kw:
                 timestamp = kw.pop('timestamp')
             if timestamp == None:
                 self.timestamp = time.time()
             else:
                 self.timestamp = timestamp
             gzip.GzipFile.__init__(self, *args, **kw)
 
         def _write_gzip_header(self):
             self.fileobj.write('\037\213')             # magic header
             self.fileobj.write('\010')                 # compression method
             # Python 2.6 deprecates self.filename
             fname = getattr(self, 'name', None) or self.filename
             flags = 0
             if fname:
                 flags = gzip.FNAME
             self.fileobj.write(chr(flags))
             gzip.write32u(self.fileobj, long(self.timestamp))
             self.fileobj.write('\002')
             self.fileobj.write('\377')
             if fname:
                 self.fileobj.write(fname + '\000')
 
     def __init__(self, dest, prefix, mtime, kind=''):
         self.prefix = tidyprefix(dest, prefix, ['.tar', '.tar.bz2', '.tar.gz',
                                                 '.tgz', '.tbz2'])
         self.mtime = mtime
 
         def taropen(name, mode, fileobj=None):
             if kind == 'gz':
                 mode = mode[0]
                 if not fileobj:
                     fileobj = open(name, mode + 'b')
                 gzfileobj = self.GzipFileWithTime(name, mode + 'b',
                                                   zlib.Z_BEST_COMPRESSION,
                                                   fileobj, timestamp=mtime)
                 return tarfile.TarFile.taropen(name, mode, gzfileobj)
             else:
                 return tarfile.open(name, mode + kind, fileobj)
 
         if isinstance(dest, str):
             self.z = taropen(dest, mode='w:')
         else:
             # Python 2.5-2.5.1 have a regression that requires a name arg
             self.z = taropen(name='', mode='w|', fileobj=dest)
 
     def addfile(self, name, mode, islink, data):
         i = tarfile.TarInfo(self.prefix + name)
         i.mtime = self.mtime
         i.size = len(data)
         if islink:
             i.type = tarfile.SYMTYPE
             i.mode = 0777
             i.linkname = data
             data = None
             i.size = 0
         else:
             i.mode = mode
             data = cStringIO.StringIO(data)
         self.z.addfile(i, data)
 
     def done(self):
         self.z.close()
 
 class tellable:
     '''provide tell method for zipfile.ZipFile when writing to http
     response file object.'''
 
     def __init__(self, fp):
         self.fp = fp
         self.offset = 0
 
     def __getattr__(self, key):
         return getattr(self.fp, key)
 
     def write(self, s):
         self.fp.write(s)
         self.offset += len(s)
 
     def tell(self):
         return self.offset
 
 class zipit:
     '''write archive to zip file or stream. can write uncompressed,
     or compressed with deflate.'''
 
     def __init__(self, dest, prefix, mtime, compress=True):
         self.prefix = tidyprefix(dest, prefix, ('.zip',))
         if not isinstance(dest, str):
             try:
                 dest.tell()
             except (AttributeError, IOError):
                 dest = tellable(dest)
         self.z = zipfile.ZipFile(dest, 'w',
                                  compress and zipfile.ZIP_DEFLATED or
                                  zipfile.ZIP_STORED)
         self.date_time = time.gmtime(mtime)[:6]
 
     def addfile(self, name, mode, islink, data):
         i = zipfile.ZipInfo(self.prefix + name, self.date_time)
         i.compress_type = self.z.compression
         # unzip will not honor unix file modes unless file creator is
         # set to unix (id 3).
         i.create_system = 3
         ftype = stat.S_IFREG
         if islink:
             mode = 0777
             ftype = stat.S_IFLNK
         i.external_attr = (mode | ftype) << 16L
         self.z.writestr(i, data)
 
     def done(self):
         self.z.close()
 
 class fileit:
     '''write archive as files in directory.'''
 
     def __init__(self, name, prefix, mtime):
         if prefix:
             raise util.Abort(_('cannot give prefix when archiving to files'))
         self.basedir = name
         self.opener = util.opener(self.basedir)
 
     def addfile(self, name, mode, islink, data):
         if islink:
             self.opener.symlink(data, name)
             return
         f = self.opener(name, "w", atomictemp=True)
         f.write(data)
         f.rename()
         destfile = os.path.join(self.basedir, name)
         os.chmod(destfile, mode)
 
     def done(self):
         pass
 
 archivers = {
     'files': fileit,
     'tar': tarit,
     'tbz2': lambda name, prefix, mtime: tarit(name, prefix, mtime, 'bz2'),
     'tgz': lambda name, prefix, mtime: tarit(name, prefix, mtime, 'gz'),
     'uzip': lambda name, prefix, mtime: zipit(name, prefix, mtime, False),
     'zip': zipit,
     }
 
 def archive(repo, dest, node, kind, decode=True, matchfn=None,
             prefix=None, mtime=None):
     '''create archive of repo as it was at node.
 
     dest can be name of directory, name of archive file, or file
     object to write archive to.
 
     kind is type of archive to create.
 
     decode tells whether to put files through decode filters from
     hgrc.
 
     matchfn is function to filter names of files to write to archive.
 
     prefix is name of path to put before every archive member.'''
 
     def write(name, mode, islink, getdata):
         if matchfn and not matchfn(name): return
         data = getdata()
         if decode:
             data = repo.wwritedata(name, data)
         archiver.addfile(name, mode, islink, data)
 
     if kind not in archivers:
         raise util.Abort(_("unknown archive type '%s'") % kind)
 
     ctx = repo[node]
     archiver = archivers[kind](dest, prefix, mtime or ctx.date()[0])
 
     if repo.ui.configbool("ui", "archivemeta", True):
         write('.hg_archival.txt', 0644, False,
               lambda: 'repo: %s\nnode: %s\n' % (
                   hex(repo.changelog.node(0)), hex(node)))
     for f in ctx:
         ff = ctx.flags(f)
         write(f, 'x' in ff and 0755 or 0644, 'l' in ff, ctx[f].data)
     archiver.done()
mercurial/bundlerepo.py
@@ -1,302 +1,303 @@
 # bundlerepo.py - repository class for viewing uncompressed bundles
 #
 # Copyright 2006, 2007 Benoit Boissinot <bboissin@gmail.com>
 #
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2, incorporated herein by reference.
 
 """Repository class for viewing uncompressed bundles.
 
 This provides a read-only repository interface to bundles as if they
 were part of the actual repository.
 """
 
 from node import nullid
 from i18n import _
-import changegroup, util, os, struct, bz2, zlib, tempfile, shutil, mdiff
+import os, struct, bz2, zlib, tempfile, shutil
+import changegroup, util, mdiff
 import localrepo, changelog, manifest, filelog, revlog, error
 
 class bundlerevlog(revlog.revlog):
     def __init__(self, opener, indexfile, bundlefile,
                  linkmapper=None):
         # How it works:
         # to retrieve a revision, we need to know the offset of
         # the revision in the bundlefile (an opened file).
         #
         # We store this offset in the index (start), to differentiate a
         # rev in the bundle and from a rev in the revlog, we check
         # len(index[r]). If the tuple is bigger than 7, it is a bundle
         # (it is bigger since we store the node to which the delta is)
         #
         revlog.revlog.__init__(self, opener, indexfile)
         self.bundlefile = bundlefile
         self.basemap = {}
         def chunkpositer():
             for chunk in changegroup.chunkiter(bundlefile):
                 pos = bundlefile.tell()
                 yield chunk, pos - len(chunk)
         n = len(self)
         prev = None
         for chunk, start in chunkpositer():
             size = len(chunk)
             if size < 80:
                 raise util.Abort(_("invalid changegroup"))
             start += 80
             size -= 80
             node, p1, p2, cs = struct.unpack("20s20s20s20s", chunk[:80])
             if node in self.nodemap:
                 prev = node
                 continue
             for p in (p1, p2):
                 if not p in self.nodemap:
                     raise error.LookupError(p1, self.indexfile,
                                             _("unknown parent"))
             if linkmapper is None:
                 link = n
             else:
                 link = linkmapper(cs)
 
             if not prev:
                 prev = p1
             # start, size, full unc. size, base (unused), link, p1, p2, node
             e = (revlog.offset_type(start, 0), size, -1, -1, link,
                  self.rev(p1), self.rev(p2), node)
             self.basemap[n] = prev
             self.index.insert(-1, e)
             self.nodemap[node] = n
             prev = node
             n += 1
 
     def bundle(self, rev):
         """is rev from the bundle"""
         if rev < 0:
             return False
         return rev in self.basemap
     def bundlebase(self, rev): return self.basemap[rev]
     def chunk(self, rev, df=None, cachelen=4096):
         # Warning: in case of bundle, the diff is against bundlebase,
         # not against rev - 1
         # XXX: could use some caching
         if not self.bundle(rev):
             return revlog.revlog.chunk(self, rev, df)
         self.bundlefile.seek(self.start(rev))
         return self.bundlefile.read(self.length(rev))
 
     def revdiff(self, rev1, rev2):
         """return or calculate a delta between two revisions"""
         if self.bundle(rev1) and self.bundle(rev2):
             # hot path for bundle
             revb = self.rev(self.bundlebase(rev2))
             if revb == rev1:
                 return self.chunk(rev2)
         elif not self.bundle(rev1) and not self.bundle(rev2):
             return revlog.revlog.revdiff(self, rev1, rev2)
 
         return mdiff.textdiff(self.revision(self.node(rev1)),
                               self.revision(self.node(rev2)))
 
     def revision(self, node):
         """return an uncompressed revision of a given"""
         if node == nullid: return ""
 
         text = None
         chain = []
         iter_node = node
         rev = self.rev(iter_node)
         # reconstruct the revision if it is from a changegroup
         while self.bundle(rev):
             if self._cache and self._cache[0] == iter_node:
                 text = self._cache[2]
                 break
             chain.append(rev)
             iter_node = self.bundlebase(rev)
             rev = self.rev(iter_node)
         if text is None:
             text = revlog.revlog.revision(self, iter_node)
 
         while chain:
             delta = self.chunk(chain.pop())
             text = mdiff.patches(text, [delta])
 
         p1, p2 = self.parents(node)
         if node != revlog.hash(text, p1, p2):
             raise error.RevlogError(_("integrity check failed on %s:%d")
                                     % (self.datafile, self.rev(node)))
 
         self._cache = (node, self.rev(node), text)
         return text
 
     def addrevision(self, text, transaction, link, p1=None, p2=None, d=None):
         raise NotImplementedError
     def addgroup(self, revs, linkmapper, transaction):
         raise NotImplementedError
     def strip(self, rev, minlink):
         raise NotImplementedError
     def checksize(self):
         raise NotImplementedError
 
 class bundlechangelog(bundlerevlog, changelog.changelog):
     def __init__(self, opener, bundlefile):
         changelog.changelog.__init__(self, opener)
         bundlerevlog.__init__(self, opener, self.indexfile, bundlefile)
 
 class bundlemanifest(bundlerevlog, manifest.manifest):
     def __init__(self, opener, bundlefile, linkmapper):
         manifest.manifest.__init__(self, opener)
         bundlerevlog.__init__(self, opener, self.indexfile, bundlefile,
                               linkmapper)
 
 class bundlefilelog(bundlerevlog, filelog.filelog):
     def __init__(self, opener, path, bundlefile, linkmapper):
         filelog.filelog.__init__(self, opener, path)
         bundlerevlog.__init__(self, opener, self.indexfile, bundlefile,
                               linkmapper)
 
 class bundlerepository(localrepo.localrepository):
     def __init__(self, ui, path, bundlename):
         self._tempparent = None
         try:
             localrepo.localrepository.__init__(self, ui, path)
         except error.RepoError:
             self._tempparent = tempfile.mkdtemp()
             localrepo.instance(ui,self._tempparent,1)
             localrepo.localrepository.__init__(self, ui, self._tempparent)
 
         if path:
             self._url = 'bundle:' + path + '+' + bundlename
         else:
             self._url = 'bundle:' + bundlename
 
         self.tempfile = None
         self.bundlefile = open(bundlename, "rb")
         header = self.bundlefile.read(6)
         if not header.startswith("HG"):
             raise util.Abort(_("%s: not a Mercurial bundle file") % bundlename)
         elif not header.startswith("HG10"):
             raise util.Abort(_("%s: unknown bundle version") % bundlename)
         elif (header == "HG10BZ") or (header == "HG10GZ"):
             fdtemp, temp = tempfile.mkstemp(prefix="hg-bundle-",
                                             suffix=".hg10un", dir=self.path)
             self.tempfile = temp
             fptemp = os.fdopen(fdtemp, 'wb')
             def generator(f):
                 if header == "HG10BZ":
                     zd = bz2.BZ2Decompressor()
                     zd.decompress("BZ")
                 elif header == "HG10GZ":
                     zd = zlib.decompressobj()
                 for chunk in f:
                     yield zd.decompress(chunk)
             gen = generator(util.filechunkiter(self.bundlefile, 4096))
 
             try:
                 fptemp.write("HG10UN")
                 for chunk in gen:
                     fptemp.write(chunk)
             finally:
                 fptemp.close()
                 self.bundlefile.close()
 
             self.bundlefile = open(self.tempfile, "rb")
             # seek right after the header
             self.bundlefile.seek(6)
         elif header == "HG10UN":
             # nothing to do
             pass
         else:
             raise util.Abort(_("%s: unknown bundle compression type")
                              % bundlename)
         # dict with the mapping 'filename' -> position in the bundle
         self.bundlefilespos = {}
 
     @util.propertycache
     def changelog(self):
         c = bundlechangelog(self.sopener, self.bundlefile)
         self.manstart = self.bundlefile.tell()
         return c
 
     @util.propertycache
     def manifest(self):
         self.bundlefile.seek(self.manstart)
         m = bundlemanifest(self.sopener, self.bundlefile, self.changelog.rev)
         self.filestart = self.bundlefile.tell()
         return m
 
     @util.propertycache
     def manstart(self):
         self.changelog
         return self.manstart
 
     @util.propertycache
     def filestart(self):
         self.manifest
         return self.filestart
 
     def url(self):
         return self._url
 
     def file(self, f):
         if not self.bundlefilespos:
             self.bundlefile.seek(self.filestart)
             while 1:
                 chunk = changegroup.getchunk(self.bundlefile)
                 if not chunk:
                     break
                 self.bundlefilespos[chunk] = self.bundlefile.tell()
                 for c in changegroup.chunkiter(self.bundlefile):
                     pass
 
         if f[0] == '/':
             f = f[1:]
         if f in self.bundlefilespos:
             self.bundlefile.seek(self.bundlefilespos[f])
             return bundlefilelog(self.sopener, f, self.bundlefile,
                                  self.changelog.rev)
         else:
             return filelog.filelog(self.sopener, f)
 
     def close(self):
         """Close assigned bundle file immediately."""
         self.bundlefile.close()
 
     def __del__(self):
         bundlefile = getattr(self, 'bundlefile', None)
         if bundlefile and not bundlefile.closed:
             bundlefile.close()
         tempfile = getattr(self, 'tempfile', None)
         if tempfile is not None:
             os.unlink(tempfile)
         if self._tempparent:
             shutil.rmtree(self._tempparent, True)
 
     def cancopy(self):
         return False
 
     def getcwd(self):
         return os.getcwd() # always outside the repo
 
 def instance(ui, path, create):
     if create:
         raise util.Abort(_('cannot create new bundle repository'))
     parentpath = ui.config("bundle", "mainreporoot", "")
     if parentpath:
         # Try to make the full path relative so we get a nice, short URL.
         # In particular, we don't want temp dir names in test outputs.
         cwd = os.getcwd()
         if parentpath == cwd:
             parentpath = ''
         else:
             cwd = os.path.join(cwd,'')
             if parentpath.startswith(cwd):
                 parentpath = parentpath[len(cwd):]
     path = util.drop_scheme('file', path)
     if path.startswith('bundle:'):
         path = util.drop_scheme('bundle', path)
         s = path.split("+", 1)
         if len(s) == 1:
             repopath, bundlename = parentpath, s[0]
         else:
             repopath, bundlename = s
     else:
         repopath, bundlename = parentpath, path
     return bundlerepository(ui, repopath, bundlename)
mercurial/changegroup.py
@@ -1,139 +1,140 @@
 # changegroup.py - Mercurial changegroup manipulation functions
 #
 # Copyright 2006 Matt Mackall <mpm@selenic.com>
 #
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2, incorporated herein by reference.
 
 from i18n import _
-import struct, os, bz2, zlib, util, tempfile
+import util
+import struct, os, bz2, zlib, tempfile
 
 def getchunk(source):
     """get a chunk from a changegroup"""
     d = source.read(4)
     if not d:
         return ""
     l = struct.unpack(">l", d)[0]
     if l <= 4:
         return ""
     d = source.read(l - 4)
     if len(d) < l - 4:
         raise util.Abort(_("premature EOF reading chunk"
                            " (got %d bytes, expected %d)")
                          % (len(d), l - 4))
     return d
 
 def chunkiter(source):
     """iterate through the chunks in source"""
     while 1:
         c = getchunk(source)
         if not c:
             break
         yield c
 
 def chunkheader(length):
     """build a changegroup chunk header"""
     return struct.pack(">l", length + 4)
 
 def closechunk():
     return struct.pack(">l", 0)
 
 class nocompress(object):
     def compress(self, x):
         return x
     def flush(self):
         return ""
 
 bundletypes = {
     "": ("", nocompress),
     "HG10UN": ("HG10UN", nocompress),
     "HG10BZ": ("HG10", lambda: bz2.BZ2Compressor()),
     "HG10GZ": ("HG10GZ", lambda: zlib.compressobj()),
 }
 
 # hgweb uses this list to communicate it's preferred type
 bundlepriority = ['HG10GZ', 'HG10BZ', 'HG10UN']
 
 def writebundle(cg, filename, bundletype):
     """Write a bundle file and return its filename.
 
     Existing files will not be overwritten.
     If no filename is specified, a temporary file is created.
     bz2 compression can be turned off.
     The bundle file will be deleted in case of errors.
     """
 
     fh = None
     cleanup = None
     try:
         if filename:
             fh = open(filename, "wb")
         else:
             fd, filename = tempfile.mkstemp(prefix="hg-bundle-", suffix=".hg")
             fh = os.fdopen(fd, "wb")
         cleanup = filename
 
         header, compressor = bundletypes[bundletype]
         fh.write(header)
         z = compressor()
 
         # parse the changegroup data, otherwise we will block
         # in case of sshrepo because we don't know the end of the stream
 
         # an empty chunkiter is the end of the changegroup
         # a changegroup has at least 2 chunkiters (changelog and manifest).
         # after that, an empty chunkiter is the end of the changegroup
         empty = False
         count = 0
         while not empty or count <= 2:
             empty = True
             count += 1
             for chunk in chunkiter(cg):
                 empty = False
                 fh.write(z.compress(chunkheader(len(chunk))))
                 pos = 0
                 while pos < len(chunk):
                     next = pos + 2**20
                     fh.write(z.compress(chunk[pos:next]))
                     pos = next
             fh.write(z.compress(closechunk()))
         fh.write(z.flush())
         cleanup = None
         return filename
     finally:
         if fh is not None:
             fh.close()
         if cleanup is not None:
             os.unlink(cleanup)
 
 def unbundle(header, fh):
     if header == 'HG10UN':
         return fh
     elif not header.startswith('HG'):
         # old client with uncompressed bundle
         def generator(f):
             yield header
             for chunk in f:
                 yield chunk
     elif header == 'HG10GZ':
         def generator(f):
             zd = zlib.decompressobj()
             for chunk in f:
                 yield zd.decompress(chunk)
     elif header == 'HG10BZ':
         def generator(f):
             zd = bz2.BZ2Decompressor()
             zd.decompress("BZ")
             for chunk in util.filechunkiter(f, 4096):
                 yield zd.decompress(chunk)
     return util.chunkbuffer(generator(fh))
 
 def readbundle(fh, fname):
     header = fh.read(6)
     if not header.startswith('HG'):
         raise util.Abort(_('%s: not a Mercurial bundle file') % fname)
     if not header.startswith('HG10'):
         raise util.Abort(_('%s: unknown bundle version') % fname)
     elif header not in bundletypes:
         raise util.Abort(_('%s: unknown bundle compression type') % fname)
     return unbundle(header, fh)
@@ -1,1225 +1,1225 b''
1 # cmdutil.py - help for command processing in mercurial
1 # cmdutil.py - help for command processing in mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2, incorporated herein by reference.
6 # GNU General Public License version 2, incorporated herein by reference.
7
7
8 from node import hex, nullid, nullrev, short
8 from node import hex, nullid, nullrev, short
9 from i18n import _
9 from i18n import _
10 import os, sys, bisect, stat, encoding
10 import os, sys, bisect, stat, errno
11 import mdiff, bdiff, util, templater, templatefilters, patch, errno, error
11 import mdiff, bdiff, util, templater, templatefilters, patch, error, encoding
12 import match as _match
12 import match as _match
13
13
14 revrangesep = ':'
14 revrangesep = ':'
15
15
16 def findpossible(cmd, table, strict=False):
16 def findpossible(cmd, table, strict=False):
17 """
17 """
18 Return cmd -> (aliases, command table entry)
18 Return cmd -> (aliases, command table entry)
19 for each matching command.
19 for each matching command.
20 Return debug commands (or their aliases) only if no normal command matches.
20 Return debug commands (or their aliases) only if no normal command matches.
21 """
21 """
22 choice = {}
22 choice = {}
23 debugchoice = {}
23 debugchoice = {}
24 for e in table.keys():
24 for e in table.keys():
25 aliases = e.lstrip("^").split("|")
25 aliases = e.lstrip("^").split("|")
26 found = None
26 found = None
27 if cmd in aliases:
27 if cmd in aliases:
28 found = cmd
28 found = cmd
29 elif not strict:
29 elif not strict:
30 for a in aliases:
30 for a in aliases:
31 if a.startswith(cmd):
31 if a.startswith(cmd):
32 found = a
32 found = a
33 break
33 break
34 if found is not None:
34 if found is not None:
35 if aliases[0].startswith("debug") or found.startswith("debug"):
35 if aliases[0].startswith("debug") or found.startswith("debug"):
36 debugchoice[found] = (aliases, table[e])
36 debugchoice[found] = (aliases, table[e])
37 else:
37 else:
38 choice[found] = (aliases, table[e])
38 choice[found] = (aliases, table[e])
39
39
40 if not choice and debugchoice:
40 if not choice and debugchoice:
41 choice = debugchoice
41 choice = debugchoice
42
42
43 return choice
43 return choice
44
44
45 def findcmd(cmd, table, strict=True):
45 def findcmd(cmd, table, strict=True):
46 """Return (aliases, command table entry) for command string."""
46 """Return (aliases, command table entry) for command string."""
47 choice = findpossible(cmd, table, strict)
47 choice = findpossible(cmd, table, strict)
48
48
49 if cmd in choice:
49 if cmd in choice:
50 return choice[cmd]
50 return choice[cmd]
51
51
52 if len(choice) > 1:
52 if len(choice) > 1:
53 clist = choice.keys()
53 clist = choice.keys()
54 clist.sort()
54 clist.sort()
55 raise error.AmbiguousCommand(cmd, clist)
55 raise error.AmbiguousCommand(cmd, clist)
56
56
57 if choice:
57 if choice:
58 return choice.values()[0]
58 return choice.values()[0]
59
59
60 raise error.UnknownCommand(cmd)
60 raise error.UnknownCommand(cmd)
61
61
62 def bail_if_changed(repo):
62 def bail_if_changed(repo):
63 if repo.dirstate.parents()[1] != nullid:
63 if repo.dirstate.parents()[1] != nullid:
64 raise util.Abort(_('outstanding uncommitted merge'))
64 raise util.Abort(_('outstanding uncommitted merge'))
65 modified, added, removed, deleted = repo.status()[:4]
65 modified, added, removed, deleted = repo.status()[:4]
66 if modified or added or removed or deleted:
66 if modified or added or removed or deleted:
67 raise util.Abort(_("outstanding uncommitted changes"))
67 raise util.Abort(_("outstanding uncommitted changes"))
68
68
69 def logmessage(opts):
69 def logmessage(opts):
70 """ get the log message according to -m and -l option """
70 """ get the log message according to -m and -l option """
71 message = opts.get('message')
71 message = opts.get('message')
72 logfile = opts.get('logfile')
72 logfile = opts.get('logfile')
73
73
74 if message and logfile:
74 if message and logfile:
75 raise util.Abort(_('options --message and --logfile are mutually '
75 raise util.Abort(_('options --message and --logfile are mutually '
76 'exclusive'))
76 'exclusive'))
77 if not message and logfile:
77 if not message and logfile:
78 try:
78 try:
79 if logfile == '-':
79 if logfile == '-':
80 message = sys.stdin.read()
80 message = sys.stdin.read()
81 else:
81 else:
82 message = open(logfile).read()
82 message = open(logfile).read()
83 except IOError, inst:
83 except IOError, inst:
84 raise util.Abort(_("can't read commit message '%s': %s") %
84 raise util.Abort(_("can't read commit message '%s': %s") %
85 (logfile, inst.strerror))
85 (logfile, inst.strerror))
86 return message
86 return message
87
87
88 def loglimit(opts):
88 def loglimit(opts):
89 """get the log limit according to option -l/--limit"""
89 """get the log limit according to option -l/--limit"""
90 limit = opts.get('limit')
90 limit = opts.get('limit')
91 if limit:
91 if limit:
92 try:
92 try:
93 limit = int(limit)
93 limit = int(limit)
94 except ValueError:
94 except ValueError:
95 raise util.Abort(_('limit must be a positive integer'))
95 raise util.Abort(_('limit must be a positive integer'))
96 if limit <= 0: raise util.Abort(_('limit must be positive'))
96 if limit <= 0: raise util.Abort(_('limit must be positive'))
97 else:
97 else:
98 limit = sys.maxint
98 limit = sys.maxint
99 return limit
99 return limit
100
100
101 def remoteui(src, opts):
101 def remoteui(src, opts):
102 'build a remote ui from ui or repo and opts'
102 'build a remote ui from ui or repo and opts'
103 if hasattr(src, 'baseui'): # looks like a repository
103 if hasattr(src, 'baseui'): # looks like a repository
104 dst = src.baseui # drop repo-specific config
104 dst = src.baseui # drop repo-specific config
105 src = src.ui # copy target options from repo
105 src = src.ui # copy target options from repo
106 else: # assume it's a global ui object
106 else: # assume it's a global ui object
107 dst = src # keep all global options
107 dst = src # keep all global options
108
108
109 # copy ssh-specific options
109 # copy ssh-specific options
110 for o in 'ssh', 'remotecmd':
110 for o in 'ssh', 'remotecmd':
111 v = opts.get(o) or src.config('ui', o)
111 v = opts.get(o) or src.config('ui', o)
112 if v:
112 if v:
113 dst.setconfig("ui", o, v)
113 dst.setconfig("ui", o, v)
114 # copy bundle-specific options
114 # copy bundle-specific options
115 r = src.config('bundle', 'mainreporoot')
115 r = src.config('bundle', 'mainreporoot')
116 if r:
116 if r:
117 dst.setconfig('bundle', 'mainreporoot', r)
117 dst.setconfig('bundle', 'mainreporoot', r)
118
118
119 return dst
119 return dst
120
120
121 def revpair(repo, revs):
121 def revpair(repo, revs):
122 '''return pair of nodes, given list of revisions. second item can
122 '''return pair of nodes, given list of revisions. second item can
123 be None, meaning use working dir.'''
123 be None, meaning use working dir.'''
124
124
125 def revfix(repo, val, defval):
125 def revfix(repo, val, defval):
126 if not val and val != 0 and defval is not None:
126 if not val and val != 0 and defval is not None:
127 val = defval
127 val = defval
128 return repo.lookup(val)
128 return repo.lookup(val)
129
129
130 if not revs:
130 if not revs:
131 return repo.dirstate.parents()[0], None
131 return repo.dirstate.parents()[0], None
132 end = None
132 end = None
133 if len(revs) == 1:
133 if len(revs) == 1:
134 if revrangesep in revs[0]:
134 if revrangesep in revs[0]:
135 start, end = revs[0].split(revrangesep, 1)
135 start, end = revs[0].split(revrangesep, 1)
136 start = revfix(repo, start, 0)
136 start = revfix(repo, start, 0)
137 end = revfix(repo, end, len(repo) - 1)
137 end = revfix(repo, end, len(repo) - 1)
138 else:
138 else:
139 start = revfix(repo, revs[0], None)
139 start = revfix(repo, revs[0], None)
140 elif len(revs) == 2:
140 elif len(revs) == 2:
141 if revrangesep in revs[0] or revrangesep in revs[1]:
141 if revrangesep in revs[0] or revrangesep in revs[1]:
142 raise util.Abort(_('too many revisions specified'))
142 raise util.Abort(_('too many revisions specified'))
143 start = revfix(repo, revs[0], None)
143 start = revfix(repo, revs[0], None)
144 end = revfix(repo, revs[1], None)
144 end = revfix(repo, revs[1], None)
145 else:
145 else:
146 raise util.Abort(_('too many revisions specified'))
146 raise util.Abort(_('too many revisions specified'))
147 return start, end
147 return start, end
148
148
149 def revrange(repo, revs):
149 def revrange(repo, revs):
150 """Yield revision as strings from a list of revision specifications."""
150 """Yield revision as strings from a list of revision specifications."""
151
151
152 def revfix(repo, val, defval):
152 def revfix(repo, val, defval):
153 if not val and val != 0 and defval is not None:
153 if not val and val != 0 and defval is not None:
154 return defval
154 return defval
155 return repo.changelog.rev(repo.lookup(val))
155 return repo.changelog.rev(repo.lookup(val))
156
156
157 seen, l = {}, []
157 seen, l = {}, []
158 for spec in revs:
158 for spec in revs:
159 if revrangesep in spec:
159 if revrangesep in spec:
160 start, end = spec.split(revrangesep, 1)
160 start, end = spec.split(revrangesep, 1)
161 start = revfix(repo, start, 0)
161 start = revfix(repo, start, 0)
162 end = revfix(repo, end, len(repo) - 1)
162 end = revfix(repo, end, len(repo) - 1)
163 step = start > end and -1 or 1
163 step = start > end and -1 or 1
164 for rev in xrange(start, end+step, step):
164 for rev in xrange(start, end+step, step):
165 if rev in seen:
165 if rev in seen:
166 continue
166 continue
167 seen[rev] = 1
167 seen[rev] = 1
168 l.append(rev)
168 l.append(rev)
169 else:
169 else:
170 rev = revfix(repo, spec, None)
170 rev = revfix(repo, spec, None)
171 if rev in seen:
171 if rev in seen:
172 continue
172 continue
173 seen[rev] = 1
173 seen[rev] = 1
174 l.append(rev)
174 l.append(rev)
175
175
176 return l
176 return l
177
177
178 def make_filename(repo, pat, node,
178 def make_filename(repo, pat, node,
179 total=None, seqno=None, revwidth=None, pathname=None):
179 total=None, seqno=None, revwidth=None, pathname=None):
180 node_expander = {
180 node_expander = {
181 'H': lambda: hex(node),
181 'H': lambda: hex(node),
182 'R': lambda: str(repo.changelog.rev(node)),
182 'R': lambda: str(repo.changelog.rev(node)),
183 'h': lambda: short(node),
183 'h': lambda: short(node),
184 }
184 }
185 expander = {
185 expander = {
186 '%': lambda: '%',
186 '%': lambda: '%',
187 'b': lambda: os.path.basename(repo.root),
187 'b': lambda: os.path.basename(repo.root),
188 }
188 }
189
189
190 try:
190 try:
191 if node:
191 if node:
192 expander.update(node_expander)
192 expander.update(node_expander)
193 if node:
193 if node:
194 expander['r'] = (lambda:
194 expander['r'] = (lambda:
195 str(repo.changelog.rev(node)).zfill(revwidth or 0))
195 str(repo.changelog.rev(node)).zfill(revwidth or 0))
196 if total is not None:
196 if total is not None:
197 expander['N'] = lambda: str(total)
197 expander['N'] = lambda: str(total)
198 if seqno is not None:
198 if seqno is not None:
199 expander['n'] = lambda: str(seqno)
199 expander['n'] = lambda: str(seqno)
200 if total is not None and seqno is not None:
200 if total is not None and seqno is not None:
201 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
201 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
202 if pathname is not None:
202 if pathname is not None:
203 expander['s'] = lambda: os.path.basename(pathname)
203 expander['s'] = lambda: os.path.basename(pathname)
204 expander['d'] = lambda: os.path.dirname(pathname) or '.'
204 expander['d'] = lambda: os.path.dirname(pathname) or '.'
205 expander['p'] = lambda: pathname
205 expander['p'] = lambda: pathname
206
206
207 newname = []
207 newname = []
208 patlen = len(pat)
208 patlen = len(pat)
209 i = 0
209 i = 0
210 while i < patlen:
210 while i < patlen:
211 c = pat[i]
211 c = pat[i]
212 if c == '%':
212 if c == '%':
213 i += 1
213 i += 1
214 c = pat[i]
214 c = pat[i]
215 c = expander[c]()
215 c = expander[c]()
216 newname.append(c)
216 newname.append(c)
217 i += 1
217 i += 1
218 return ''.join(newname)
218 return ''.join(newname)
219 except KeyError, inst:
219 except KeyError, inst:
220 raise util.Abort(_("invalid format spec '%%%s' in output file name") %
220 raise util.Abort(_("invalid format spec '%%%s' in output file name") %
221 inst.args[0])
221 inst.args[0])
222
222
223 def make_file(repo, pat, node=None,
223 def make_file(repo, pat, node=None,
224 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
224 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
225
225
226 writable = 'w' in mode or 'a' in mode
226 writable = 'w' in mode or 'a' in mode
227
227
228 if not pat or pat == '-':
228 if not pat or pat == '-':
229 return writable and sys.stdout or sys.stdin
229 return writable and sys.stdout or sys.stdin
230 if hasattr(pat, 'write') and writable:
230 if hasattr(pat, 'write') and writable:
231 return pat
231 return pat
232 if hasattr(pat, 'read') and 'r' in mode:
232 if hasattr(pat, 'read') and 'r' in mode:
233 return pat
233 return pat
234 return open(make_filename(repo, pat, node, total, seqno, revwidth,
234 return open(make_filename(repo, pat, node, total, seqno, revwidth,
235 pathname),
235 pathname),
236 mode)
236 mode)
237
237
238 def match(repo, pats=[], opts={}, globbed=False, default='relpath'):
238 def match(repo, pats=[], opts={}, globbed=False, default='relpath'):
239 if not globbed and default == 'relpath':
239 if not globbed and default == 'relpath':
240 pats = util.expand_glob(pats or [])
240 pats = util.expand_glob(pats or [])
241 m = _match.match(repo.root, repo.getcwd(), pats,
241 m = _match.match(repo.root, repo.getcwd(), pats,
242 opts.get('include'), opts.get('exclude'), default)
242 opts.get('include'), opts.get('exclude'), default)
243 def badfn(f, msg):
243 def badfn(f, msg):
244 repo.ui.warn("%s: %s\n" % (m.rel(f), msg))
244 repo.ui.warn("%s: %s\n" % (m.rel(f), msg))
245 return False
245 return False
246 m.bad = badfn
246 m.bad = badfn
247 return m
247 return m
248
248
249 def matchall(repo):
249 def matchall(repo):
250 return _match.always(repo.root, repo.getcwd())
250 return _match.always(repo.root, repo.getcwd())
251
251
252 def matchfiles(repo, files):
252 def matchfiles(repo, files):
253 return _match.exact(repo.root, repo.getcwd(), files)
253 return _match.exact(repo.root, repo.getcwd(), files)
254
254
255 def findrenames(repo, added=None, removed=None, threshold=0.5):
255 def findrenames(repo, added=None, removed=None, threshold=0.5):
256 '''find renamed files -- yields (before, after, score) tuples'''
256 '''find renamed files -- yields (before, after, score) tuples'''
257 if added is None or removed is None:
257 if added is None or removed is None:
258 added, removed = repo.status()[1:3]
258 added, removed = repo.status()[1:3]
259 ctx = repo['.']
259 ctx = repo['.']
260 for a in added:
260 for a in added:
261 aa = repo.wread(a)
261 aa = repo.wread(a)
262 bestname, bestscore = None, threshold
262 bestname, bestscore = None, threshold
263 for r in removed:
263 for r in removed:
264 rr = ctx.filectx(r).data()
264 rr = ctx.filectx(r).data()
265
265
266 # bdiff.blocks() returns blocks of matching lines
266 # bdiff.blocks() returns blocks of matching lines
267 # count the number of bytes in each
267 # count the number of bytes in each
268 equal = 0
268 equal = 0
269 alines = mdiff.splitnewlines(aa)
269 alines = mdiff.splitnewlines(aa)
270 matches = bdiff.blocks(aa, rr)
270 matches = bdiff.blocks(aa, rr)
271 for x1,x2,y1,y2 in matches:
271 for x1,x2,y1,y2 in matches:
272 for line in alines[x1:x2]:
272 for line in alines[x1:x2]:
273 equal += len(line)
273 equal += len(line)
274
274
275 lengths = len(aa) + len(rr)
275 lengths = len(aa) + len(rr)
276 if lengths:
276 if lengths:
277 myscore = equal*2.0 / lengths
277 myscore = equal*2.0 / lengths
278 if myscore >= bestscore:
278 if myscore >= bestscore:
279 bestname, bestscore = r, myscore
279 bestname, bestscore = r, myscore
280 if bestname:
280 if bestname:
281 yield bestname, a, bestscore
281 yield bestname, a, bestscore
282
282
283 def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
283 def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
284 if dry_run is None:
284 if dry_run is None:
285 dry_run = opts.get('dry_run')
285 dry_run = opts.get('dry_run')
286 if similarity is None:
286 if similarity is None:
287 similarity = float(opts.get('similarity') or 0)
287 similarity = float(opts.get('similarity') or 0)
288 add, remove = [], []
288 add, remove = [], []
289 mapping = {}
289 mapping = {}
290 audit_path = util.path_auditor(repo.root)
290 audit_path = util.path_auditor(repo.root)
291 m = match(repo, pats, opts)
291 m = match(repo, pats, opts)
292 for abs in repo.walk(m):
292 for abs in repo.walk(m):
293 target = repo.wjoin(abs)
293 target = repo.wjoin(abs)
294 good = True
294 good = True
295 try:
295 try:
296 audit_path(abs)
296 audit_path(abs)
297 except:
297 except:
298 good = False
298 good = False
299 rel = m.rel(abs)
299 rel = m.rel(abs)
300 exact = m.exact(abs)
300 exact = m.exact(abs)
301 if good and abs not in repo.dirstate:
301 if good and abs not in repo.dirstate:
302 add.append(abs)
302 add.append(abs)
303 mapping[abs] = rel, m.exact(abs)
303 mapping[abs] = rel, m.exact(abs)
304 if repo.ui.verbose or not exact:
304 if repo.ui.verbose or not exact:
305 repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
305 repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
306 if repo.dirstate[abs] != 'r' and (not good or not util.lexists(target)
306 if repo.dirstate[abs] != 'r' and (not good or not util.lexists(target)
307 or (os.path.isdir(target) and not os.path.islink(target))):
307 or (os.path.isdir(target) and not os.path.islink(target))):
308 remove.append(abs)
308 remove.append(abs)
309 mapping[abs] = rel, exact
309 mapping[abs] = rel, exact
310 if repo.ui.verbose or not exact:
310 if repo.ui.verbose or not exact:
311 repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
311 repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
312 if not dry_run:
312 if not dry_run:
313 repo.remove(remove)
313 repo.remove(remove)
314 repo.add(add)
314 repo.add(add)
315 if similarity > 0:
315 if similarity > 0:
316 for old, new, score in findrenames(repo, add, remove, similarity):
316 for old, new, score in findrenames(repo, add, remove, similarity):
317 oldrel, oldexact = mapping[old]
317 oldrel, oldexact = mapping[old]
318 newrel, newexact = mapping[new]
318 newrel, newexact = mapping[new]
319 if repo.ui.verbose or not oldexact or not newexact:
319 if repo.ui.verbose or not oldexact or not newexact:
320 repo.ui.status(_('recording removal of %s as rename to %s '
320 repo.ui.status(_('recording removal of %s as rename to %s '
321 '(%d%% similar)\n') %
321 '(%d%% similar)\n') %
322 (oldrel, newrel, score * 100))
322 (oldrel, newrel, score * 100))
323 if not dry_run:
323 if not dry_run:
324 repo.copy(old, new)
324 repo.copy(old, new)
325
325
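# addremove() relies on util.path_auditor to reject names that would escape
# the working directory.  A minimal sketch of that guard, assuming only the
# path-traversal check with POSIX paths (the real auditor also catches
# nested repositories and case-folding tricks):
import os

def toy_audit(root, relpath):
    full = os.path.normpath(os.path.join(root, relpath))
    if full != root and not full.startswith(root + os.sep):
        raise ValueError('%s: escapes repository root' % relpath)

# toy_audit('/repo', 'src/a.py')      -> passes
# toy_audit('/repo', '../etc/passwd') -> ValueError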
326 def copy(ui, repo, pats, opts, rename=False):
326 def copy(ui, repo, pats, opts, rename=False):
327 # called with the repo lock held
327 # called with the repo lock held
328 #
328 #
329 # hgsep => pathname that uses "/" to separate directories
329 # hgsep => pathname that uses "/" to separate directories
330 # ossep => pathname that uses os.sep to separate directories
330 # ossep => pathname that uses os.sep to separate directories
331 cwd = repo.getcwd()
331 cwd = repo.getcwd()
332 targets = {}
332 targets = {}
333 after = opts.get("after")
333 after = opts.get("after")
334 dryrun = opts.get("dry_run")
334 dryrun = opts.get("dry_run")
335
335
336 def walkpat(pat):
336 def walkpat(pat):
337 srcs = []
337 srcs = []
338 m = match(repo, [pat], opts, globbed=True)
338 m = match(repo, [pat], opts, globbed=True)
339 for abs in repo.walk(m):
339 for abs in repo.walk(m):
340 state = repo.dirstate[abs]
340 state = repo.dirstate[abs]
341 rel = m.rel(abs)
341 rel = m.rel(abs)
342 exact = m.exact(abs)
342 exact = m.exact(abs)
343 if state in '?r':
343 if state in '?r':
344 if exact and state == '?':
344 if exact and state == '?':
345 ui.warn(_('%s: not copying - file is not managed\n') % rel)
345 ui.warn(_('%s: not copying - file is not managed\n') % rel)
346 if exact and state == 'r':
346 if exact and state == 'r':
347 ui.warn(_('%s: not copying - file has been marked for'
347 ui.warn(_('%s: not copying - file has been marked for'
348 ' remove\n') % rel)
348 ' remove\n') % rel)
349 continue
349 continue
350 # abs: hgsep
350 # abs: hgsep
351 # rel: ossep
351 # rel: ossep
352 srcs.append((abs, rel, exact))
352 srcs.append((abs, rel, exact))
353 return srcs
353 return srcs
354
354
355 # abssrc: hgsep
355 # abssrc: hgsep
356 # relsrc: ossep
356 # relsrc: ossep
357 # otarget: ossep
357 # otarget: ossep
358 def copyfile(abssrc, relsrc, otarget, exact):
358 def copyfile(abssrc, relsrc, otarget, exact):
359 abstarget = util.canonpath(repo.root, cwd, otarget)
359 abstarget = util.canonpath(repo.root, cwd, otarget)
360 reltarget = repo.pathto(abstarget, cwd)
360 reltarget = repo.pathto(abstarget, cwd)
361 target = repo.wjoin(abstarget)
361 target = repo.wjoin(abstarget)
362 src = repo.wjoin(abssrc)
362 src = repo.wjoin(abssrc)
363 state = repo.dirstate[abstarget]
363 state = repo.dirstate[abstarget]
364
364
365 # check for collisions
365 # check for collisions
366 prevsrc = targets.get(abstarget)
366 prevsrc = targets.get(abstarget)
367 if prevsrc is not None:
367 if prevsrc is not None:
368 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
368 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
369 (reltarget, repo.pathto(abssrc, cwd),
369 (reltarget, repo.pathto(abssrc, cwd),
370 repo.pathto(prevsrc, cwd)))
370 repo.pathto(prevsrc, cwd)))
371 return
371 return
372
372
373 # check for overwrites
373 # check for overwrites
374 exists = os.path.exists(target)
374 exists = os.path.exists(target)
375         if (not after and exists) or (after and state in 'mn'):
375         if (not after and exists) or (after and state in 'mn'):
376 if not opts['force']:
376 if not opts['force']:
377 ui.warn(_('%s: not overwriting - file exists\n') %
377 ui.warn(_('%s: not overwriting - file exists\n') %
378 reltarget)
378 reltarget)
379 return
379 return
380
380
381 if after:
381 if after:
382 if not exists:
382 if not exists:
383 return
383 return
384 elif not dryrun:
384 elif not dryrun:
385 try:
385 try:
386 if exists:
386 if exists:
387 os.unlink(target)
387 os.unlink(target)
388 targetdir = os.path.dirname(target) or '.'
388 targetdir = os.path.dirname(target) or '.'
389 if not os.path.isdir(targetdir):
389 if not os.path.isdir(targetdir):
390 os.makedirs(targetdir)
390 os.makedirs(targetdir)
391 util.copyfile(src, target)
391 util.copyfile(src, target)
392 except IOError, inst:
392 except IOError, inst:
393 if inst.errno == errno.ENOENT:
393 if inst.errno == errno.ENOENT:
394 ui.warn(_('%s: deleted in working copy\n') % relsrc)
394 ui.warn(_('%s: deleted in working copy\n') % relsrc)
395 else:
395 else:
396 ui.warn(_('%s: cannot copy - %s\n') %
396 ui.warn(_('%s: cannot copy - %s\n') %
397 (relsrc, inst.strerror))
397 (relsrc, inst.strerror))
398 return True # report a failure
398 return True # report a failure
399
399
400 if ui.verbose or not exact:
400 if ui.verbose or not exact:
401 if rename:
401 if rename:
402 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
402 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
403 else:
403 else:
404 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
404 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
405
405
406 targets[abstarget] = abssrc
406 targets[abstarget] = abssrc
407
407
408 # fix up dirstate
408 # fix up dirstate
409 origsrc = repo.dirstate.copied(abssrc) or abssrc
409 origsrc = repo.dirstate.copied(abssrc) or abssrc
410 if abstarget == origsrc: # copying back a copy?
410 if abstarget == origsrc: # copying back a copy?
411 if state not in 'mn' and not dryrun:
411 if state not in 'mn' and not dryrun:
412 repo.dirstate.normallookup(abstarget)
412 repo.dirstate.normallookup(abstarget)
413 else:
413 else:
414 if repo.dirstate[origsrc] == 'a' and origsrc == abssrc:
414 if repo.dirstate[origsrc] == 'a' and origsrc == abssrc:
415 if not ui.quiet:
415 if not ui.quiet:
416 ui.warn(_("%s has not been committed yet, so no copy "
416 ui.warn(_("%s has not been committed yet, so no copy "
417 "data will be stored for %s.\n")
417 "data will be stored for %s.\n")
418 % (repo.pathto(origsrc, cwd), reltarget))
418 % (repo.pathto(origsrc, cwd), reltarget))
419 if repo.dirstate[abstarget] in '?r' and not dryrun:
419 if repo.dirstate[abstarget] in '?r' and not dryrun:
420 repo.add([abstarget])
420 repo.add([abstarget])
421 elif not dryrun:
421 elif not dryrun:
422 repo.copy(origsrc, abstarget)
422 repo.copy(origsrc, abstarget)
423
423
424 if rename and not dryrun:
424 if rename and not dryrun:
425 repo.remove([abssrc], not after)
425 repo.remove([abssrc], not after)
426
426
427 # pat: ossep
427 # pat: ossep
428     # dest: ossep
428     # dest: ossep
429     # srcs: list of (hgsep, ossep, bool)
429     # srcs: list of (hgsep, ossep, bool)
430 # return: function that takes hgsep and returns ossep
430 # return: function that takes hgsep and returns ossep
431 def targetpathfn(pat, dest, srcs):
431 def targetpathfn(pat, dest, srcs):
432 if os.path.isdir(pat):
432 if os.path.isdir(pat):
433 abspfx = util.canonpath(repo.root, cwd, pat)
433 abspfx = util.canonpath(repo.root, cwd, pat)
434 abspfx = util.localpath(abspfx)
434 abspfx = util.localpath(abspfx)
435 if destdirexists:
435 if destdirexists:
436 striplen = len(os.path.split(abspfx)[0])
436 striplen = len(os.path.split(abspfx)[0])
437 else:
437 else:
438 striplen = len(abspfx)
438 striplen = len(abspfx)
439 if striplen:
439 if striplen:
440 striplen += len(os.sep)
440 striplen += len(os.sep)
441 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
441 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
442 elif destdirexists:
442 elif destdirexists:
443 res = lambda p: os.path.join(dest,
443 res = lambda p: os.path.join(dest,
444 os.path.basename(util.localpath(p)))
444 os.path.basename(util.localpath(p)))
445 else:
445 else:
446 res = lambda p: dest
446 res = lambda p: dest
447 return res
447 return res
448
448
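# The striplen arithmetic above, isolated for a POSIX-path example; a
# sketch mirroring the directory branch of targetpathfn() only:
import os

def map_target(dest, abspfx, p, destdirexists=True):
    if destdirexists:
        striplen = len(os.path.split(abspfx)[0])  # keep the last component
    else:
        striplen = len(abspfx)                    # replace it outright
    if striplen:
        striplen += len(os.sep)
    return os.path.join(dest, p[striplen:])

# map_target('dest', 'work/src', 'work/src/a.py')       -> 'dest/src/a.py'
# map_target('new', 'work/src', 'work/src/a.py', False) -> 'new/a.py'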
449 # pat: ossep
449 # pat: ossep
450     # dest: ossep
450     # dest: ossep
451     # srcs: list of (hgsep, ossep, bool)
451     # srcs: list of (hgsep, ossep, bool)
452 # return: function that takes hgsep and returns ossep
452 # return: function that takes hgsep and returns ossep
453 def targetpathafterfn(pat, dest, srcs):
453 def targetpathafterfn(pat, dest, srcs):
454 if util.patkind(pat, None)[0]:
454 if util.patkind(pat, None)[0]:
455 # a mercurial pattern
455 # a mercurial pattern
456 res = lambda p: os.path.join(dest,
456 res = lambda p: os.path.join(dest,
457 os.path.basename(util.localpath(p)))
457 os.path.basename(util.localpath(p)))
458 else:
458 else:
459 abspfx = util.canonpath(repo.root, cwd, pat)
459 abspfx = util.canonpath(repo.root, cwd, pat)
460 if len(abspfx) < len(srcs[0][0]):
460 if len(abspfx) < len(srcs[0][0]):
461 # A directory. Either the target path contains the last
461 # A directory. Either the target path contains the last
462 # component of the source path or it does not.
462 # component of the source path or it does not.
463 def evalpath(striplen):
463 def evalpath(striplen):
464 score = 0
464 score = 0
465 for s in srcs:
465 for s in srcs:
466 t = os.path.join(dest, util.localpath(s[0])[striplen:])
466 t = os.path.join(dest, util.localpath(s[0])[striplen:])
467 if os.path.exists(t):
467 if os.path.exists(t):
468 score += 1
468 score += 1
469 return score
469 return score
470
470
471 abspfx = util.localpath(abspfx)
471 abspfx = util.localpath(abspfx)
472 striplen = len(abspfx)
472 striplen = len(abspfx)
473 if striplen:
473 if striplen:
474 striplen += len(os.sep)
474 striplen += len(os.sep)
475 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
475 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
476 score = evalpath(striplen)
476 score = evalpath(striplen)
477 striplen1 = len(os.path.split(abspfx)[0])
477 striplen1 = len(os.path.split(abspfx)[0])
478 if striplen1:
478 if striplen1:
479 striplen1 += len(os.sep)
479 striplen1 += len(os.sep)
480 if evalpath(striplen1) > score:
480 if evalpath(striplen1) > score:
481 striplen = striplen1
481 striplen = striplen1
482 res = lambda p: os.path.join(dest,
482 res = lambda p: os.path.join(dest,
483 util.localpath(p)[striplen:])
483 util.localpath(p)[striplen:])
484 else:
484 else:
485 # a file
485 # a file
486 if destdirexists:
486 if destdirexists:
487 res = lambda p: os.path.join(dest,
487 res = lambda p: os.path.join(dest,
488 os.path.basename(util.localpath(p)))
488 os.path.basename(util.localpath(p)))
489 else:
489 else:
490 res = lambda p: dest
490 res = lambda p: dest
491 return res
491 return res
492
492
493
493
494 pats = util.expand_glob(pats)
494 pats = util.expand_glob(pats)
495 if not pats:
495 if not pats:
496 raise util.Abort(_('no source or destination specified'))
496 raise util.Abort(_('no source or destination specified'))
497 if len(pats) == 1:
497 if len(pats) == 1:
498 raise util.Abort(_('no destination specified'))
498 raise util.Abort(_('no destination specified'))
499 dest = pats.pop()
499 dest = pats.pop()
500 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
500 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
501 if not destdirexists:
501 if not destdirexists:
502 if len(pats) > 1 or util.patkind(pats[0], None)[0]:
502 if len(pats) > 1 or util.patkind(pats[0], None)[0]:
503 raise util.Abort(_('with multiple sources, destination must be an '
503 raise util.Abort(_('with multiple sources, destination must be an '
504 'existing directory'))
504 'existing directory'))
505 if util.endswithsep(dest):
505 if util.endswithsep(dest):
506 raise util.Abort(_('destination %s is not a directory') % dest)
506 raise util.Abort(_('destination %s is not a directory') % dest)
507
507
508 tfn = targetpathfn
508 tfn = targetpathfn
509 if after:
509 if after:
510 tfn = targetpathafterfn
510 tfn = targetpathafterfn
511 copylist = []
511 copylist = []
512 for pat in pats:
512 for pat in pats:
513 srcs = walkpat(pat)
513 srcs = walkpat(pat)
514 if not srcs:
514 if not srcs:
515 continue
515 continue
516 copylist.append((tfn(pat, dest, srcs), srcs))
516 copylist.append((tfn(pat, dest, srcs), srcs))
517 if not copylist:
517 if not copylist:
518 raise util.Abort(_('no files to copy'))
518 raise util.Abort(_('no files to copy'))
519
519
520 errors = 0
520 errors = 0
521 for targetpath, srcs in copylist:
521 for targetpath, srcs in copylist:
522 for abssrc, relsrc, exact in srcs:
522 for abssrc, relsrc, exact in srcs:
523 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
523 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
524 errors += 1
524 errors += 1
525
525
526 if errors:
526 if errors:
527 ui.warn(_('(consider using --after)\n'))
527 ui.warn(_('(consider using --after)\n'))
528
528
529 return errors
529 return errors
530
530
531 def service(opts, parentfn=None, initfn=None, runfn=None):
531 def service(opts, parentfn=None, initfn=None, runfn=None):
532 '''Run a command as a service.'''
532 '''Run a command as a service.'''
533
533
534 if opts['daemon'] and not opts['daemon_pipefds']:
534 if opts['daemon'] and not opts['daemon_pipefds']:
535 rfd, wfd = os.pipe()
535 rfd, wfd = os.pipe()
536 args = sys.argv[:]
536 args = sys.argv[:]
537 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
537 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
538 # Don't pass --cwd to the child process, because we've already
538 # Don't pass --cwd to the child process, because we've already
539 # changed directory.
539 # changed directory.
540 for i in xrange(1,len(args)):
540 for i in xrange(1,len(args)):
541 if args[i].startswith('--cwd='):
541 if args[i].startswith('--cwd='):
542 del args[i]
542 del args[i]
543 break
543 break
544 elif args[i].startswith('--cwd'):
544 elif args[i].startswith('--cwd'):
545 del args[i:i+2]
545 del args[i:i+2]
546 break
546 break
547 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
547 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
548 args[0], args)
548 args[0], args)
549 os.close(wfd)
549 os.close(wfd)
550 os.read(rfd, 1)
550 os.read(rfd, 1)
551 if parentfn:
551 if parentfn:
552 return parentfn(pid)
552 return parentfn(pid)
553 else:
553 else:
554 os._exit(0)
554 os._exit(0)
555
555
556 if initfn:
556 if initfn:
557 initfn()
557 initfn()
558
558
559 if opts['pid_file']:
559 if opts['pid_file']:
560 fp = open(opts['pid_file'], 'w')
560 fp = open(opts['pid_file'], 'w')
561 fp.write(str(os.getpid()) + '\n')
561 fp.write(str(os.getpid()) + '\n')
562 fp.close()
562 fp.close()
563
563
564 if opts['daemon_pipefds']:
564 if opts['daemon_pipefds']:
565 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
565 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
566 os.close(rfd)
566 os.close(rfd)
567 try:
567 try:
568 os.setsid()
568 os.setsid()
569 except AttributeError:
569 except AttributeError:
570 pass
570 pass
571 os.write(wfd, 'y')
571 os.write(wfd, 'y')
572 os.close(wfd)
572 os.close(wfd)
573 sys.stdout.flush()
573 sys.stdout.flush()
574 sys.stderr.flush()
574 sys.stderr.flush()
575 fd = os.open(util.nulldev, os.O_RDWR)
575 fd = os.open(util.nulldev, os.O_RDWR)
576 if fd != 0: os.dup2(fd, 0)
576 if fd != 0: os.dup2(fd, 0)
577 if fd != 1: os.dup2(fd, 1)
577 if fd != 1: os.dup2(fd, 1)
578 if fd != 2: os.dup2(fd, 2)
578 if fd != 2: os.dup2(fd, 2)
579 if fd not in (0, 1, 2): os.close(fd)
579 if fd not in (0, 1, 2): os.close(fd)
580
580
581 if runfn:
581 if runfn:
582 return runfn()
582 return runfn()
583
583
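# The daemon handshake in service(), condensed into a sketch (os.fork()
# standing in for the os.spawnvp() re-exec; POSIX only, error handling
# omitted): the parent blocks on a pipe until the child signals readiness,
# then the child detaches its stdio.
import os

def toy_daemonize(ready_fn):
    rfd, wfd = os.pipe()
    pid = os.fork()
    if pid:                       # parent: wait for the child's 'y'
        os.close(wfd)
        os.read(rfd, 1)
        return pid
    os.close(rfd)
    os.setsid()                   # new session, no controlling terminal
    ready_fn()                    # e.g. bind the server socket
    os.write(wfd, b'y')           # unblock the parent
    os.close(wfd)
    null = os.open(os.devnull, os.O_RDWR)
    for fd in (0, 1, 2):
        os.dup2(null, fd)
    if null > 2:
        os.close(null)
    return 0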
584 class changeset_printer(object):
584 class changeset_printer(object):
585 '''show changeset information when templating not requested.'''
585 '''show changeset information when templating not requested.'''
586
586
587 def __init__(self, ui, repo, patch, diffopts, buffered):
587 def __init__(self, ui, repo, patch, diffopts, buffered):
588 self.ui = ui
588 self.ui = ui
589 self.repo = repo
589 self.repo = repo
590 self.buffered = buffered
590 self.buffered = buffered
591 self.patch = patch
591 self.patch = patch
592 self.diffopts = diffopts
592 self.diffopts = diffopts
593 self.header = {}
593 self.header = {}
594 self.hunk = {}
594 self.hunk = {}
595 self.lastheader = None
595 self.lastheader = None
596
596
597 def flush(self, rev):
597 def flush(self, rev):
598 if rev in self.header:
598 if rev in self.header:
599 h = self.header[rev]
599 h = self.header[rev]
600 if h != self.lastheader:
600 if h != self.lastheader:
601 self.lastheader = h
601 self.lastheader = h
602 self.ui.write(h)
602 self.ui.write(h)
603 del self.header[rev]
603 del self.header[rev]
604 if rev in self.hunk:
604 if rev in self.hunk:
605 self.ui.write(self.hunk[rev])
605 self.ui.write(self.hunk[rev])
606 del self.hunk[rev]
606 del self.hunk[rev]
607 return 1
607 return 1
608 return 0
608 return 0
609
609
610 def show(self, ctx, copies=(), **props):
610 def show(self, ctx, copies=(), **props):
611 if self.buffered:
611 if self.buffered:
612 self.ui.pushbuffer()
612 self.ui.pushbuffer()
613 self._show(ctx, copies, props)
613 self._show(ctx, copies, props)
614 self.hunk[ctx.rev()] = self.ui.popbuffer()
614 self.hunk[ctx.rev()] = self.ui.popbuffer()
615 else:
615 else:
616 self._show(ctx, copies, props)
616 self._show(ctx, copies, props)
617
617
618 def _show(self, ctx, copies, props):
618 def _show(self, ctx, copies, props):
619 '''show a single changeset or file revision'''
619 '''show a single changeset or file revision'''
620 changenode = ctx.node()
620 changenode = ctx.node()
621 rev = ctx.rev()
621 rev = ctx.rev()
622
622
623 if self.ui.quiet:
623 if self.ui.quiet:
624 self.ui.write("%d:%s\n" % (rev, short(changenode)))
624 self.ui.write("%d:%s\n" % (rev, short(changenode)))
625 return
625 return
626
626
627 log = self.repo.changelog
627 log = self.repo.changelog
628 changes = log.read(changenode)
628 changes = log.read(changenode)
629 date = util.datestr(changes[2])
629 date = util.datestr(changes[2])
630 extra = changes[5]
630 extra = changes[5]
631 branch = extra.get("branch")
631 branch = extra.get("branch")
632
632
633 hexfunc = self.ui.debugflag and hex or short
633 hexfunc = self.ui.debugflag and hex or short
634
634
635 parents = [(p, hexfunc(log.node(p)))
635 parents = [(p, hexfunc(log.node(p)))
636 for p in self._meaningful_parentrevs(log, rev)]
636 for p in self._meaningful_parentrevs(log, rev)]
637
637
638 self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)))
638 self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)))
639
639
640 # don't show the default branch name
640 # don't show the default branch name
641 if branch != 'default':
641 if branch != 'default':
642 branch = encoding.tolocal(branch)
642 branch = encoding.tolocal(branch)
643 self.ui.write(_("branch: %s\n") % branch)
643 self.ui.write(_("branch: %s\n") % branch)
644 for tag in self.repo.nodetags(changenode):
644 for tag in self.repo.nodetags(changenode):
645 self.ui.write(_("tag: %s\n") % tag)
645 self.ui.write(_("tag: %s\n") % tag)
646 for parent in parents:
646 for parent in parents:
647 self.ui.write(_("parent: %d:%s\n") % parent)
647 self.ui.write(_("parent: %d:%s\n") % parent)
648
648
649 if self.ui.debugflag:
649 if self.ui.debugflag:
650 self.ui.write(_("manifest: %d:%s\n") %
650 self.ui.write(_("manifest: %d:%s\n") %
651 (self.repo.manifest.rev(changes[0]), hex(changes[0])))
651 (self.repo.manifest.rev(changes[0]), hex(changes[0])))
652 self.ui.write(_("user: %s\n") % changes[1])
652 self.ui.write(_("user: %s\n") % changes[1])
653 self.ui.write(_("date: %s\n") % date)
653 self.ui.write(_("date: %s\n") % date)
654
654
655 if self.ui.debugflag:
655 if self.ui.debugflag:
656 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
656 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
657 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
657 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
658 files):
658 files):
659 if value:
659 if value:
660 self.ui.write("%-12s %s\n" % (key, " ".join(value)))
660 self.ui.write("%-12s %s\n" % (key, " ".join(value)))
661 elif changes[3] and self.ui.verbose:
661 elif changes[3] and self.ui.verbose:
662 self.ui.write(_("files: %s\n") % " ".join(changes[3]))
662 self.ui.write(_("files: %s\n") % " ".join(changes[3]))
663 if copies and self.ui.verbose:
663 if copies and self.ui.verbose:
664 copies = ['%s (%s)' % c for c in copies]
664 copies = ['%s (%s)' % c for c in copies]
665 self.ui.write(_("copies: %s\n") % ' '.join(copies))
665 self.ui.write(_("copies: %s\n") % ' '.join(copies))
666
666
667 if extra and self.ui.debugflag:
667 if extra and self.ui.debugflag:
668 for key, value in sorted(extra.items()):
668 for key, value in sorted(extra.items()):
669 self.ui.write(_("extra: %s=%s\n")
669 self.ui.write(_("extra: %s=%s\n")
670 % (key, value.encode('string_escape')))
670 % (key, value.encode('string_escape')))
671
671
672 description = changes[4].strip()
672 description = changes[4].strip()
673 if description:
673 if description:
674 if self.ui.verbose:
674 if self.ui.verbose:
675 self.ui.write(_("description:\n"))
675 self.ui.write(_("description:\n"))
676 self.ui.write(description)
676 self.ui.write(description)
677 self.ui.write("\n\n")
677 self.ui.write("\n\n")
678 else:
678 else:
679 self.ui.write(_("summary: %s\n") %
679 self.ui.write(_("summary: %s\n") %
680 description.splitlines()[0])
680 description.splitlines()[0])
681 self.ui.write("\n")
681 self.ui.write("\n")
682
682
683 self.showpatch(changenode)
683 self.showpatch(changenode)
684
684
685 def showpatch(self, node):
685 def showpatch(self, node):
686 if self.patch:
686 if self.patch:
687 prev = self.repo.changelog.parents(node)[0]
687 prev = self.repo.changelog.parents(node)[0]
688 chunks = patch.diff(self.repo, prev, node, match=self.patch,
688 chunks = patch.diff(self.repo, prev, node, match=self.patch,
689 opts=patch.diffopts(self.ui, self.diffopts))
689 opts=patch.diffopts(self.ui, self.diffopts))
690 for chunk in chunks:
690 for chunk in chunks:
691 self.ui.write(chunk)
691 self.ui.write(chunk)
692 self.ui.write("\n")
692 self.ui.write("\n")
693
693
694 def _meaningful_parentrevs(self, log, rev):
694 def _meaningful_parentrevs(self, log, rev):
695 """Return list of meaningful (or all if debug) parentrevs for rev.
695 """Return list of meaningful (or all if debug) parentrevs for rev.
696
696
697 For merges (two non-nullrev revisions) both parents are meaningful.
697 For merges (two non-nullrev revisions) both parents are meaningful.
698 Otherwise the first parent revision is considered meaningful if it
698 Otherwise the first parent revision is considered meaningful if it
699 is not the preceding revision.
699 is not the preceding revision.
700 """
700 """
701 parents = log.parentrevs(rev)
701 parents = log.parentrevs(rev)
702 if not self.ui.debugflag and parents[1] == nullrev:
702 if not self.ui.debugflag and parents[1] == nullrev:
703 if parents[0] >= rev - 1:
703 if parents[0] >= rev - 1:
704 parents = []
704 parents = []
705 else:
705 else:
706 parents = [parents[0]]
706 parents = [parents[0]]
707 return parents
707 return parents
708
708
709
709
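# The parent-display rule above as a pure function (nullrev == -1, as in
# mercurial's node module; a sketch, not the printer's interface):
nullrev = -1

def meaningful_parents(p1, p2, rev, debug=False):
    if not debug and p2 == nullrev:
        # linear history: a parent of rev - 1 is implied, show nothing
        return [] if p1 >= rev - 1 else [p1]
    return [p1, p2]

# meaningful_parents(4, -1, 5) -> []      (implied by the previous rev)
# meaningful_parents(2, -1, 5) -> [2]     (history jumps, worth showing)
# meaningful_parents(2, 4, 5)  -> [2, 4]  (merges always show both)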
710 class changeset_templater(changeset_printer):
710 class changeset_templater(changeset_printer):
711 '''format changeset information.'''
711 '''format changeset information.'''
712
712
713 def __init__(self, ui, repo, patch, diffopts, mapfile, buffered):
713 def __init__(self, ui, repo, patch, diffopts, mapfile, buffered):
714 changeset_printer.__init__(self, ui, repo, patch, diffopts, buffered)
714 changeset_printer.__init__(self, ui, repo, patch, diffopts, buffered)
715 filters = templatefilters.filters.copy()
715 filters = templatefilters.filters.copy()
716 filters['formatnode'] = (ui.debugflag and (lambda x: x)
716 filters['formatnode'] = (ui.debugflag and (lambda x: x)
717 or (lambda x: x[:12]))
717 or (lambda x: x[:12]))
718 self.t = templater.templater(mapfile, filters,
718 self.t = templater.templater(mapfile, filters,
719 cache={
719 cache={
720 'parent': '{rev}:{node|formatnode} ',
720 'parent': '{rev}:{node|formatnode} ',
721 'manifest': '{rev}:{node|formatnode}',
721 'manifest': '{rev}:{node|formatnode}',
722 'filecopy': '{name} ({source})'})
722 'filecopy': '{name} ({source})'})
723
723
724 def use_template(self, t):
724 def use_template(self, t):
725 '''set template string to use'''
725 '''set template string to use'''
726 self.t.cache['changeset'] = t
726 self.t.cache['changeset'] = t
727
727
728 def _meaningful_parentrevs(self, ctx):
728 def _meaningful_parentrevs(self, ctx):
729 """Return list of meaningful (or all if debug) parentrevs for rev.
729 """Return list of meaningful (or all if debug) parentrevs for rev.
730 """
730 """
731 parents = ctx.parents()
731 parents = ctx.parents()
732 if len(parents) > 1:
732 if len(parents) > 1:
733 return parents
733 return parents
734 if self.ui.debugflag:
734 if self.ui.debugflag:
735 return [parents[0], self.repo['null']]
735 return [parents[0], self.repo['null']]
736 if parents[0].rev() >= ctx.rev() - 1:
736 if parents[0].rev() >= ctx.rev() - 1:
737 return []
737 return []
738 return parents
738 return parents
739
739
740 def _show(self, ctx, copies, props):
740 def _show(self, ctx, copies, props):
741 '''show a single changeset or file revision'''
741 '''show a single changeset or file revision'''
742
742
743 def showlist(name, values, plural=None, **args):
743 def showlist(name, values, plural=None, **args):
744 '''expand set of values.
744 '''expand set of values.
745 name is name of key in template map.
745 name is name of key in template map.
746 values is list of strings or dicts.
746 values is list of strings or dicts.
747 plural is plural of name, if not simply name + 's'.
747 plural is plural of name, if not simply name + 's'.
748
748
749 expansion works like this, given name 'foo'.
749 expansion works like this, given name 'foo'.
750
750
751 if values is empty, expand 'no_foos'.
751 if values is empty, expand 'no_foos'.
752
752
753 if 'foo' not in template map, return values as a string,
753 if 'foo' not in template map, return values as a string,
754 joined by space.
754 joined by space.
755
755
756 expand 'start_foos'.
756 expand 'start_foos'.
757
757
758 for each value, expand 'foo'. if 'last_foo' in template
758 for each value, expand 'foo'. if 'last_foo' in template
759 map, expand it instead of 'foo' for last key.
759 map, expand it instead of 'foo' for last key.
760
760
761 expand 'end_foos'.
761 expand 'end_foos'.
762 '''
762 '''
763 if plural: names = plural
763 if plural: names = plural
764 else: names = name + 's'
764 else: names = name + 's'
765 if not values:
765 if not values:
766 noname = 'no_' + names
766 noname = 'no_' + names
767 if noname in self.t:
767 if noname in self.t:
768 yield self.t(noname, **args)
768 yield self.t(noname, **args)
769 return
769 return
770 if name not in self.t:
770 if name not in self.t:
771 if isinstance(values[0], str):
771 if isinstance(values[0], str):
772 yield ' '.join(values)
772 yield ' '.join(values)
773 else:
773 else:
774 for v in values:
774 for v in values:
775 yield dict(v, **args)
775 yield dict(v, **args)
776 return
776 return
777 startname = 'start_' + names
777 startname = 'start_' + names
778 if startname in self.t:
778 if startname in self.t:
779 yield self.t(startname, **args)
779 yield self.t(startname, **args)
780 vargs = args.copy()
780 vargs = args.copy()
781 def one(v, tag=name):
781 def one(v, tag=name):
782 try:
782 try:
783 vargs.update(v)
783 vargs.update(v)
784 except (AttributeError, ValueError):
784 except (AttributeError, ValueError):
785 try:
785 try:
786 for a, b in v:
786 for a, b in v:
787 vargs[a] = b
787 vargs[a] = b
788 except ValueError:
788 except ValueError:
789 vargs[name] = v
789 vargs[name] = v
790 return self.t(tag, **vargs)
790 return self.t(tag, **vargs)
791 lastname = 'last_' + name
791 lastname = 'last_' + name
792 if lastname in self.t:
792 if lastname in self.t:
793 last = values.pop()
793 last = values.pop()
794 else:
794 else:
795 last = None
795 last = None
796 for v in values:
796 for v in values:
797 yield one(v)
797 yield one(v)
798 if last is not None:
798 if last is not None:
799 yield one(last, tag=lastname)
799 yield one(last, tag=lastname)
800 endname = 'end_' + names
800 endname = 'end_' + names
801 if endname in self.t:
801 if endname in self.t:
802 yield self.t(endname, **args)
802 yield self.t(endname, **args)
803
803
804 def showbranches(**args):
804 def showbranches(**args):
805 branch = ctx.branch()
805 branch = ctx.branch()
806 if branch != 'default':
806 if branch != 'default':
807 branch = encoding.tolocal(branch)
807 branch = encoding.tolocal(branch)
808 return showlist('branch', [branch], plural='branches', **args)
808 return showlist('branch', [branch], plural='branches', **args)
809
809
810 def showparents(**args):
810 def showparents(**args):
811 parents = [[('rev', p.rev()), ('node', p.hex())]
811 parents = [[('rev', p.rev()), ('node', p.hex())]
812 for p in self._meaningful_parentrevs(ctx)]
812 for p in self._meaningful_parentrevs(ctx)]
813 return showlist('parent', parents, **args)
813 return showlist('parent', parents, **args)
814
814
815 def showtags(**args):
815 def showtags(**args):
816 return showlist('tag', ctx.tags(), **args)
816 return showlist('tag', ctx.tags(), **args)
817
817
818 def showextras(**args):
818 def showextras(**args):
819 for key, value in sorted(ctx.extra().items()):
819 for key, value in sorted(ctx.extra().items()):
820 args = args.copy()
820 args = args.copy()
821 args.update(dict(key=key, value=value))
821 args.update(dict(key=key, value=value))
822 yield self.t('extra', **args)
822 yield self.t('extra', **args)
823
823
824 def showcopies(**args):
824 def showcopies(**args):
825 c = [{'name': x[0], 'source': x[1]} for x in copies]
825 c = [{'name': x[0], 'source': x[1]} for x in copies]
826 return showlist('file_copy', c, plural='file_copies', **args)
826 return showlist('file_copy', c, plural='file_copies', **args)
827
827
828 files = []
828 files = []
829 def getfiles():
829 def getfiles():
830 if not files:
830 if not files:
831 files[:] = self.repo.status(ctx.parents()[0].node(),
831 files[:] = self.repo.status(ctx.parents()[0].node(),
832 ctx.node())[:3]
832 ctx.node())[:3]
833 return files
833 return files
834 def showfiles(**args):
834 def showfiles(**args):
835 return showlist('file', ctx.files(), **args)
835 return showlist('file', ctx.files(), **args)
836 def showmods(**args):
836 def showmods(**args):
837 return showlist('file_mod', getfiles()[0], **args)
837 return showlist('file_mod', getfiles()[0], **args)
838 def showadds(**args):
838 def showadds(**args):
839 return showlist('file_add', getfiles()[1], **args)
839 return showlist('file_add', getfiles()[1], **args)
840 def showdels(**args):
840 def showdels(**args):
841 return showlist('file_del', getfiles()[2], **args)
841 return showlist('file_del', getfiles()[2], **args)
842 def showmanifest(**args):
842 def showmanifest(**args):
843 args = args.copy()
843 args = args.copy()
844 args.update(dict(rev=self.repo.manifest.rev(ctx.changeset()[0]),
844 args.update(dict(rev=self.repo.manifest.rev(ctx.changeset()[0]),
845 node=hex(ctx.changeset()[0])))
845 node=hex(ctx.changeset()[0])))
846 return self.t('manifest', **args)
846 return self.t('manifest', **args)
847
847
848 def showdiffstat(**args):
848 def showdiffstat(**args):
849 diff = patch.diff(self.repo, ctx.parents()[0].node(), ctx.node())
849 diff = patch.diff(self.repo, ctx.parents()[0].node(), ctx.node())
850 files, adds, removes = 0, 0, 0
850 files, adds, removes = 0, 0, 0
851 for i in patch.diffstatdata(util.iterlines(diff)):
851 for i in patch.diffstatdata(util.iterlines(diff)):
852 files += 1
852 files += 1
853 adds += i[1]
853 adds += i[1]
854 removes += i[2]
854 removes += i[2]
855 return '%s: +%s/-%s' % (files, adds, removes)
855 return '%s: +%s/-%s' % (files, adds, removes)
856
856
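# Shape of the reduction in showdiffstat(), with illustrative tuples
# standing in for patch.diffstatdata() records; only fields [1] (adds)
# and [2] (removes) are summed above:
stats = [('a.py', 3, 1), ('b.py', 0, 2)]
line = '%s: +%s/-%s' % (len(stats),
                        sum(s[1] for s in stats),
                        sum(s[2] for s in stats))
# line == '2: +3/-2'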
857 defprops = {
857 defprops = {
858 'author': ctx.user(),
858 'author': ctx.user(),
859 'branches': showbranches,
859 'branches': showbranches,
860 'date': ctx.date(),
860 'date': ctx.date(),
861 'desc': ctx.description().strip(),
861 'desc': ctx.description().strip(),
862 'file_adds': showadds,
862 'file_adds': showadds,
863 'file_dels': showdels,
863 'file_dels': showdels,
864 'file_mods': showmods,
864 'file_mods': showmods,
865 'files': showfiles,
865 'files': showfiles,
866 'file_copies': showcopies,
866 'file_copies': showcopies,
867 'manifest': showmanifest,
867 'manifest': showmanifest,
868 'node': ctx.hex(),
868 'node': ctx.hex(),
869 'parents': showparents,
869 'parents': showparents,
870 'rev': ctx.rev(),
870 'rev': ctx.rev(),
871 'tags': showtags,
871 'tags': showtags,
872 'extras': showextras,
872 'extras': showextras,
873 'diffstat': showdiffstat,
873 'diffstat': showdiffstat,
874 }
874 }
875 props = props.copy()
875 props = props.copy()
876 props.update(defprops)
876 props.update(defprops)
877
877
878 # find correct templates for current mode
878 # find correct templates for current mode
879
879
880 tmplmodes = [
880 tmplmodes = [
881 (True, None),
881 (True, None),
882 (self.ui.verbose, 'verbose'),
882 (self.ui.verbose, 'verbose'),
883 (self.ui.quiet, 'quiet'),
883 (self.ui.quiet, 'quiet'),
884 (self.ui.debugflag, 'debug'),
884 (self.ui.debugflag, 'debug'),
885 ]
885 ]
886
886
887 types = {'header': '', 'changeset': 'changeset'}
887 types = {'header': '', 'changeset': 'changeset'}
888 for mode, postfix in tmplmodes:
888 for mode, postfix in tmplmodes:
889 for type in types:
889 for type in types:
890 cur = postfix and ('%s_%s' % (type, postfix)) or type
890 cur = postfix and ('%s_%s' % (type, postfix)) or type
891 if mode and cur in self.t:
891 if mode and cur in self.t:
892 types[type] = cur
892 types[type] = cur
893
893
894 try:
894 try:
895
895
896 # write header
896 # write header
897 if types['header']:
897 if types['header']:
898 h = templater.stringify(self.t(types['header'], **props))
898 h = templater.stringify(self.t(types['header'], **props))
899 if self.buffered:
899 if self.buffered:
900 self.header[ctx.rev()] = h
900 self.header[ctx.rev()] = h
901 else:
901 else:
902 self.ui.write(h)
902 self.ui.write(h)
903
903
904 # write changeset metadata, then patch if requested
904 # write changeset metadata, then patch if requested
905 key = types['changeset']
905 key = types['changeset']
906 self.ui.write(templater.stringify(self.t(key, **props)))
906 self.ui.write(templater.stringify(self.t(key, **props)))
907 self.showpatch(ctx.node())
907 self.showpatch(ctx.node())
908
908
909 except KeyError, inst:
909 except KeyError, inst:
910 msg = _("%s: no key named '%s'")
910 msg = _("%s: no key named '%s'")
911 raise util.Abort(msg % (self.t.mapfile, inst.args[0]))
911 raise util.Abort(msg % (self.t.mapfile, inst.args[0]))
912 except SyntaxError, inst:
912 except SyntaxError, inst:
913 raise util.Abort(_('%s: %s') % (self.t.mapfile, inst.args[0]))
913 raise util.Abort(_('%s: %s') % (self.t.mapfile, inst.args[0]))
914
914
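# The start_/item/last_/end_ protocol that showlist() documents above,
# reduced to a dict of plain format strings standing in for the templater
# (a sketch, not templater's API):
def toy_showlist(t, name, values, plural=None):
    names = plural or name + 's'
    if not values:
        if 'no_' + names in t:
            yield t['no_' + names]
        return
    if name not in t:
        yield ' '.join(values)
        return
    if 'start_' + names in t:
        yield t['start_' + names]
    last = values.pop() if 'last_' + name in t else None
    for v in values:
        yield t[name] % v
    if last is not None:
        yield t['last_' + name] % last
    if 'end_' + names in t:
        yield t['end_' + names]

# ''.join(toy_showlist({'tag': '%s ', 'last_tag': '%s'}, 'tag',
#                      ['tip', 'stable'])) == 'tip stable'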
915 def show_changeset(ui, repo, opts, buffered=False, matchfn=False):
915 def show_changeset(ui, repo, opts, buffered=False, matchfn=False):
916 """show one changeset using template or regular display.
916 """show one changeset using template or regular display.
917
917
918 Display format will be the first non-empty hit of:
918 Display format will be the first non-empty hit of:
919 1. option 'template'
919 1. option 'template'
920 2. option 'style'
920 2. option 'style'
921 3. [ui] setting 'logtemplate'
921 3. [ui] setting 'logtemplate'
922 4. [ui] setting 'style'
922 4. [ui] setting 'style'
923     If all of these values are either unset or the empty string,
923     If all of these values are either unset or the empty string,
924 regular display via changeset_printer() is done.
924 regular display via changeset_printer() is done.
925 """
925 """
926 # options
926 # options
927 patch = False
927 patch = False
928 if opts.get('patch'):
928 if opts.get('patch'):
929 patch = matchfn or matchall(repo)
929 patch = matchfn or matchall(repo)
930
930
931 tmpl = opts.get('template')
931 tmpl = opts.get('template')
932 style = None
932 style = None
933 if tmpl:
933 if tmpl:
934 tmpl = templater.parsestring(tmpl, quoted=False)
934 tmpl = templater.parsestring(tmpl, quoted=False)
935 else:
935 else:
936 style = opts.get('style')
936 style = opts.get('style')
937
937
938 # ui settings
938 # ui settings
939 if not (tmpl or style):
939 if not (tmpl or style):
940 tmpl = ui.config('ui', 'logtemplate')
940 tmpl = ui.config('ui', 'logtemplate')
941 if tmpl:
941 if tmpl:
942 tmpl = templater.parsestring(tmpl)
942 tmpl = templater.parsestring(tmpl)
943 else:
943 else:
944 style = ui.config('ui', 'style')
944 style = ui.config('ui', 'style')
945
945
946 if not (tmpl or style):
946 if not (tmpl or style):
947 return changeset_printer(ui, repo, patch, opts, buffered)
947 return changeset_printer(ui, repo, patch, opts, buffered)
948
948
949 mapfile = None
949 mapfile = None
950 if style and not tmpl:
950 if style and not tmpl:
951 mapfile = style
951 mapfile = style
952 if not os.path.split(mapfile)[0]:
952 if not os.path.split(mapfile)[0]:
953 mapname = (templater.templatepath('map-cmdline.' + mapfile)
953 mapname = (templater.templatepath('map-cmdline.' + mapfile)
954 or templater.templatepath(mapfile))
954 or templater.templatepath(mapfile))
955 if mapname: mapfile = mapname
955 if mapname: mapfile = mapname
956
956
957 try:
957 try:
958 t = changeset_templater(ui, repo, patch, opts, mapfile, buffered)
958 t = changeset_templater(ui, repo, patch, opts, mapfile, buffered)
959 except SyntaxError, inst:
959 except SyntaxError, inst:
960 raise util.Abort(inst.args[0])
960 raise util.Abort(inst.args[0])
961 if tmpl: t.use_template(tmpl)
961 if tmpl: t.use_template(tmpl)
962 return t
962 return t
963
963
964 def finddate(ui, repo, date):
964 def finddate(ui, repo, date):
965 """Find the tipmost changeset that matches the given date spec"""
965 """Find the tipmost changeset that matches the given date spec"""
966 df = util.matchdate(date)
966 df = util.matchdate(date)
967 get = util.cachefunc(lambda r: repo[r].changeset())
967 get = util.cachefunc(lambda r: repo[r].changeset())
968 changeiter, matchfn = walkchangerevs(ui, repo, [], get, {'rev':None})
968 changeiter, matchfn = walkchangerevs(ui, repo, [], get, {'rev':None})
969 results = {}
969 results = {}
970 for st, rev, fns in changeiter:
970 for st, rev, fns in changeiter:
971 if st == 'add':
971 if st == 'add':
972 d = get(rev)[2]
972 d = get(rev)[2]
973 if df(d[0]):
973 if df(d[0]):
974 results[rev] = d
974 results[rev] = d
975 elif st == 'iter':
975 elif st == 'iter':
976 if rev in results:
976 if rev in results:
977 ui.status(_("Found revision %s from %s\n") %
977 ui.status(_("Found revision %s from %s\n") %
978 (rev, util.datestr(results[rev])))
978 (rev, util.datestr(results[rev])))
979 return str(rev)
979 return str(rev)
980
980
981 raise util.Abort(_("revision matching date not found"))
981 raise util.Abort(_("revision matching date not found"))
982
982
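# finddate() walks newest-to-oldest and returns the first rev whose commit
# date passes util.matchdate()'s predicate.  A toy predicate for a spec
# meaning "on or after 2009-01-01" might look like this (illustrative, not
# the util.matchdate implementation):
import calendar, time

cutoff = calendar.timegm(time.strptime('2009-01-01', '%Y-%m-%d'))
df = lambda when: when >= cutoff   # 'when' is a Unix timestamp, like d[0]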
983 def walkchangerevs(ui, repo, pats, change, opts):
983 def walkchangerevs(ui, repo, pats, change, opts):
984 '''Iterate over files and the revs in which they changed.
984 '''Iterate over files and the revs in which they changed.
985
985
986 Callers most commonly need to iterate backwards over the history
986 Callers most commonly need to iterate backwards over the history
987 in which they are interested. Doing so has awful (quadratic-looking)
987 in which they are interested. Doing so has awful (quadratic-looking)
988 performance, so we use iterators in a "windowed" way.
988 performance, so we use iterators in a "windowed" way.
989
989
990 We walk a window of revisions in the desired order. Within the
990 We walk a window of revisions in the desired order. Within the
991 window, we first walk forwards to gather data, then in the desired
991 window, we first walk forwards to gather data, then in the desired
992 order (usually backwards) to display it.
992 order (usually backwards) to display it.
993
993
994 This function returns an (iterator, matchfn) tuple. The iterator
994 This function returns an (iterator, matchfn) tuple. The iterator
995 yields 3-tuples. They will be of one of the following forms:
995 yields 3-tuples. They will be of one of the following forms:
996
996
997 "window", incrementing, lastrev: stepping through a window,
997 "window", incrementing, lastrev: stepping through a window,
998 positive if walking forwards through revs, last rev in the
998 positive if walking forwards through revs, last rev in the
999 sequence iterated over - use to reset state for the current window
999 sequence iterated over - use to reset state for the current window
1000
1000
1001 "add", rev, fns: out-of-order traversal of the given file names
1001 "add", rev, fns: out-of-order traversal of the given file names
1002 fns, which changed during revision rev - use to gather data for
1002 fns, which changed during revision rev - use to gather data for
1003 possible display
1003 possible display
1004
1004
1005 "iter", rev, None: in-order traversal of the revs earlier iterated
1005 "iter", rev, None: in-order traversal of the revs earlier iterated
1006 over with "add" - use to display data'''
1006 over with "add" - use to display data'''
1007
1007
1008 def increasing_windows(start, end, windowsize=8, sizelimit=512):
1008 def increasing_windows(start, end, windowsize=8, sizelimit=512):
1009 if start < end:
1009 if start < end:
1010 while start < end:
1010 while start < end:
1011 yield start, min(windowsize, end-start)
1011 yield start, min(windowsize, end-start)
1012 start += windowsize
1012 start += windowsize
1013 if windowsize < sizelimit:
1013 if windowsize < sizelimit:
1014 windowsize *= 2
1014 windowsize *= 2
1015 else:
1015 else:
1016 while start > end:
1016 while start > end:
1017 yield start, min(windowsize, start-end-1)
1017 yield start, min(windowsize, start-end-1)
1018 start -= windowsize
1018 start -= windowsize
1019 if windowsize < sizelimit:
1019 if windowsize < sizelimit:
1020 windowsize *= 2
1020 windowsize *= 2
1021
1021
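    # What the generator above yields for a forward walk over revs 0..99:
    # the window doubles until it hits sizelimit, and the final window is
    # clipped to what remains.
    #
    #   list(increasing_windows(0, 100)) == [(0, 8), (8, 16), (24, 32), (56, 44)]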
1022 m = match(repo, pats, opts)
1022 m = match(repo, pats, opts)
1023 follow = opts.get('follow') or opts.get('follow_first')
1023 follow = opts.get('follow') or opts.get('follow_first')
1024
1024
1025 if not len(repo):
1025 if not len(repo):
1026 return [], m
1026 return [], m
1027
1027
1028 if follow:
1028 if follow:
1029 defrange = '%s:0' % repo['.'].rev()
1029 defrange = '%s:0' % repo['.'].rev()
1030 else:
1030 else:
1031 defrange = '-1:0'
1031 defrange = '-1:0'
1032 revs = revrange(repo, opts['rev'] or [defrange])
1032 revs = revrange(repo, opts['rev'] or [defrange])
1033 wanted = set()
1033 wanted = set()
1034 slowpath = m.anypats() or (m.files() and opts.get('removed'))
1034 slowpath = m.anypats() or (m.files() and opts.get('removed'))
1035 fncache = {}
1035 fncache = {}
1036
1036
1037 if not slowpath and not m.files():
1037 if not slowpath and not m.files():
1038 # No files, no patterns. Display all revs.
1038 # No files, no patterns. Display all revs.
1039 wanted = set(revs)
1039 wanted = set(revs)
1040 copies = []
1040 copies = []
1041 if not slowpath:
1041 if not slowpath:
1042 # Only files, no patterns. Check the history of each file.
1042 # Only files, no patterns. Check the history of each file.
1043 def filerevgen(filelog, node):
1043 def filerevgen(filelog, node):
1044 cl_count = len(repo)
1044 cl_count = len(repo)
1045 if node is None:
1045 if node is None:
1046 last = len(filelog) - 1
1046 last = len(filelog) - 1
1047 else:
1047 else:
1048 last = filelog.rev(node)
1048 last = filelog.rev(node)
1049 for i, window in increasing_windows(last, nullrev):
1049 for i, window in increasing_windows(last, nullrev):
1050 revs = []
1050 revs = []
1051 for j in xrange(i - window, i + 1):
1051 for j in xrange(i - window, i + 1):
1052 n = filelog.node(j)
1052 n = filelog.node(j)
1053 revs.append((filelog.linkrev(j),
1053 revs.append((filelog.linkrev(j),
1054 follow and filelog.renamed(n)))
1054 follow and filelog.renamed(n)))
1055 for rev in reversed(revs):
1055 for rev in reversed(revs):
1056                 # only yield revs for which we have the changelog; they
1056                 # only yield revs for which we have the changelog; they
1057                 # can be missing while doing "hg log" during a pull or commit
1057                 # can be missing while doing "hg log" during a pull or commit
1058 if rev[0] < cl_count:
1058 if rev[0] < cl_count:
1059 yield rev
1059 yield rev
1060 def iterfiles():
1060 def iterfiles():
1061 for filename in m.files():
1061 for filename in m.files():
1062 yield filename, None
1062 yield filename, None
1063 for filename_node in copies:
1063 for filename_node in copies:
1064 yield filename_node
1064 yield filename_node
1065 minrev, maxrev = min(revs), max(revs)
1065 minrev, maxrev = min(revs), max(revs)
1066 for file_, node in iterfiles():
1066 for file_, node in iterfiles():
1067 filelog = repo.file(file_)
1067 filelog = repo.file(file_)
1068 if not len(filelog):
1068 if not len(filelog):
1069 if node is None:
1069 if node is None:
1070 # A zero count may be a directory or deleted file, so
1070 # A zero count may be a directory or deleted file, so
1071 # try to find matching entries on the slow path.
1071 # try to find matching entries on the slow path.
1072 if follow:
1072 if follow:
1073 raise util.Abort(_('cannot follow nonexistent file: "%s"') % file_)
1073 raise util.Abort(_('cannot follow nonexistent file: "%s"') % file_)
1074 slowpath = True
1074 slowpath = True
1075 break
1075 break
1076 else:
1076 else:
1077 ui.warn(_('%s:%s copy source revision cannot be found!\n')
1077 ui.warn(_('%s:%s copy source revision cannot be found!\n')
1078 % (file_, short(node)))
1078 % (file_, short(node)))
1079 continue
1079 continue
1080 for rev, copied in filerevgen(filelog, node):
1080 for rev, copied in filerevgen(filelog, node):
1081 if rev <= maxrev:
1081 if rev <= maxrev:
1082 if rev < minrev:
1082 if rev < minrev:
1083 break
1083 break
1084 fncache.setdefault(rev, [])
1084 fncache.setdefault(rev, [])
1085 fncache[rev].append(file_)
1085 fncache[rev].append(file_)
1086 wanted.add(rev)
1086 wanted.add(rev)
1087 if follow and copied:
1087 if follow and copied:
1088 copies.append(copied)
1088 copies.append(copied)
1089 if slowpath:
1089 if slowpath:
1090 if follow:
1090 if follow:
1091 raise util.Abort(_('can only follow copies/renames for explicit '
1091 raise util.Abort(_('can only follow copies/renames for explicit '
1092 'file names'))
1092 'file names'))
1093
1093
1094 # The slow path checks files modified in every changeset.
1094 # The slow path checks files modified in every changeset.
1095 def changerevgen():
1095 def changerevgen():
1096 for i, window in increasing_windows(len(repo) - 1, nullrev):
1096 for i, window in increasing_windows(len(repo) - 1, nullrev):
1097 for j in xrange(i - window, i + 1):
1097 for j in xrange(i - window, i + 1):
1098 yield j, change(j)[3]
1098 yield j, change(j)[3]
1099
1099
1100 for rev, changefiles in changerevgen():
1100 for rev, changefiles in changerevgen():
1101 matches = filter(m, changefiles)
1101 matches = filter(m, changefiles)
1102 if matches:
1102 if matches:
1103 fncache[rev] = matches
1103 fncache[rev] = matches
1104 wanted.add(rev)
1104 wanted.add(rev)
1105
1105
1106 class followfilter:
1106 class followfilter:
1107 def __init__(self, onlyfirst=False):
1107 def __init__(self, onlyfirst=False):
1108 self.startrev = nullrev
1108 self.startrev = nullrev
1109 self.roots = []
1109 self.roots = []
1110 self.onlyfirst = onlyfirst
1110 self.onlyfirst = onlyfirst
1111
1111
1112 def match(self, rev):
1112 def match(self, rev):
1113 def realparents(rev):
1113 def realparents(rev):
1114 if self.onlyfirst:
1114 if self.onlyfirst:
1115 return repo.changelog.parentrevs(rev)[0:1]
1115 return repo.changelog.parentrevs(rev)[0:1]
1116 else:
1116 else:
1117 return filter(lambda x: x != nullrev,
1117 return filter(lambda x: x != nullrev,
1118 repo.changelog.parentrevs(rev))
1118 repo.changelog.parentrevs(rev))
1119
1119
1120 if self.startrev == nullrev:
1120 if self.startrev == nullrev:
1121 self.startrev = rev
1121 self.startrev = rev
1122 return True
1122 return True
1123
1123
1124 if rev > self.startrev:
1124 if rev > self.startrev:
1125 # forward: all descendants
1125 # forward: all descendants
1126 if not self.roots:
1126 if not self.roots:
1127 self.roots.append(self.startrev)
1127 self.roots.append(self.startrev)
1128 for parent in realparents(rev):
1128 for parent in realparents(rev):
1129 if parent in self.roots:
1129 if parent in self.roots:
1130 self.roots.append(rev)
1130 self.roots.append(rev)
1131 return True
1131 return True
1132 else:
1132 else:
1133 # backwards: all parents
1133 # backwards: all parents
1134 if not self.roots:
1134 if not self.roots:
1135 self.roots.extend(realparents(self.startrev))
1135 self.roots.extend(realparents(self.startrev))
1136 if rev in self.roots:
1136 if rev in self.roots:
1137 self.roots.remove(rev)
1137 self.roots.remove(rev)
1138 self.roots.extend(realparents(rev))
1138 self.roots.extend(realparents(rev))
1139 return True
1139 return True
1140
1140
1141 return False
1141 return False
1142
1142
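# followfilter above, exercised on a toy linear history (parents maps
# rev -> parent revs; a sketch of the backward traversal only, not the
# class API).  Scanning 2, 1, 0 backwards from startrev 2 matches every
# rev, because each step moves the roots set one generation further back:
parents = {0: (), 1: (0,), 2: (1,)}
roots, startrev, hits = [], 2, []
for rev in (2, 1, 0):
    if rev == startrev:
        hits.append(rev)
    else:
        if not roots:
            roots.extend(parents[startrev])
        if rev in roots:
            roots.remove(rev)
            roots.extend(parents[rev])
            hits.append(rev)
# hits == [2, 1, 0]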
1143 # it might be worthwhile to do this in the iterator if the rev range
1143 # it might be worthwhile to do this in the iterator if the rev range
1144 # is descending and the prune args are all within that range
1144 # is descending and the prune args are all within that range
1145 for rev in opts.get('prune', ()):
1145 for rev in opts.get('prune', ()):
1146 rev = repo.changelog.rev(repo.lookup(rev))
1146 rev = repo.changelog.rev(repo.lookup(rev))
1147 ff = followfilter()
1147 ff = followfilter()
1148 stop = min(revs[0], revs[-1])
1148 stop = min(revs[0], revs[-1])
1149 for x in xrange(rev, stop-1, -1):
1149 for x in xrange(rev, stop-1, -1):
1150 if ff.match(x):
1150 if ff.match(x):
1151 wanted.discard(x)
1151 wanted.discard(x)
1152
1152
1153 def iterate():
1153 def iterate():
1154 if follow and not m.files():
1154 if follow and not m.files():
1155 ff = followfilter(onlyfirst=opts.get('follow_first'))
1155 ff = followfilter(onlyfirst=opts.get('follow_first'))
1156 def want(rev):
1156 def want(rev):
1157 return ff.match(rev) and rev in wanted
1157 return ff.match(rev) and rev in wanted
1158 else:
1158 else:
1159 def want(rev):
1159 def want(rev):
1160 return rev in wanted
1160 return rev in wanted
1161
1161
1162 for i, window in increasing_windows(0, len(revs)):
1162 for i, window in increasing_windows(0, len(revs)):
1163 yield 'window', revs[0] < revs[-1], revs[-1]
1163 yield 'window', revs[0] < revs[-1], revs[-1]
1164 nrevs = [rev for rev in revs[i:i+window] if want(rev)]
1164 nrevs = [rev for rev in revs[i:i+window] if want(rev)]
1165 for rev in sorted(nrevs):
1165 for rev in sorted(nrevs):
1166 fns = fncache.get(rev)
1166 fns = fncache.get(rev)
1167 if not fns:
1167 if not fns:
1168 def fns_generator():
1168 def fns_generator():
1169 for f in change(rev)[3]:
1169 for f in change(rev)[3]:
1170 if m(f):
1170 if m(f):
1171 yield f
1171 yield f
1172 fns = fns_generator()
1172 fns = fns_generator()
1173 yield 'add', rev, fns
1173 yield 'add', rev, fns
1174 for rev in nrevs:
1174 for rev in nrevs:
1175 yield 'iter', rev, None
1175 yield 'iter', rev, None
1176 return iterate(), m
1176 return iterate(), m
1177
1177
1178 def commit(ui, repo, commitfunc, pats, opts):
1178 def commit(ui, repo, commitfunc, pats, opts):
1179 '''commit the specified files or all outstanding changes'''
1179 '''commit the specified files or all outstanding changes'''
1180 date = opts.get('date')
1180 date = opts.get('date')
1181 if date:
1181 if date:
1182 opts['date'] = util.parsedate(date)
1182 opts['date'] = util.parsedate(date)
1183 message = logmessage(opts)
1183 message = logmessage(opts)
1184
1184
1185 # extract addremove carefully -- this function can be called from a command
1185 # extract addremove carefully -- this function can be called from a command
1186 # that doesn't support addremove
1186 # that doesn't support addremove
1187 if opts.get('addremove'):
1187 if opts.get('addremove'):
1188 addremove(repo, pats, opts)
1188 addremove(repo, pats, opts)
1189
1189
1190 m = match(repo, pats, opts)
1190 m = match(repo, pats, opts)
1191 if pats:
1191 if pats:
1192 modified, added, removed = repo.status(match=m)[:3]
1192 modified, added, removed = repo.status(match=m)[:3]
1193 files = sorted(modified + added + removed)
1193 files = sorted(modified + added + removed)
1194
1194
1195 def is_dir(f):
1195 def is_dir(f):
1196 name = f + '/'
1196 name = f + '/'
1197 i = bisect.bisect(files, name)
1197 i = bisect.bisect(files, name)
1198 return i < len(files) and files[i].startswith(name)
1198 return i < len(files) and files[i].startswith(name)
1199
1199
1200 for f in m.files():
1200 for f in m.files():
1201 if f == '.':
1201 if f == '.':
1202 continue
1202 continue
1203 if f not in files:
1203 if f not in files:
1204 rf = repo.wjoin(f)
1204 rf = repo.wjoin(f)
1205 rel = repo.pathto(f)
1205 rel = repo.pathto(f)
1206 try:
1206 try:
1207 mode = os.lstat(rf)[stat.ST_MODE]
1207 mode = os.lstat(rf)[stat.ST_MODE]
1208 except OSError:
1208 except OSError:
1209                 if is_dir(f): # deleted directory?
1209                 if is_dir(f): # deleted directory?
1210 continue
1210 continue
1211 raise util.Abort(_("file %s not found!") % rel)
1211 raise util.Abort(_("file %s not found!") % rel)
1212 if stat.S_ISDIR(mode):
1212 if stat.S_ISDIR(mode):
1213 if not is_dir(f):
1213 if not is_dir(f):
1214 raise util.Abort(_("no match under directory %s!")
1214 raise util.Abort(_("no match under directory %s!")
1215 % rel)
1215 % rel)
1216 elif not (stat.S_ISREG(mode) or stat.S_ISLNK(mode)):
1216 elif not (stat.S_ISREG(mode) or stat.S_ISLNK(mode)):
1217 raise util.Abort(_("can't commit %s: "
1217 raise util.Abort(_("can't commit %s: "
1218 "unsupported file type!") % rel)
1218 "unsupported file type!") % rel)
1219 elif f not in repo.dirstate:
1219 elif f not in repo.dirstate:
1220 raise util.Abort(_("file %s not tracked!") % rel)
1220 raise util.Abort(_("file %s not tracked!") % rel)
1221 m = matchfiles(repo, files)
1221 m = matchfiles(repo, files)
1222 try:
1222 try:
1223 return commitfunc(ui, repo, message, m, opts)
1223 return commitfunc(ui, repo, message, m, opts)
1224 except ValueError, inst:
1224 except ValueError, inst:
1225 raise util.Abort(str(inst))
1225 raise util.Abort(str(inst))
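# The bisect trick inside commit()'s is_dir(): with the file list sorted,
# every entry under directory f sorts immediately after the sentinel
# f + '/', so a single probe distinguishes "deleted directory" from
# "missing file" (illustrative paths):
import bisect

files = ['a', 'src/main.py', 'src/util.py', 'zz']
name = 'src' + '/'
i = bisect.bisect(files, name)
is_directory = i < len(files) and files[i].startswith(name)
# is_directory == True: 'src/main.py' proves 'src' matched something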
@@ -1,3468 +1,3469 b''
1 # commands.py - command processing for mercurial
1 # commands.py - command processing for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2, incorporated herein by reference.
6 # GNU General Public License version 2, incorporated herein by reference.
7
7
8 from node import hex, nullid, nullrev, short
8 from node import hex, nullid, nullrev, short
9 from lock import release
9 from lock import release
10 from i18n import _, gettext
10 from i18n import _, gettext
11 import os, re, sys, textwrap, subprocess
11 import os, re, sys, textwrap, subprocess, difflib, time
12 import hg, util, revlog, bundlerepo, extensions, copies, context, error
12 import hg, util, revlog, bundlerepo, extensions, copies, context, error
13 import difflib, patch, time, help, mdiff, tempfile, url, encoding
13 import patch, help, mdiff, tempfile, url, encoding
14 import archival, changegroup, cmdutil, hgweb.server, sshserver, hbisect
14 import archival, changegroup, cmdutil, sshserver, hbisect
15 from hgweb import server
15 import merge as merge_
16 import merge as merge_
16
17
17 # Commands start here, listed alphabetically
18 # Commands start here, listed alphabetically
18
19
19 def add(ui, repo, *pats, **opts):
20 def add(ui, repo, *pats, **opts):
20 """add the specified files on the next commit
21 """add the specified files on the next commit
21
22
22 Schedule files to be version controlled and added to the
23 Schedule files to be version controlled and added to the
23 repository.
24 repository.
24
25
25 The files will be added to the repository at the next commit. To
26 The files will be added to the repository at the next commit. To
26 undo an add before that, see hg revert.
27 undo an add before that, see hg revert.
27
28
28 If no names are given, add all files to the repository.
29 If no names are given, add all files to the repository.
29 """
30 """
30
31
31 rejected = None
32 rejected = None
32 exacts = {}
33 exacts = {}
33 names = []
34 names = []
34 m = cmdutil.match(repo, pats, opts)
35 m = cmdutil.match(repo, pats, opts)
35 m.bad = lambda x,y: True
36 m.bad = lambda x,y: True
36 for abs in repo.walk(m):
37 for abs in repo.walk(m):
37 if m.exact(abs):
38 if m.exact(abs):
38 if ui.verbose:
39 if ui.verbose:
39 ui.status(_('adding %s\n') % m.rel(abs))
40 ui.status(_('adding %s\n') % m.rel(abs))
40 names.append(abs)
41 names.append(abs)
41 exacts[abs] = 1
42 exacts[abs] = 1
42 elif abs not in repo.dirstate:
43 elif abs not in repo.dirstate:
43 ui.status(_('adding %s\n') % m.rel(abs))
44 ui.status(_('adding %s\n') % m.rel(abs))
44 names.append(abs)
45 names.append(abs)
45 if not opts.get('dry_run'):
46 if not opts.get('dry_run'):
46 rejected = repo.add(names)
47 rejected = repo.add(names)
47 rejected = [p for p in rejected if p in exacts]
48 rejected = [p for p in rejected if p in exacts]
48 return rejected and 1 or 0
49 return rejected and 1 or 0
49
50
50 def addremove(ui, repo, *pats, **opts):
51 def addremove(ui, repo, *pats, **opts):
51 """add all new files, delete all missing files
52 """add all new files, delete all missing files
52
53
53 Add all new files and remove all missing files from the
54 Add all new files and remove all missing files from the
54 repository.
55 repository.
55
56
56 New files are ignored if they match any of the patterns in
57 New files are ignored if they match any of the patterns in
57 .hgignore. As with add, these changes take effect at the next
58 .hgignore. As with add, these changes take effect at the next
58 commit.
59 commit.
59
60
60 Use the -s/--similarity option to detect renamed files. With a
61 Use the -s/--similarity option to detect renamed files. With a
61 parameter > 0, this compares every removed file with every added
62 parameter > 0, this compares every removed file with every added
62 file and records those similar enough as renames. This option
63 file and records those similar enough as renames. This option
63 takes a percentage between 0 (disabled) and 100 (files must be
64 takes a percentage between 0 (disabled) and 100 (files must be
64 identical) as its parameter. Detecting renamed files this way can
65 identical) as its parameter. Detecting renamed files this way can
65 be expensive.
66 be expensive.
66 """
67 """
67 try:
68 try:
68 sim = float(opts.get('similarity') or 0)
69 sim = float(opts.get('similarity') or 0)
69 except ValueError:
70 except ValueError:
70 raise util.Abort(_('similarity must be a number'))
71 raise util.Abort(_('similarity must be a number'))
71 if sim < 0 or sim > 100:
72 if sim < 0 or sim > 100:
72 raise util.Abort(_('similarity must be between 0 and 100'))
73 raise util.Abort(_('similarity must be between 0 and 100'))
73 return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)
74 return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)
74
75
75 def annotate(ui, repo, *pats, **opts):
76 def annotate(ui, repo, *pats, **opts):
76 """show changeset information per file line
77 """show changeset information per file line
77
78
78 List changes in files, showing the revision id responsible for
79 List changes in files, showing the revision id responsible for
79 each line.
80 each line.
80
81
81 This command is useful to discover who did a change or when a
82 This command is useful to discover who did a change or when a
82 change took place.
83 change took place.
83
84
84 Without the -a/--text option, annotate will avoid processing files
85 Without the -a/--text option, annotate will avoid processing files
85 it detects as binary. With -a, annotate will generate an
86 it detects as binary. With -a, annotate will generate an
86 annotation anyway, probably with undesirable results.
87 annotation anyway, probably with undesirable results.
87 """
88 """
88 datefunc = ui.quiet and util.shortdate or util.datestr
89 datefunc = ui.quiet and util.shortdate or util.datestr
89 getdate = util.cachefunc(lambda x: datefunc(x[0].date()))
90 getdate = util.cachefunc(lambda x: datefunc(x[0].date()))
90
91
91 if not pats:
92 if not pats:
92 raise util.Abort(_('at least one file name or pattern required'))
93 raise util.Abort(_('at least one file name or pattern required'))
93
94
94 opmap = [('user', lambda x: ui.shortuser(x[0].user())),
95 opmap = [('user', lambda x: ui.shortuser(x[0].user())),
95 ('number', lambda x: str(x[0].rev())),
96 ('number', lambda x: str(x[0].rev())),
96 ('changeset', lambda x: short(x[0].node())),
97 ('changeset', lambda x: short(x[0].node())),
97 ('date', getdate),
98 ('date', getdate),
98 ('follow', lambda x: x[0].path()),
99 ('follow', lambda x: x[0].path()),
99 ]
100 ]
100
101
101 if (not opts.get('user') and not opts.get('changeset') and not opts.get('date')
102 if (not opts.get('user') and not opts.get('changeset') and not opts.get('date')
102 and not opts.get('follow')):
103 and not opts.get('follow')):
103 opts['number'] = 1
104 opts['number'] = 1
104
105
105 linenumber = opts.get('line_number') is not None
106 linenumber = opts.get('line_number') is not None
106 if (linenumber and (not opts.get('changeset')) and (not opts.get('number'))):
107 if (linenumber and (not opts.get('changeset')) and (not opts.get('number'))):
107 raise util.Abort(_('at least one of -n/-c is required for -l'))
108 raise util.Abort(_('at least one of -n/-c is required for -l'))
108
109
109 funcmap = [func for op, func in opmap if opts.get(op)]
110 funcmap = [func for op, func in opmap if opts.get(op)]
110 if linenumber:
111 if linenumber:
111 lastfunc = funcmap[-1]
112 lastfunc = funcmap[-1]
112 funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])
113 funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])
113
114
114 ctx = repo[opts.get('rev')]
115 ctx = repo[opts.get('rev')]
115
116
116 m = cmdutil.match(repo, pats, opts)
117 m = cmdutil.match(repo, pats, opts)
117 for abs in ctx.walk(m):
118 for abs in ctx.walk(m):
118 fctx = ctx[abs]
119 fctx = ctx[abs]
119 if not opts.get('text') and util.binary(fctx.data()):
120 if not opts.get('text') and util.binary(fctx.data()):
120 ui.write(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
121 ui.write(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
121 continue
122 continue
122
123
123 lines = fctx.annotate(follow=opts.get('follow'),
124 lines = fctx.annotate(follow=opts.get('follow'),
124 linenumber=linenumber)
125 linenumber=linenumber)
125 pieces = []
126 pieces = []
126
127
127 for f in funcmap:
128 for f in funcmap:
128 l = [f(n) for n, dummy in lines]
129 l = [f(n) for n, dummy in lines]
129 if l:
130 if l:
130 ml = max(map(len, l))
131 ml = max(map(len, l))
131 pieces.append(["%*s" % (ml, x) for x in l])
132 pieces.append(["%*s" % (ml, x) for x in l])
132
133
133 if pieces:
134 if pieces:
134 for p, l in zip(zip(*pieces), lines):
135 for p, l in zip(zip(*pieces), lines):
135 ui.write("%s: %s" % (" ".join(p), l[1]))
136 ui.write("%s: %s" % (" ".join(p), l[1]))
136
137
137 def archive(ui, repo, dest, **opts):
138 def archive(ui, repo, dest, **opts):
138 '''create unversioned archive of a repository revision
139 '''create unversioned archive of a repository revision
139
140
140 By default, the revision used is the parent of the working
141 By default, the revision used is the parent of the working
141 directory; use -r/--rev to specify a different revision.
142 directory; use -r/--rev to specify a different revision.
142
143
143 To specify the type of archive to create, use -t/--type. Valid
144 To specify the type of archive to create, use -t/--type. Valid
144 types are:
145 types are:
145
146
146 "files" (default): a directory full of files
147 "files" (default): a directory full of files
147 "tar": tar archive, uncompressed
148 "tar": tar archive, uncompressed
148 "tbz2": tar archive, compressed using bzip2
149 "tbz2": tar archive, compressed using bzip2
149 "tgz": tar archive, compressed using gzip
150 "tgz": tar archive, compressed using gzip
150 "uzip": zip archive, uncompressed
151 "uzip": zip archive, uncompressed
151 "zip": zip archive, compressed using deflate
152 "zip": zip archive, compressed using deflate
152
153
153 The exact name of the destination archive or directory is given
154 The exact name of the destination archive or directory is given
154 using a format string; see 'hg help export' for details.
155 using a format string; see 'hg help export' for details.
155
156
156 Each member added to an archive file has a directory prefix
157 Each member added to an archive file has a directory prefix
157 prepended. Use -p/--prefix to specify a format string for the
158 prepended. Use -p/--prefix to specify a format string for the
158 prefix. The default is the basename of the archive, with suffixes
159 prefix. The default is the basename of the archive, with suffixes
159 removed.
160 removed.
160 '''
161 '''
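# Illustrative usage (tag and file name assumed):
#   $ hg archive -t tgz -r 1.0 ../myproj-1.0.tar.gz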
161
162
162 ctx = repo[opts.get('rev')]
163 ctx = repo[opts.get('rev')]
163 if not ctx:
164 if not ctx:
164 raise util.Abort(_('no working directory: please specify a revision'))
165 raise util.Abort(_('no working directory: please specify a revision'))
165 node = ctx.node()
166 node = ctx.node()
166 dest = cmdutil.make_filename(repo, dest, node)
167 dest = cmdutil.make_filename(repo, dest, node)
167 if os.path.realpath(dest) == repo.root:
168 if os.path.realpath(dest) == repo.root:
168 raise util.Abort(_('repository root cannot be destination'))
169 raise util.Abort(_('repository root cannot be destination'))
169 matchfn = cmdutil.match(repo, [], opts)
170 matchfn = cmdutil.match(repo, [], opts)
170 kind = opts.get('type') or 'files'
171 kind = opts.get('type') or 'files'
171 prefix = opts.get('prefix')
172 prefix = opts.get('prefix')
172 if dest == '-':
173 if dest == '-':
173 if kind == 'files':
174 if kind == 'files':
174 raise util.Abort(_('cannot archive plain files to stdout'))
175 raise util.Abort(_('cannot archive plain files to stdout'))
175 dest = sys.stdout
176 dest = sys.stdout
176 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
177 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
177 prefix = cmdutil.make_filename(repo, prefix, node)
178 prefix = cmdutil.make_filename(repo, prefix, node)
178 archival.archive(repo, dest, node, kind, not opts.get('no_decode'),
179 archival.archive(repo, dest, node, kind, not opts.get('no_decode'),
179 matchfn, prefix)
180 matchfn, prefix)
180
181
181 def backout(ui, repo, node=None, rev=None, **opts):
182 def backout(ui, repo, node=None, rev=None, **opts):
182 '''reverse effect of earlier changeset
183 '''reverse effect of earlier changeset
183
184
184 Commit the backed out changes as a new changeset. The new
185 Commit the backed out changes as a new changeset. The new
185 changeset is a child of the backed out changeset.
186 changeset is a child of the backed out changeset.
186
187
187 If you back out a changeset other than the tip, a new head is
188 If you back out a changeset other than the tip, a new head is
188 created. This head will be the new tip and you should merge this
189 created. This head will be the new tip and you should merge this
189 backout changeset with another head (current one by default).
190 backout changeset with another head (current one by default).
190
191
191 The --merge option remembers the parent of the working directory
192 The --merge option remembers the parent of the working directory
192 before starting the backout, then merges the new head with that
193 before starting the backout, then merges the new head with that
193 changeset afterwards. This saves you from doing the merge by hand.
194 changeset afterwards. This saves you from doing the merge by hand.
194 The result of this merge is not committed, as with a normal merge.
195 The result of this merge is not committed, as with a normal merge.
195
196
196 See 'hg help dates' for a list of formats valid for -d/--date.
197 See 'hg help dates' for a list of formats valid for -d/--date.
197 '''
198 '''
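# Illustrative usage (revision assumed): 'hg backout --merge REV'
# commits the reversal of REV and merges it back into the working
# directory parent.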
198 if rev and node:
199 if rev and node:
199 raise util.Abort(_("please specify just one revision"))
200 raise util.Abort(_("please specify just one revision"))
200
201
201 if not rev:
202 if not rev:
202 rev = node
203 rev = node
203
204
204 if not rev:
205 if not rev:
205 raise util.Abort(_("please specify a revision to backout"))
206 raise util.Abort(_("please specify a revision to backout"))
206
207
207 date = opts.get('date')
208 date = opts.get('date')
208 if date:
209 if date:
209 opts['date'] = util.parsedate(date)
210 opts['date'] = util.parsedate(date)
210
211
211 cmdutil.bail_if_changed(repo)
212 cmdutil.bail_if_changed(repo)
212 node = repo.lookup(rev)
213 node = repo.lookup(rev)
213
214
214 op1, op2 = repo.dirstate.parents()
215 op1, op2 = repo.dirstate.parents()
215 a = repo.changelog.ancestor(op1, node)
216 a = repo.changelog.ancestor(op1, node)
216 if a != node:
217 if a != node:
217 raise util.Abort(_('cannot back out change on a different branch'))
218 raise util.Abort(_('cannot back out change on a different branch'))
218
219
219 p1, p2 = repo.changelog.parents(node)
220 p1, p2 = repo.changelog.parents(node)
220 if p1 == nullid:
221 if p1 == nullid:
221 raise util.Abort(_('cannot back out a change with no parents'))
222 raise util.Abort(_('cannot back out a change with no parents'))
222 if p2 != nullid:
223 if p2 != nullid:
223 if not opts.get('parent'):
224 if not opts.get('parent'):
224 raise util.Abort(_('cannot back out a merge changeset without '
225 raise util.Abort(_('cannot back out a merge changeset without '
225 '--parent'))
226 '--parent'))
226 p = repo.lookup(opts['parent'])
227 p = repo.lookup(opts['parent'])
227 if p not in (p1, p2):
228 if p not in (p1, p2):
228 raise util.Abort(_('%s is not a parent of %s') %
229 raise util.Abort(_('%s is not a parent of %s') %
229 (short(p), short(node)))
230 (short(p), short(node)))
230 parent = p
231 parent = p
231 else:
232 else:
232 if opts.get('parent'):
233 if opts.get('parent'):
233 raise util.Abort(_('cannot use --parent on non-merge changeset'))
234 raise util.Abort(_('cannot use --parent on non-merge changeset'))
234 parent = p1
235 parent = p1
235
236
236 # the backout should appear on the same branch
237 # the backout should appear on the same branch
237 branch = repo.dirstate.branch()
238 branch = repo.dirstate.branch()
238 hg.clean(repo, node, show_stats=False)
239 hg.clean(repo, node, show_stats=False)
239 repo.dirstate.setbranch(branch)
240 repo.dirstate.setbranch(branch)
240 revert_opts = opts.copy()
241 revert_opts = opts.copy()
241 revert_opts['date'] = None
242 revert_opts['date'] = None
242 revert_opts['all'] = True
243 revert_opts['all'] = True
243 revert_opts['rev'] = hex(parent)
244 revert_opts['rev'] = hex(parent)
244 revert_opts['no_backup'] = None
245 revert_opts['no_backup'] = None
245 revert(ui, repo, **revert_opts)
246 revert(ui, repo, **revert_opts)
246 commit_opts = opts.copy()
247 commit_opts = opts.copy()
247 commit_opts['addremove'] = False
248 commit_opts['addremove'] = False
248 if not commit_opts['message'] and not commit_opts['logfile']:
249 if not commit_opts['message'] and not commit_opts['logfile']:
249 commit_opts['message'] = _("Backed out changeset %s") % (short(node))
250 commit_opts['message'] = _("Backed out changeset %s") % (short(node))
250 commit_opts['force_editor'] = True
251 commit_opts['force_editor'] = True
251 commit(ui, repo, **commit_opts)
252 commit(ui, repo, **commit_opts)
252 def nice(node):
253 def nice(node):
253 return '%d:%s' % (repo.changelog.rev(node), short(node))
254 return '%d:%s' % (repo.changelog.rev(node), short(node))
254 ui.status(_('changeset %s backs out changeset %s\n') %
255 ui.status(_('changeset %s backs out changeset %s\n') %
255 (nice(repo.changelog.tip()), nice(node)))
256 (nice(repo.changelog.tip()), nice(node)))
256 if op1 != node:
257 if op1 != node:
257 hg.clean(repo, op1, show_stats=False)
258 hg.clean(repo, op1, show_stats=False)
258 if opts.get('merge'):
259 if opts.get('merge'):
259 ui.status(_('merging with changeset %s\n') % nice(repo.changelog.tip()))
260 ui.status(_('merging with changeset %s\n') % nice(repo.changelog.tip()))
260 hg.merge(repo, hex(repo.changelog.tip()))
261 hg.merge(repo, hex(repo.changelog.tip()))
261 else:
262 else:
262 ui.status(_('the backout changeset is a new head - '
263 ui.status(_('the backout changeset is a new head - '
263 'do not forget to merge\n'))
264 'do not forget to merge\n'))
264 ui.status(_('(use "backout --merge" '
265 ui.status(_('(use "backout --merge" '
265 'if you want to auto-merge)\n'))
266 'if you want to auto-merge)\n'))
266
267
267 def bisect(ui, repo, rev=None, extra=None, command=None,
268 def bisect(ui, repo, rev=None, extra=None, command=None,
268 reset=None, good=None, bad=None, skip=None, noupdate=None):
269 reset=None, good=None, bad=None, skip=None, noupdate=None):
269 """subdivision search of changesets
270 """subdivision search of changesets
270
271
271 This command helps to find changesets which introduce problems. To
272 This command helps to find changesets which introduce problems. To
272 use, mark the earliest changeset you know exhibits the problem as
273 use, mark the earliest changeset you know exhibits the problem as
273 bad, then mark the latest changeset which is free from the problem
274 bad, then mark the latest changeset which is free from the problem
274 as good. Bisect will update your working directory to a revision
275 as good. Bisect will update your working directory to a revision
275 for testing (unless the -U/--noupdate option is specified). Once
276 for testing (unless the -U/--noupdate option is specified). Once
276 you have performed tests, mark the working directory as bad or
277 you have performed tests, mark the working directory as bad or
277 good and bisect will either update to another candidate changeset
278 good and bisect will either update to another candidate changeset
278 or announce that it has found the bad revision.
279 or announce that it has found the bad revision.
279
280
280 As a shortcut, you can also use the revision argument to mark a
281 As a shortcut, you can also use the revision argument to mark a
281 revision as good or bad without checking it out first.
282 revision as good or bad without checking it out first.
282
283
283 If you supply a command it will be used for automatic bisection.
284 If you supply a command it will be used for automatic bisection.
284 Its exit status will be used to mark revisions as good or bad:
285 Its exit status will be used to mark revisions as good or bad:
285 status 0 marks the revision as good, 125 skips it, 127 (command
286 status 0 marks the revision as good, 125 skips it, 127 (command
286 not found) aborts the bisection, and any other non-zero status
287 not found) aborts the bisection, and any other non-zero status
287 marks the revision as bad.
288 marks the revision as bad.
288 """
289 """
289 def print_result(nodes, good):
290 def print_result(nodes, good):
290 displayer = cmdutil.show_changeset(ui, repo, {})
291 displayer = cmdutil.show_changeset(ui, repo, {})
291 if len(nodes) == 1:
292 if len(nodes) == 1:
292 # narrowed it down to a single revision
293 # narrowed it down to a single revision
293 if good:
294 if good:
294 ui.write(_("The first good revision is:\n"))
295 ui.write(_("The first good revision is:\n"))
295 else:
296 else:
296 ui.write(_("The first bad revision is:\n"))
297 ui.write(_("The first bad revision is:\n"))
297 displayer.show(repo[nodes[0]])
298 displayer.show(repo[nodes[0]])
298 else:
299 else:
299 # multiple possible revisions
300 # multiple possible revisions
300 if good:
301 if good:
301 ui.write(_("Due to skipped revisions, the first "
302 ui.write(_("Due to skipped revisions, the first "
302 "good revision could be any of:\n"))
303 "good revision could be any of:\n"))
303 else:
304 else:
304 ui.write(_("Due to skipped revisions, the first "
305 ui.write(_("Due to skipped revisions, the first "
305 "bad revision could be any of:\n"))
306 "bad revision could be any of:\n"))
306 for n in nodes:
307 for n in nodes:
307 displayer.show(repo[n])
308 displayer.show(repo[n])
308
309
309 def check_state(state, interactive=True):
310 def check_state(state, interactive=True):
310 if not state['good'] or not state['bad']:
311 if not state['good'] or not state['bad']:
311 if (good or bad or skip or reset) and interactive:
312 if (good or bad or skip or reset) and interactive:
312 return
313 return
313 if not state['good']:
314 if not state['good']:
314 raise util.Abort(_('cannot bisect (no known good revisions)'))
315 raise util.Abort(_('cannot bisect (no known good revisions)'))
315 else:
316 else:
316 raise util.Abort(_('cannot bisect (no known bad revisions)'))
317 raise util.Abort(_('cannot bisect (no known bad revisions)'))
317 return True
318 return True
318
319
319 # backward compatibility
320 # backward compatibility
320 if rev in "good bad reset init".split():
321 if rev in "good bad reset init".split():
321 ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
322 ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
322 cmd, rev, extra = rev, extra, None
323 cmd, rev, extra = rev, extra, None
323 if cmd == "good":
324 if cmd == "good":
324 good = True
325 good = True
325 elif cmd == "bad":
326 elif cmd == "bad":
326 bad = True
327 bad = True
327 else:
328 else:
328 reset = True
329 reset = True
329 elif extra or good + bad + skip + reset + bool(command) > 1:
330 elif extra or good + bad + skip + reset + bool(command) > 1:
330 raise util.Abort(_('incompatible arguments'))
331 raise util.Abort(_('incompatible arguments'))
331
332
332 if reset:
333 if reset:
333 p = repo.join("bisect.state")
334 p = repo.join("bisect.state")
334 if os.path.exists(p):
335 if os.path.exists(p):
335 os.unlink(p)
336 os.unlink(p)
336 return
337 return
337
338
338 state = hbisect.load_state(repo)
339 state = hbisect.load_state(repo)
339
340
340 if command:
341 if command:
341 commandpath = util.find_exe(command)
342 commandpath = util.find_exe(command)
342 changesets = 1
343 changesets = 1
343 try:
344 try:
344 while changesets:
345 while changesets:
345 # update state
346 # update state
346 status = subprocess.call([commandpath])
347 status = subprocess.call([commandpath])
347 if status == 125:
348 if status == 125:
348 transition = "skip"
349 transition = "skip"
349 elif status == 0:
350 elif status == 0:
350 transition = "good"
351 transition = "good"
351 # status < 0 means process was killed
352 # status < 0 means process was killed
352 elif status == 127:
353 elif status == 127:
353 raise util.Abort(_("failed to execute %s") % command)
354 raise util.Abort(_("failed to execute %s") % command)
354 elif status < 0:
355 elif status < 0:
355 raise util.Abort(_("%s killed") % command)
356 raise util.Abort(_("%s killed") % command)
356 else:
357 else:
357 transition = "bad"
358 transition = "bad"
358 node = repo.lookup(rev or '.')
359 node = repo.lookup(rev or '.')
359 state[transition].append(node)
360 state[transition].append(node)
360 ui.note(_('Changeset %s: %s\n') % (short(node), transition))
361 ui.note(_('Changeset %s: %s\n') % (short(node), transition))
361 check_state(state, interactive=False)
362 check_state(state, interactive=False)
362 # bisect
363 # bisect
363 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
364 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
364 # update to next check
365 # update to next check
365 cmdutil.bail_if_changed(repo)
366 cmdutil.bail_if_changed(repo)
366 hg.clean(repo, nodes[0], show_stats=False)
367 hg.clean(repo, nodes[0], show_stats=False)
367 finally:
368 finally:
368 hbisect.save_state(repo, state)
369 hbisect.save_state(repo, state)
369 return print_result(nodes, not status)
370 return print_result(nodes, not status)
370
371
371 # update state
372 # update state
372 node = repo.lookup(rev or '.')
373 node = repo.lookup(rev or '.')
373 if good:
374 if good:
374 state['good'].append(node)
375 state['good'].append(node)
375 elif bad:
376 elif bad:
376 state['bad'].append(node)
377 state['bad'].append(node)
377 elif skip:
378 elif skip:
378 state['skip'].append(node)
379 state['skip'].append(node)
379
380
380 hbisect.save_state(repo, state)
381 hbisect.save_state(repo, state)
381
382
382 if not check_state(state):
383 if not check_state(state):
383 return
384 return
384
385
385 # actually bisect
386 # actually bisect
386 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
387 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
387 if changesets == 0:
388 if changesets == 0:
388 print_result(nodes, good)
389 print_result(nodes, good)
389 else:
390 else:
390 assert len(nodes) == 1 # only a single node can be tested next
391 assert len(nodes) == 1 # only a single node can be tested next
391 node = nodes[0]
392 node = nodes[0]
392 # compute the approximate number of remaining tests
393 # compute the approximate number of remaining tests
393 tests, size = 0, 2
394 tests, size = 0, 2
394 while size <= changesets:
395 while size <= changesets:
395 tests, size = tests + 1, size * 2
396 tests, size = tests + 1, size * 2
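# i.e. tests == floor(log2(changesets)), the depth of the halving
# search, computed without floating point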
396 rev = repo.changelog.rev(node)
397 rev = repo.changelog.rev(node)
397 ui.write(_("Testing changeset %s:%s "
398 ui.write(_("Testing changeset %s:%s "
398 "(%s changesets remaining, ~%s tests)\n")
399 "(%s changesets remaining, ~%s tests)\n")
399 % (rev, short(node), changesets, tests))
400 % (rev, short(node), changesets, tests))
400 if not noupdate:
401 if not noupdate:
401 cmdutil.bail_if_changed(repo)
402 cmdutil.bail_if_changed(repo)
402 return hg.clean(repo, node)
403 return hg.clean(repo, node)
403
404
404 def branch(ui, repo, label=None, **opts):
405 def branch(ui, repo, label=None, **opts):
405 """set or show the current branch name
406 """set or show the current branch name
406
407
407 With no argument, show the current branch name. With one argument,
408 With no argument, show the current branch name. With one argument,
408 set the working directory branch name (the branch does not exist
409 set the working directory branch name (the branch does not exist
409 in the repository until the next commit). It is recommended to use
410 in the repository until the next commit). It is recommended to use
410 the 'default' branch as your primary development branch.
411 the 'default' branch as your primary development branch.
411
412
412 Unless -f/--force is specified, branch will not let you set a
413 Unless -f/--force is specified, branch will not let you set a
413 branch name that shadows an existing branch.
414 branch name that shadows an existing branch.
414
415
415 Use -C/--clean to reset the working directory branch to that of
416 Use -C/--clean to reset the working directory branch to that of
416 the parent of the working directory, negating a previous branch
417 the parent of the working directory, negating a previous branch
417 change.
418 change.
418
419
419 Use the command 'hg update' to switch to an existing branch.
420 Use the command 'hg update' to switch to an existing branch.
420 """
421 """
421
422
422 if opts.get('clean'):
423 if opts.get('clean'):
423 label = repo[None].parents()[0].branch()
424 label = repo[None].parents()[0].branch()
424 repo.dirstate.setbranch(label)
425 repo.dirstate.setbranch(label)
425 ui.status(_('reset working directory to branch %s\n') % label)
426 ui.status(_('reset working directory to branch %s\n') % label)
426 elif label:
427 elif label:
427 if not opts.get('force') and label in repo.branchtags():
428 if not opts.get('force') and label in repo.branchtags():
428 if label not in [p.branch() for p in repo.parents()]:
429 if label not in [p.branch() for p in repo.parents()]:
429 raise util.Abort(_('a branch of the same name already exists'
430 raise util.Abort(_('a branch of the same name already exists'
430 ' (use --force to override)'))
431 ' (use --force to override)'))
431 repo.dirstate.setbranch(encoding.fromlocal(label))
432 repo.dirstate.setbranch(encoding.fromlocal(label))
432 ui.status(_('marked working directory as branch %s\n') % label)
433 ui.status(_('marked working directory as branch %s\n') % label)
433 else:
434 else:
434 ui.write("%s\n" % encoding.tolocal(repo.dirstate.branch()))
435 ui.write("%s\n" % encoding.tolocal(repo.dirstate.branch()))
435
436
436 def branches(ui, repo, active=False):
437 def branches(ui, repo, active=False):
437 """list repository named branches
438 """list repository named branches
438
439
439 List the repository's named branches, indicating which ones are
440 List the repository's named branches, indicating which ones are
440 inactive. If active is specified, only show active branches.
441 inactive. If active is specified, only show active branches.
441
442
442 A branch is considered active if it contains repository heads.
443 A branch is considered active if it contains repository heads.
443
444
444 Use the command 'hg update' to switch to an existing branch.
445 Use the command 'hg update' to switch to an existing branch.
445 """
446 """
446 hexfunc = ui.debugflag and hex or short
447 hexfunc = ui.debugflag and hex or short
447 activebranches = [encoding.tolocal(repo[n].branch())
448 activebranches = [encoding.tolocal(repo[n].branch())
448 for n in repo.heads(closed=False)]
449 for n in repo.heads(closed=False)]
449 branches = sorted([(tag in activebranches, repo.changelog.rev(node), tag)
450 branches = sorted([(tag in activebranches, repo.changelog.rev(node), tag)
450 for tag, node in repo.branchtags().items()],
451 for tag, node in repo.branchtags().items()],
451 reverse=True)
452 reverse=True)
452
453
453 for isactive, node, tag in branches:
454 for isactive, node, tag in branches:
454 if (not active) or isactive:
455 if (not active) or isactive:
455 if ui.quiet:
456 if ui.quiet:
456 ui.write("%s\n" % tag)
457 ui.write("%s\n" % tag)
457 else:
458 else:
458 hn = repo.lookup(node)
459 hn = repo.lookup(node)
459 if isactive:
460 if isactive:
460 notice = ''
461 notice = ''
461 elif hn not in repo.branchheads(tag, closed=False):
462 elif hn not in repo.branchheads(tag, closed=False):
462 notice = ' (closed)'
463 notice = ' (closed)'
463 else:
464 else:
464 notice = ' (inactive)'
465 notice = ' (inactive)'
465 rev = str(node).rjust(31 - encoding.colwidth(tag))
466 rev = str(node).rjust(31 - encoding.colwidth(tag))
466 data = tag, rev, hexfunc(hn), notice
467 data = tag, rev, hexfunc(hn), notice
467 ui.write("%s %s:%s%s\n" % data)
468 ui.write("%s %s:%s%s\n" % data)
468
469
469 def bundle(ui, repo, fname, dest=None, **opts):
470 def bundle(ui, repo, fname, dest=None, **opts):
470 """create a changegroup file
471 """create a changegroup file
471
472
472 Generate a compressed changegroup file collecting changesets not
473 Generate a compressed changegroup file collecting changesets not
473 known to be in another repository.
474 known to be in another repository.
474
475
475 If no destination repository is specified the destination is
476 If no destination repository is specified the destination is
476 assumed to have all the nodes specified by one or more --base
477 assumed to have all the nodes specified by one or more --base
477 parameters. To create a bundle containing all changesets, use
478 parameters. To create a bundle containing all changesets, use
478 -a/--all (or --base null). To change the compression method
479 -a/--all (or --base null). To change the compression method
479 applied, use the -t/--type option (by default, bundles are
480 applied, use the -t/--type option (by default, bundles are
480 compressed using bz2).
481 compressed using bz2).
481
482
482 The bundle file can then be transferred using conventional means
483 The bundle file can then be transferred using conventional means
483 and applied to another repository with the unbundle or pull
484 and applied to another repository with the unbundle or pull
484 command. This is useful when direct push and pull are not
485 command. This is useful when direct push and pull are not
485 available or when exporting an entire repository is undesirable.
486 available or when exporting an entire repository is undesirable.
486
487
487 Applying bundles preserves all changeset contents including
488 Applying bundles preserves all changeset contents including
488 permissions, copy/rename information, and revision history.
489 permissions, copy/rename information, and revision history.
489 """
490 """
490 revs = opts.get('rev') or None
491 revs = opts.get('rev') or None
491 if revs:
492 if revs:
492 revs = [repo.lookup(rev) for rev in revs]
493 revs = [repo.lookup(rev) for rev in revs]
493 if opts.get('all'):
494 if opts.get('all'):
494 base = ['null']
495 base = ['null']
495 else:
496 else:
496 base = opts.get('base')
497 base = opts.get('base')
497 if base:
498 if base:
498 if dest:
499 if dest:
499 raise util.Abort(_("--base is incompatible with specifiying "
500 raise util.Abort(_("--base is incompatible with specifiying "
500 "a destination"))
501 "a destination"))
501 base = [repo.lookup(rev) for rev in base]
502 base = [repo.lookup(rev) for rev in base]
502 # create the right base
503 # create the right base
503 # XXX: nodesbetween / changegroup* should be "fixed" instead
504 # XXX: nodesbetween / changegroup* should be "fixed" instead
504 o = []
505 o = []
505 has = {nullid: None}
506 has = {nullid: None}
506 for n in base:
507 for n in base:
507 has.update(repo.changelog.reachable(n))
508 has.update(repo.changelog.reachable(n))
508 if revs:
509 if revs:
509 visit = list(revs)
510 visit = list(revs)
510 else:
511 else:
511 visit = repo.changelog.heads()
512 visit = repo.changelog.heads()
512 seen = {}
513 seen = {}
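# walk back from the requested heads: a node whose parents are all
# reachable from a base is a root of the outgoing set 'o'; otherwise
# its unreached parents are queued and the walk continues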
513 while visit:
514 while visit:
514 n = visit.pop(0)
515 n = visit.pop(0)
515 parents = [p for p in repo.changelog.parents(n) if p not in has]
516 parents = [p for p in repo.changelog.parents(n) if p not in has]
516 if len(parents) == 0:
517 if len(parents) == 0:
517 o.insert(0, n)
518 o.insert(0, n)
518 else:
519 else:
519 for p in parents:
520 for p in parents:
520 if p not in seen:
521 if p not in seen:
521 seen[p] = 1
522 seen[p] = 1
522 visit.append(p)
523 visit.append(p)
523 else:
524 else:
524 dest, revs, checkout = hg.parseurl(
525 dest, revs, checkout = hg.parseurl(
525 ui.expandpath(dest or 'default-push', dest or 'default'), revs)
526 ui.expandpath(dest or 'default-push', dest or 'default'), revs)
526 other = hg.repository(cmdutil.remoteui(repo, opts), dest)
527 other = hg.repository(cmdutil.remoteui(repo, opts), dest)
527 o = repo.findoutgoing(other, force=opts.get('force'))
528 o = repo.findoutgoing(other, force=opts.get('force'))
528
529
529 if revs:
530 if revs:
530 cg = repo.changegroupsubset(o, revs, 'bundle')
531 cg = repo.changegroupsubset(o, revs, 'bundle')
531 else:
532 else:
532 cg = repo.changegroup(o, 'bundle')
533 cg = repo.changegroup(o, 'bundle')
533
534
534 bundletype = opts.get('type', 'bzip2').lower()
535 bundletype = opts.get('type', 'bzip2').lower()
535 btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
536 btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
536 bundletype = btypes.get(bundletype)
537 bundletype = btypes.get(bundletype)
537 if bundletype not in changegroup.bundletypes:
538 if bundletype not in changegroup.bundletypes:
538 raise util.Abort(_('unknown bundle type specified with --type'))
539 raise util.Abort(_('unknown bundle type specified with --type'))
539
540
540 changegroup.writebundle(cg, fname, bundletype)
541 changegroup.writebundle(cg, fname, bundletype)
541
542
542 def cat(ui, repo, file1, *pats, **opts):
543 def cat(ui, repo, file1, *pats, **opts):
543 """output the current or given revision of files
544 """output the current or given revision of files
544
545
545 Print the specified files as they were at the given revision. If
546 Print the specified files as they were at the given revision. If
546 no revision is given, the parent of the working directory is used,
547 no revision is given, the parent of the working directory is used,
547 or tip if no revision is checked out.
548 or tip if no revision is checked out.
548
549
549 Output may be to a file, in which case the name of the file is
550 Output may be to a file, in which case the name of the file is
550 given using a format string. The formatting rules are the same as
551 given using a format string. The formatting rules are the same as
551 for the export command, with the following additions:
552 for the export command, with the following additions:
552
553
553 %s basename of file being printed
554 %s basename of file being printed
554 %d dirname of file being printed, or '.' if in repository root
555 %d dirname of file being printed, or '.' if in repository root
555 %p root-relative path name of file being printed
556 %p root-relative path name of file being printed
556 """
557 """
557 ctx = repo[opts.get('rev')]
558 ctx = repo[opts.get('rev')]
558 err = 1
559 err = 1
559 m = cmdutil.match(repo, (file1,) + pats, opts)
560 m = cmdutil.match(repo, (file1,) + pats, opts)
560 for abs in ctx.walk(m):
561 for abs in ctx.walk(m):
561 fp = cmdutil.make_file(repo, opts.get('output'), ctx.node(), pathname=abs)
562 fp = cmdutil.make_file(repo, opts.get('output'), ctx.node(), pathname=abs)
562 data = ctx[abs].data()
563 data = ctx[abs].data()
563 if opts.get('decode'):
564 if opts.get('decode'):
564 data = repo.wwritedata(abs, data)
565 data = repo.wwritedata(abs, data)
565 fp.write(data)
566 fp.write(data)
566 err = 0
567 err = 0
567 return err
568 return err
568
569
569 def clone(ui, source, dest=None, **opts):
570 def clone(ui, source, dest=None, **opts):
570 """make a copy of an existing repository
571 """make a copy of an existing repository
571
572
572 Create a copy of an existing repository in a new directory.
573 Create a copy of an existing repository in a new directory.
573
574
574 If no destination directory name is specified, it defaults to the
575 If no destination directory name is specified, it defaults to the
575 basename of the source.
576 basename of the source.
576
577
577 The location of the source is added to the new repository's
578 The location of the source is added to the new repository's
578 .hg/hgrc file, as the default to be used for future pulls.
579 .hg/hgrc file, as the default to be used for future pulls.
579
580
580 If you use the -r/--rev option to clone up to a specific revision,
581 If you use the -r/--rev option to clone up to a specific revision,
581 no subsequent revisions (including subsequent tags) will be
582 no subsequent revisions (including subsequent tags) will be
582 present in the cloned repository. This option implies --pull, even
583 present in the cloned repository. This option implies --pull, even
583 on local repositories.
584 on local repositories.
584
585
585 By default, clone will check out the head of the 'default' branch.
586 By default, clone will check out the head of the 'default' branch.
586 If the -U/--noupdate option is used, the new clone will contain
587 If the -U/--noupdate option is used, the new clone will contain
587 only a repository (.hg) and no working copy (the working copy
588 only a repository (.hg) and no working copy (the working copy
588 parent is the null revision).
589 parent is the null revision).
589
590
590 See 'hg help urls' for valid source format details.
591 See 'hg help urls' for valid source format details.
591
592
592 It is possible to specify an ssh:// URL as the destination, but no
593 It is possible to specify an ssh:// URL as the destination, but no
593 .hg/hgrc and working directory will be created on the remote side.
594 .hg/hgrc and working directory will be created on the remote side.
594 Look at the help text for URLs for important details about ssh://
595 Look at the help text for URLs for important details about ssh://
595 URLs.
596 URLs.
596
597
597 For efficiency, hardlinks are used for cloning whenever the source
598 For efficiency, hardlinks are used for cloning whenever the source
598 and destination are on the same filesystem (note this applies only
599 and destination are on the same filesystem (note this applies only
599 to the repository data, not to the checked out files). Some
600 to the repository data, not to the checked out files). Some
600 filesystems, such as AFS, implement hardlinking incorrectly, but
601 filesystems, such as AFS, implement hardlinking incorrectly, but
601 do not report errors. In these cases, use the --pull option to
602 do not report errors. In these cases, use the --pull option to
602 avoid hardlinking.
603 avoid hardlinking.
603
604
604 In some cases, you can clone repositories and checked out files
605 In some cases, you can clone repositories and checked out files
605 using full hardlinks with
606 using full hardlinks with
606
607
607 $ cp -al REPO REPOCLONE
608 $ cp -al REPO REPOCLONE
608
609
609 This is the fastest way to clone, but it is not always safe. The
610 This is the fastest way to clone, but it is not always safe. The
610 operation is not atomic (making sure REPO is not modified during
611 operation is not atomic (making sure REPO is not modified during
611 the operation is up to you) and you have to make sure your editor
612 the operation is up to you) and you have to make sure your editor
612 breaks hardlinks (Emacs and most Linux Kernel tools do so). Also,
613 breaks hardlinks (Emacs and most Linux Kernel tools do so). Also,
613 this is not compatible with certain extensions that place their
614 this is not compatible with certain extensions that place their
614 metadata under the .hg directory, such as mq.
615 metadata under the .hg directory, such as mq.
615
616
616 """
617 """
617 hg.clone(cmdutil.remoteui(ui, opts), source, dest,
618 hg.clone(cmdutil.remoteui(ui, opts), source, dest,
618 pull=opts.get('pull'),
619 pull=opts.get('pull'),
619 stream=opts.get('uncompressed'),
620 stream=opts.get('uncompressed'),
620 rev=opts.get('rev'),
621 rev=opts.get('rev'),
621 update=not opts.get('noupdate'))
622 update=not opts.get('noupdate'))
622
623
623 def commit(ui, repo, *pats, **opts):
624 def commit(ui, repo, *pats, **opts):
624 """commit the specified files or all outstanding changes
625 """commit the specified files or all outstanding changes
625
626
626 Commit changes to the given files into the repository. Unlike a
627 Commit changes to the given files into the repository. Unlike a
627 centralized RCS, this operation is a local operation. See hg push
628 centralized RCS, this operation is a local operation. See hg push
628 for means to actively distribute your changes.
629 for means to actively distribute your changes.
629
630
630 If a list of files is omitted, all changes reported by "hg status"
631 If a list of files is omitted, all changes reported by "hg status"
631 will be committed.
632 will be committed.
632
633
633 If you are committing the result of a merge, do not provide any
634 If you are committing the result of a merge, do not provide any
634 file names or -I/-X filters.
635 file names or -I/-X filters.
635
636
636 If no commit message is specified, the configured editor is
637 If no commit message is specified, the configured editor is
637 started to prompt you for a message.
638 started to prompt you for a message.
638
639
639 See 'hg help dates' for a list of formats valid for -d/--date.
640 See 'hg help dates' for a list of formats valid for -d/--date.
640 """
641 """
641 extra = {}
642 extra = {}
642 if opts.get('close_branch'):
643 if opts.get('close_branch'):
643 extra['close'] = 1
644 extra['close'] = 1
644 def commitfunc(ui, repo, message, match, opts):
645 def commitfunc(ui, repo, message, match, opts):
645 return repo.commit(match.files(), message, opts.get('user'),
646 return repo.commit(match.files(), message, opts.get('user'),
646 opts.get('date'), match, force_editor=opts.get('force_editor'),
647 opts.get('date'), match, force_editor=opts.get('force_editor'),
647 extra=extra)
648 extra=extra)
648
649
649 node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
650 node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
650 if not node:
651 if not node:
651 return
652 return
652 cl = repo.changelog
653 cl = repo.changelog
653 rev = cl.rev(node)
654 rev = cl.rev(node)
654 parents = cl.parentrevs(rev)
655 parents = cl.parentrevs(rev)
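# did this commit create a new head? no message is needed if one
# parent was the old tip; otherwise the elif checks that the commit is
# a new root, or that every non-null parent already had more than one
# head descending from it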
655 if rev - 1 in parents:
656 if rev - 1 in parents:
656 # one of the parents was the old tip
657 # one of the parents was the old tip
657 pass
658 pass
658 elif (parents == (nullrev, nullrev) or
659 elif (parents == (nullrev, nullrev) or
659 len(cl.heads(cl.node(parents[0]))) > 1 and
660 len(cl.heads(cl.node(parents[0]))) > 1 and
660 (parents[1] == nullrev or len(cl.heads(cl.node(parents[1]))) > 1)):
661 (parents[1] == nullrev or len(cl.heads(cl.node(parents[1]))) > 1)):
661 ui.status(_('created new head\n'))
662 ui.status(_('created new head\n'))
662
663
663 if ui.debugflag:
664 if ui.debugflag:
664 ui.write(_('committed changeset %d:%s\n') % (rev,hex(node)))
665 ui.write(_('committed changeset %d:%s\n') % (rev,hex(node)))
665 elif ui.verbose:
666 elif ui.verbose:
666 ui.write(_('committed changeset %d:%s\n') % (rev,short(node)))
667 ui.write(_('committed changeset %d:%s\n') % (rev,short(node)))
667
668
668 def copy(ui, repo, *pats, **opts):
669 def copy(ui, repo, *pats, **opts):
669 """mark files as copied for the next commit
670 """mark files as copied for the next commit
670
671
671 Mark dest as having copies of source files. If dest is a
672 Mark dest as having copies of source files. If dest is a
672 directory, copies are put in that directory. If dest is a file,
673 directory, copies are put in that directory. If dest is a file,
673 the source must be a single file.
674 the source must be a single file.
674
675
675 By default, this command copies the contents of files as they
676 By default, this command copies the contents of files as they
676 stand in the working directory. If invoked with -A/--after, the
677 stand in the working directory. If invoked with -A/--after, the
677 operation is recorded, but no copying is performed.
678 operation is recorded, but no copying is performed.
678
679
679 This command takes effect with the next commit. To undo a copy
680 This command takes effect with the next commit. To undo a copy
680 before that, see hg revert.
681 before that, see hg revert.
681 """
682 """
682 wlock = repo.wlock(False)
683 wlock = repo.wlock(False)
683 try:
684 try:
684 return cmdutil.copy(ui, repo, pats, opts)
685 return cmdutil.copy(ui, repo, pats, opts)
685 finally:
686 finally:
686 wlock.release()
687 wlock.release()
687
688
688 def debugancestor(ui, repo, *args):
689 def debugancestor(ui, repo, *args):
689 """find the ancestor revision of two revisions in a given index"""
690 """find the ancestor revision of two revisions in a given index"""
690 if len(args) == 3:
691 if len(args) == 3:
691 index, rev1, rev2 = args
692 index, rev1, rev2 = args
692 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index)
693 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index)
693 lookup = r.lookup
694 lookup = r.lookup
694 elif len(args) == 2:
695 elif len(args) == 2:
695 if not repo:
696 if not repo:
696 raise util.Abort(_("There is no Mercurial repository here "
697 raise util.Abort(_("There is no Mercurial repository here "
697 "(.hg not found)"))
698 "(.hg not found)"))
698 rev1, rev2 = args
699 rev1, rev2 = args
699 r = repo.changelog
700 r = repo.changelog
700 lookup = repo.lookup
701 lookup = repo.lookup
701 else:
702 else:
702 raise util.Abort(_('either two or three arguments required'))
703 raise util.Abort(_('either two or three arguments required'))
703 a = r.ancestor(lookup(rev1), lookup(rev2))
704 a = r.ancestor(lookup(rev1), lookup(rev2))
704 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
705 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
705
706
706 def debugcommands(ui, cmd='', *args):
707 def debugcommands(ui, cmd='', *args):
707 for cmd, vals in sorted(table.iteritems()):
708 for cmd, vals in sorted(table.iteritems()):
708 cmd = cmd.split('|')[0].strip('^')
709 cmd = cmd.split('|')[0].strip('^')
709 opts = ', '.join([i[1] for i in vals[1]])
710 opts = ', '.join([i[1] for i in vals[1]])
710 ui.write('%s: %s\n' % (cmd, opts))
711 ui.write('%s: %s\n' % (cmd, opts))
711
712
712 def debugcomplete(ui, cmd='', **opts):
713 def debugcomplete(ui, cmd='', **opts):
713 """returns the completion list associated with the given command"""
714 """returns the completion list associated with the given command"""
714
715
715 if opts.get('options'):
716 if opts.get('options'):
716 options = []
717 options = []
717 otables = [globalopts]
718 otables = [globalopts]
718 if cmd:
719 if cmd:
719 aliases, entry = cmdutil.findcmd(cmd, table, False)
720 aliases, entry = cmdutil.findcmd(cmd, table, False)
720 otables.append(entry[1])
721 otables.append(entry[1])
721 for t in otables:
722 for t in otables:
722 for o in t:
723 for o in t:
723 if o[0]:
724 if o[0]:
724 options.append('-%s' % o[0])
725 options.append('-%s' % o[0])
725 options.append('--%s' % o[1])
726 options.append('--%s' % o[1])
726 ui.write("%s\n" % "\n".join(options))
727 ui.write("%s\n" % "\n".join(options))
727 return
728 return
728
729
729 cmdlist = cmdutil.findpossible(cmd, table)
730 cmdlist = cmdutil.findpossible(cmd, table)
730 if ui.verbose:
731 if ui.verbose:
731 cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
732 cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
732 ui.write("%s\n" % "\n".join(sorted(cmdlist)))
733 ui.write("%s\n" % "\n".join(sorted(cmdlist)))
733
734
734 def debugfsinfo(ui, path = "."):
735 def debugfsinfo(ui, path = "."):
735 file('.debugfsinfo', 'w').write('')
736 file('.debugfsinfo', 'w').write('')
736 ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no'))
737 ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no'))
737 ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no'))
738 ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no'))
738 ui.write('case-sensitive: %s\n' % (util.checkcase('.debugfsinfo')
739 ui.write('case-sensitive: %s\n' % (util.checkcase('.debugfsinfo')
739 and 'yes' or 'no'))
740 and 'yes' or 'no'))
740 os.unlink('.debugfsinfo')
741 os.unlink('.debugfsinfo')
741
742
742 def debugrebuildstate(ui, repo, rev="tip"):
743 def debugrebuildstate(ui, repo, rev="tip"):
743 """rebuild the dirstate as it would look like for the given revision"""
744 """rebuild the dirstate as it would look like for the given revision"""
744 ctx = repo[rev]
745 ctx = repo[rev]
745 wlock = repo.wlock()
746 wlock = repo.wlock()
746 try:
747 try:
747 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
748 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
748 finally:
749 finally:
749 wlock.release()
750 wlock.release()
750
751
751 def debugcheckstate(ui, repo):
752 def debugcheckstate(ui, repo):
752 """validate the correctness of the current dirstate"""
753 """validate the correctness of the current dirstate"""
753 parent1, parent2 = repo.dirstate.parents()
754 parent1, parent2 = repo.dirstate.parents()
754 m1 = repo[parent1].manifest()
755 m1 = repo[parent1].manifest()
755 m2 = repo[parent2].manifest()
756 m2 = repo[parent2].manifest()
756 errors = 0
757 errors = 0
757 for f in repo.dirstate:
758 for f in repo.dirstate:
758 state = repo.dirstate[f]
759 state = repo.dirstate[f]
759 if state in "nr" and f not in m1:
760 if state in "nr" and f not in m1:
760 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
761 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
761 errors += 1
762 errors += 1
762 if state in "a" and f in m1:
763 if state in "a" and f in m1:
763 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
764 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
764 errors += 1
765 errors += 1
765 if state in "m" and f not in m1 and f not in m2:
766 if state in "m" and f not in m1 and f not in m2:
766 ui.warn(_("%s in state %s, but not in either manifest\n") %
767 ui.warn(_("%s in state %s, but not in either manifest\n") %
767 (f, state))
768 (f, state))
768 errors += 1
769 errors += 1
769 for f in m1:
770 for f in m1:
770 state = repo.dirstate[f]
771 state = repo.dirstate[f]
771 if state not in "nrm":
772 if state not in "nrm":
772 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
773 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
773 errors += 1
774 errors += 1
774 if errors:
775 if errors:
775 error = _(".hg/dirstate inconsistent with current parent's manifest")
776 error = _(".hg/dirstate inconsistent with current parent's manifest")
776 raise util.Abort(error)
777 raise util.Abort(error)
777
778
def showconfig(ui, repo, *values, **opts):
    """show combined config settings from all hgrc files

    With no args, print names and values of all config items.

    With one arg of the form section.name, print just the value of
    that config item.

    With multiple args, print names and values of all config items
    with matching section names.

    With the --debug flag, the source (filename and line number) is
    printed for each config item.
    """

    untrusted = bool(opts.get('untrusted'))
    if values:
        if len([v for v in values if '.' in v]) > 1:
            raise util.Abort(_('only one config item permitted'))
    for section, name, value in ui.walkconfig(untrusted=untrusted):
        sectname = section + '.' + name
        if values:
            for v in values:
                if v == section:
                    ui.debug('%s: ' %
                             ui.configsource(section, name, untrusted))
                    ui.write('%s=%s\n' % (sectname, value))
                elif v == sectname:
                    ui.debug('%s: ' %
                             ui.configsource(section, name, untrusted))
                    ui.write(value, '\n')
        else:
            ui.debug('%s: ' %
                     ui.configsource(section, name, untrusted))
            ui.write('%s=%s\n' % (sectname, value))

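# Illustrative usage of showconfig, following the write calls above: a bare
# section name prints name=value pairs, an exact section.name prints only the
# value. The username shown is hypothetical.
#
#   $ hg showconfig ui
#   ui.username=Jane Doe <jane@example.com>
#   $ hg showconfig ui.username
#   Jane Doe <jane@example.com>
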
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care.
    """

    if not rev2:
        rev2 = hex(nullid)

    wlock = repo.wlock()
    try:
        repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
    finally:
        wlock.release()

def debugstate(ui, repo, nodates=None):
    """show the contents of the current dirstate"""
    timestr = ""
    showdate = not nodates
    for file_, ent in sorted(repo.dirstate._map.iteritems()):
        if showdate:
            if ent[3] == -1:
                # Pad or slice to locale representation
                locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(0)))
                timestr = 'unset'
                timestr = timestr[:locale_len] + ' '*(locale_len - len(timestr))
            else:
                timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(ent[3]))
        if ent[1] & 020000:
            mode = 'lnk'
        else:
            mode = '%3o' % (ent[1] & 0777)
        ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    for f in repo.dirstate.copies():
        ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))

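# The write format above yields one line per dirstate entry: state character,
# mode, size, mtime, filename. A hypothetical sample line (all values
# illustrative only):
#
#   n 644         12 2009-03-15 10:00:00 README
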
def debugdata(ui, file_, rev):
    """dump the contents of a data file revision"""
    r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_[:-2] + ".i")
    try:
        ui.write(r.revision(r.lookup(rev)))
    except KeyError:
        raise util.Abort(_('invalid revision identifier %s') % rev)

def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts["extended"]:
        d = util.parsedate(date, util.extendeddateformats)
    else:
        d = util.parsedate(date)
    ui.write("internal: %s %s\n" % d)
    ui.write("standard: %s\n" % util.datestr(d))
    if range:
        m = util.matchdate(range)
        ui.write("match: %s\n" % m(d[0]))

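# Illustrative run of debugdate; the output shape follows the writes above,
# but the epoch timestamp and offset shown are hypothetical values:
#
#   $ hg debugdate "2009-03-15 10:00"
#   internal: 1237110000 0
#   standard: Sun Mar 15 10:00:00 2009 +0000
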
def debugindex(ui, file_):
    """dump the contents of an index file"""
    r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
    ui.write("   rev    offset  length   base linkrev"
             " nodeid       p1           p2\n")
    for i in r:
        node = r.node(i)
        try:
            pp = r.parents(node)
        except:
            pp = [nullid, nullid]
        ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
                i, r.start(i), r.length(i), r.base(i), r.linkrev(i),
                short(node), short(pp[0]), short(pp[1])))

def debugindexdot(ui, file_):
    """dump an index DAG as a .dot file"""
    r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
    ui.write("digraph G {\n")
    for i in r:
        node = r.node(i)
        pp = r.parents(node)
        ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
        if pp[1] != nullid:
            ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
    ui.write("}\n")

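# The emitted graph can be rendered with Graphviz. An illustrative pipeline,
# assuming a filelog index such as .hg/store/data/foo.txt.i exists:
#
#   $ hg debugindexdot .hg/store/data/foo.txt.i > dag.dot
#   $ dot -Tpng dag.dot -o dag.png
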
def debuginstall(ui):
    '''test Mercurial installation'''

    def writetemp(contents):
        (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
        f = os.fdopen(fd, "wb")
        f.write(contents)
        f.close()
        return name

    problems = 0

    # encoding
    ui.status(_("Checking encoding (%s)...\n") % encoding.encoding)
    try:
        encoding.fromlocal("test")
    except util.Abort, inst:
        ui.write(" %s\n" % inst)
        ui.write(_(" (check that your locale is properly set)\n"))
        problems += 1

    # compiled modules
    ui.status(_("Checking extensions...\n"))
    try:
        import bdiff, mpatch, base85
    except Exception, inst:
        ui.write(" %s\n" % inst)
        ui.write(_(" One or more extensions could not be found"))
        ui.write(_(" (check that you compiled the extensions)\n"))
        problems += 1

    # templates
    ui.status(_("Checking templates...\n"))
    try:
        import templater
        templater.templater(templater.templatepath("map-cmdline.default"))
    except Exception, inst:
        ui.write(" %s\n" % inst)
        ui.write(_(" (templates seem to have been installed incorrectly)\n"))
        problems += 1

    # patch
    ui.status(_("Checking patch...\n"))
    patchproblems = 0
    a = "1\n2\n3\n4\n"
    b = "1\n2\n3\ninsert\n4\n"
    fa = writetemp(a)
    d = mdiff.unidiff(a, None, b, None, os.path.basename(fa),
                      os.path.basename(fa))
    fd = writetemp(d)

    files = {}
    try:
        patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files)
    except util.Abort, e:
        ui.write(_(" patch call failed:\n"))
        ui.write(" " + str(e) + "\n")
        patchproblems += 1
    else:
        if list(files) != [os.path.basename(fa)]:
            ui.write(_(" unexpected patch output!\n"))
            patchproblems += 1
        a = file(fa).read()
        if a != b:
            ui.write(_(" patch test failed!\n"))
            patchproblems += 1

    if patchproblems:
        if ui.config('ui', 'patch'):
            ui.write(_(" (Current patch tool may be incompatible with patch,"
                       " or misconfigured. Please check your .hgrc file)\n"))
        else:
            ui.write(_(" Internal patcher failure, please report this error"
                       " to http://www.selenic.com/mercurial/bts\n"))
        problems += patchproblems

    os.unlink(fa)
    os.unlink(fd)

    # editor
    ui.status(_("Checking commit editor...\n"))
    editor = ui.geteditor()
    cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0])
    if not cmdpath:
        if editor == 'vi':
            ui.write(_(" No commit editor set and can't find vi in PATH\n"))
            ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
        else:
            ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
            ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
        problems += 1

    # check username
    ui.status(_("Checking username...\n"))
    user = os.environ.get("HGUSER")
    if user is None:
        user = ui.config("ui", "username")
    if user is None:
        user = os.environ.get("EMAIL")
    if not user:
        ui.warn(" ")
        ui.username()
        ui.write(_(" (specify a username in your .hgrc file)\n"))

    if not problems:
        ui.status(_("No problems detected\n"))
    else:
        ui.write(_("%s problems detected,"
                   " please check your install!\n") % problems)

    return problems

def debugrename(ui, repo, file1, *pats, **opts):
    """dump rename information"""

    ctx = repo[opts.get('rev')]
    m = cmdutil.match(repo, (file1,) + pats, opts)
    for abs in ctx.walk(m):
        fctx = ctx[abs]
        o = fctx.filelog().renamed(fctx.filenode())
        rel = m.rel(abs)
        if o:
            ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
        else:
            ui.write(_("%s not renamed\n") % rel)

def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    m = cmdutil.match(repo, pats, opts)
    items = list(repo.walk(m))
    if not items:
        return
    fmt = 'f  %%-%ds  %%-%ds  %%s' % (
        max([len(abs) for abs in items]),
        max([len(m.rel(abs)) for abs in items]))
    for abs in items:
        line = fmt % (abs, m.rel(abs), m.exact(abs) and 'exact' or '')
        ui.write("%s\n" % line.rstrip())

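# Illustrative debugwalk output derived from the fmt string above: one 'f'
# line per matched file, with the repo-relative path, the cwd-relative path,
# and 'exact' for exact matches. File names are hypothetical.
#
#   $ hg debugwalk src/a.c
#   f  src/a.c  src/a.c  exact
#   $ hg debugwalk 'glob:src/*.c'
#   f  src/a.c  src/a.c
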
def diff(ui, repo, *pats, **opts):
    """diff repository (or selected files)

    Show differences between revisions for the specified files.

    Differences between files are shown using the unified diff format.

    NOTE: diff may generate unexpected results for merges, as it will
    default to comparing against the working directory's first parent
    changeset if no revisions are specified.

    When two revision arguments are given, then changes are shown
    between those revisions. If only one revision is specified then
    that revision is compared to the working directory, and, when no
    revisions are specified, the working directory files are compared
    to its parent.

    Without the -a/--text option, diff will avoid generating diffs of
    files it detects as binary. With -a, diff will generate a diff
    anyway, probably with undesirable results.

    Use the -g/--git option to generate diffs in the git extended diff
    format. For more information, read 'hg help diffs'.
    """

    revs = opts.get('rev')
    change = opts.get('change')

    if revs and change:
        msg = _('cannot specify --rev and --change at the same time')
        raise util.Abort(msg)
    elif change:
        node2 = repo.lookup(change)
        node1 = repo[node2].parents()[0].node()
    else:
        node1, node2 = cmdutil.revpair(repo, revs)

    m = cmdutil.match(repo, pats, opts)
    it = patch.diff(repo, node1, node2, match=m, opts=patch.diffopts(ui, opts))
    for chunk in it:
        repo.ui.write(chunk)

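# Illustrative invocations of the diff command defined above (revision
# numbers and file names are hypothetical):
#
#   $ hg diff                      # working directory vs. its first parent
#   $ hg diff -r 10 -r 12 foo.c    # changes to foo.c between two revisions
#   $ hg diff --change 12          # what revision 12 itself changed
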
def export(ui, repo, *changesets, **opts):
    """dump the header and diffs for one or more changesets

    Print the changeset header and diffs for one or more revisions.

    The information shown in the changeset header is: author,
    changeset hash, parent(s) and commit comment.

    NOTE: export may generate unexpected diff output for merge
    changesets, as it will compare the merge changeset against its
    first parent only.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are as follows:

    %%   literal "%" character
    %H   changeset hash (40 bytes of hexadecimal)
    %N   number of patches being generated
    %R   changeset revision number
    %b   basename of the exporting repository
    %h   short-form changeset hash (12 bytes of hexadecimal)
    %n   zero-padded sequence number, starting at 1
    %r   zero-padded changeset revision number

    Without the -a/--text option, export will avoid generating diffs
    of files it detects as binary. With -a, export will generate a
    diff anyway, probably with undesirable results.

    Use the -g/--git option to generate diffs in the git extended diff
    format. Read the diffs help topic for more information.

    With the --switch-parent option, the diff will be against the
    second parent. This can be useful for reviewing a merge.
    """
    if not changesets:
        raise util.Abort(_("export requires at least one changeset"))
    revs = cmdutil.revrange(repo, changesets)
    if len(revs) > 1:
        ui.note(_('exporting patches:\n'))
    else:
        ui.note(_('exporting patch:\n'))
    patch.export(repo, revs, template=opts.get('output'),
                 switch_parent=opts.get('switch_parent'),
                 opts=patch.diffopts(ui, opts))

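# Illustrative use of the output format string documented above (revision
# numbers and the resulting file name are hypothetical):
#
#   $ hg export -o "%R-%h.patch" 10 11    # writes e.g. 10-abcdef123456.patch
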
def grep(ui, repo, pattern, *pats, **opts):
    """search for a pattern in specified files and revisions

    Search revisions of files for a regular expression.

    This command behaves differently than Unix grep. It only accepts
    Python/Perl regexps. It searches repository history, not the
    working directory. It always prints the revision number in which a
    match appears.

    By default, grep only prints output for the first revision of a
    file in which it finds a match. To get it to print every revision
    that contains a change in match status ("-" for a match that
    becomes a non-match, or "+" for a non-match that becomes a match),
    use the --all flag.
    """
    reflags = 0
    if opts.get('ignore_case'):
        reflags |= re.I
    try:
        regexp = re.compile(pattern, reflags)
    except Exception, inst:
        ui.warn(_("grep: invalid match pattern: %s\n") % inst)
        return None
    sep, eol = ':', '\n'
    if opts.get('print0'):
        sep = eol = '\0'

    fcache = {}
    def getfile(fn):
        if fn not in fcache:
            fcache[fn] = repo.file(fn)
        return fcache[fn]

    def matchlines(body):
        begin = 0
        linenum = 0
        while True:
            match = regexp.search(body, begin)
            if not match:
                break
            mstart, mend = match.span()
            linenum += body.count('\n', begin, mstart) + 1
            lstart = body.rfind('\n', begin, mstart) + 1 or begin
            begin = body.find('\n', mend) + 1 or len(body)
            lend = begin - 1
            yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]

    class linestate(object):
        def __init__(self, line, linenum, colstart, colend):
            self.line = line
            self.linenum = linenum
            self.colstart = colstart
            self.colend = colend

        def __hash__(self):
            return hash((self.linenum, self.line))

        def __eq__(self, other):
            return self.line == other.line

    matches = {}
    copies = {}
    def grepbody(fn, rev, body):
        matches[rev].setdefault(fn, [])
        m = matches[rev][fn]
        for lnum, cstart, cend, line in matchlines(body):
            s = linestate(line, lnum, cstart, cend)
            m.append(s)

    def difflinestates(a, b):
        sm = difflib.SequenceMatcher(None, a, b)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag == 'insert':
                for i in xrange(blo, bhi):
                    yield ('+', b[i])
            elif tag == 'delete':
                for i in xrange(alo, ahi):
                    yield ('-', a[i])
            elif tag == 'replace':
                for i in xrange(alo, ahi):
                    yield ('-', a[i])
                for i in xrange(blo, bhi):
                    yield ('+', b[i])

    prev = {}
    def display(fn, rev, states, prevstates):
        datefunc = ui.quiet and util.shortdate or util.datestr
        found = False
        filerevmatches = {}
        r = prev.get(fn, -1)
        if opts.get('all'):
            iter = difflinestates(states, prevstates)
        else:
            iter = [('', l) for l in prevstates]
        for change, l in iter:
            cols = [fn, str(r)]
            if opts.get('line_number'):
                cols.append(str(l.linenum))
            if opts.get('all'):
                cols.append(change)
            if opts.get('user'):
                cols.append(ui.shortuser(get(r)[1]))
            if opts.get('date'):
                cols.append(datefunc(get(r)[2]))
            if opts.get('files_with_matches'):
                c = (fn, r)
                if c in filerevmatches:
                    continue
                filerevmatches[c] = 1
            else:
                cols.append(l.line)
            ui.write(sep.join(cols), eol)
            found = True
        return found

    fstate = {}
    skip = {}
    get = util.cachefunc(lambda r: repo[r].changeset())
    changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
    found = False
    follow = opts.get('follow')
    for st, rev, fns in changeiter:
        if st == 'window':
            matches.clear()
        elif st == 'add':
            ctx = repo[rev]
            matches[rev] = {}
            for fn in fns:
                if fn in skip:
                    continue
                try:
                    grepbody(fn, rev, getfile(fn).read(ctx.filenode(fn)))
                    fstate.setdefault(fn, [])
                    if follow:
                        copied = getfile(fn).renamed(ctx.filenode(fn))
                        if copied:
                            copies.setdefault(rev, {})[fn] = copied[0]
                except error.LookupError:
                    pass
        elif st == 'iter':
            for fn, m in sorted(matches[rev].items()):
                copy = copies.get(rev, {}).get(fn)
                if fn in skip:
                    if copy:
                        skip[copy] = True
                    continue
                if fn in prev or fstate[fn]:
                    r = display(fn, rev, m, fstate[fn])
                    found = found or r
                    if r and not opts.get('all'):
                        skip[fn] = True
                        if copy:
                            skip[copy] = True
                fstate[fn] = m
                if copy:
                    fstate[copy] = m
                prev[fn] = rev

    for fn, state in sorted(fstate.items()):
        if fn in skip:
            continue
        if fn not in copies.get(prev[fn], {}):
            found = display(fn, rev, {}, state) or found
    return (not found and 1) or 0

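# Illustrative grep output, following the cols/sep logic in display() above
# (file name, revision numbers, and matched line are hypothetical):
#
#   $ hg grep -n TODO src/a.c
#   src/a.c:14:3:# TODO: handle errors
#   $ hg grep --all TODO src/a.c
#   src/a.c:15:+:# TODO: handle errors
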
def heads(ui, repo, *branchrevs, **opts):
    """show current repository heads or show branch heads

    With no arguments, show all repository head changesets.

    If branch or revision names are given this will show the heads of
    the specified branches or the branches those revisions are tagged
    with.

    Repository "heads" are changesets that don't have child
    changesets. They are where development generally takes place and
    are the usual targets for update and merge operations.

    Branch heads are changesets that have a given branch tag, but have
    no child changesets with that tag. They are usually where
    development on the given branch takes place.
    """
    if opts.get('rev'):
        start = repo.lookup(opts['rev'])
    else:
        start = None
    closed = not opts.get('active')
    if not branchrevs:
        # Assume we're looking for repo-wide heads if no revs were specified.
        heads = repo.heads(start, closed=closed)
    else:
        heads = []
        visitedset = set()
        for branchrev in branchrevs:
            branch = repo[branchrev].branch()
            if branch in visitedset:
                continue
            visitedset.add(branch)
            bheads = repo.branchheads(branch, start, closed=closed)
            if not bheads:
                if branch != branchrev:
                    ui.warn(_("no changes on branch %s containing %s are "
                              "reachable from %s\n")
                            % (branch, branchrev, opts.get('rev')))
                else:
                    ui.warn(_("no changes on branch %s are reachable from %s\n")
                            % (branch, opts.get('rev')))
            heads.extend(bheads)
    if not heads:
        return 1
    displayer = cmdutil.show_changeset(ui, repo, opts)
    for n in heads:
        displayer.show(repo[n])

def help_(ui, name=None, with_version=False):
    """show help for a given topic or a help overview

    With no arguments, print a list of commands and short help.

    Given a topic, extension, or command name, print help for that
    topic."""
    option_lists = []

    def addglobalopts(aliases):
        if ui.verbose:
            option_lists.append((_("global options:"), globalopts))
            if name == 'shortlist':
                option_lists.append((_('use "hg help" for the full list '
                                       'of commands'), ()))
        else:
            if name == 'shortlist':
                msg = _('use "hg help" for the full list of commands '
                        'or "hg -v" for details')
            elif aliases:
                msg = _('use "hg -v help%s" to show aliases and '
                        'global options') % (name and " " + name or "")
            else:
                msg = _('use "hg -v help %s" to show global options') % name
            option_lists.append((msg, ()))

    def helpcmd(name):
        if with_version:
            version_(ui)
            ui.write('\n')

        try:
            aliases, i = cmdutil.findcmd(name, table, False)
        except error.AmbiguousCommand, inst:
            select = lambda c: c.lstrip('^').startswith(inst.args[0])
            helplist(_('list of commands:\n\n'), select)
            return

        # synopsis
        if len(i) > 2:
            if i[2].startswith('hg'):
                ui.write("%s\n" % i[2])
            else:
                ui.write('hg %s %s\n' % (aliases[0], i[2]))
        else:
            ui.write('hg %s\n' % aliases[0])

        # aliases
        if not ui.quiet and len(aliases) > 1:
            ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))

        # description
        doc = gettext(i[0].__doc__)
        if not doc:
            doc = _("(no help text available)")
        if ui.quiet:
            doc = doc.splitlines(0)[0]
        ui.write("\n%s\n" % doc.rstrip())

        if not ui.quiet:
            # options
            if i[1]:
                option_lists.append((_("options:\n"), i[1]))

            addglobalopts(False)

    def helplist(header, select=None):
        h = {}
        cmds = {}
        for c, e in table.iteritems():
            f = c.split("|", 1)[0]
            if select and not select(f):
                continue
            if (not select and name != 'shortlist' and
                e[0].__module__ != __name__):
                continue
            if name == "shortlist" and not f.startswith("^"):
                continue
            f = f.lstrip("^")
            if not ui.debugflag and f.startswith("debug"):
                continue
            doc = gettext(e[0].__doc__)
            if not doc:
                doc = _("(no help text available)")
            h[f] = doc.splitlines(0)[0].rstrip()
            cmds[f] = c.lstrip("^")

        if not h:
            ui.status(_('no commands defined\n'))
            return

        ui.status(header)
        fns = sorted(h)
        m = max(map(len, fns))
        for f in fns:
            if ui.verbose:
                commands = cmds[f].replace("|",", ")
                ui.write(" %s:\n      %s\n"%(commands, h[f]))
            else:
                ui.write(' %-*s   %s\n' % (m, f, h[f]))

        exts = list(extensions.extensions())
        if exts and name != 'shortlist':
            ui.write(_('\nenabled extensions:\n\n'))
            maxlength = 0
            exthelps = []
            for ename, ext in exts:
                doc = (gettext(ext.__doc__) or _('(no help text available)'))
                ename = ename.split('.')[-1]
                maxlength = max(len(ename), maxlength)
                exthelps.append((ename, doc.splitlines(0)[0].strip()))
            for ename, text in exthelps:
                ui.write(_(' %s   %s\n') % (ename.ljust(maxlength), text))

        if not ui.quiet:
            addglobalopts(True)

    def helptopic(name):
        for names, header, doc in help.helptable:
            if name in names:
                break
        else:
            raise error.UnknownCommand(name)

        # description
        if not doc:
            doc = _("(no help text available)")
        if callable(doc):
            doc = doc()

        ui.write("%s\n" % header)
        ui.write("%s\n" % doc.rstrip())

    def helpext(name):
        try:
            mod = extensions.find(name)
        except KeyError:
            raise error.UnknownCommand(name)

        doc = gettext(mod.__doc__) or _('no help text available')
        doc = doc.splitlines(0)
        ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
        for d in doc[1:]:
            ui.write(d, '\n')

        ui.status('\n')

        try:
            ct = mod.cmdtable
        except AttributeError:
            ct = {}

        modcmds = set([c.split('|', 1)[0] for c in ct])
        helplist(_('list of commands:\n\n'), modcmds.__contains__)

    if name and name != 'shortlist':
        i = None
        for f in (helptopic, helpcmd, helpext):
            try:
                f(name)
                i = None
                break
            except error.UnknownCommand, inst:
                i = inst
        if i:
            raise i

    else:
        # program name
        if ui.verbose or with_version:
            version_(ui)
        else:
            ui.status(_("Mercurial Distributed SCM\n"))
        ui.status('\n')

        # list of commands
        if name == "shortlist":
            header = _('basic commands:\n\n')
        else:
            header = _('list of commands:\n\n')

        helplist(header)

    # list all option lists
    opt_output = []
    for title, options in option_lists:
        opt_output.append(("\n%s" % title, None))
        for shortopt, longopt, default, desc in options:
            if "DEPRECATED" in desc and not ui.verbose: continue
            opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
                                          longopt and " --%s" % longopt),
                               "%s%s" % (desc,
                                         default
                                         and _(" (default: %s)") % default
                                         or "")))

    if not name:
        ui.write(_("\nadditional help topics:\n\n"))
        topics = []
        for names, header, doc in help.helptable:
            names = [(-len(name), name) for name in names]
            names.sort()
            topics.append((names[0][1], header))
        topics_len = max([len(s[0]) for s in topics])
        for t, desc in topics:
            ui.write(" %-*s  %s\n" % (topics_len, t, desc))

    if opt_output:
        opts_len = max([len(line[0]) for line in opt_output if line[1]] or [0])
        for first, second in opt_output:
            if second:
                # wrap descriptions at 70 characters, just like the
                # main help texts
                second = textwrap.wrap(second, width=70 - opts_len - 3)
                pad = '\n' + ' ' * (opts_len + 3)
                ui.write(" %-*s %s\n" % (opts_len, first, pad.join(second)))
            else:
                ui.write("%s\n" % first)

def identify(ui, repo, source=None,
             rev=None, num=None, id=None, branch=None, tags=None):
    """identify the working copy or specified revision

    With no revision, print a summary of the current state of the
    repository.

    With a path, do a lookup in another repository.

    This summary identifies the repository state using one or two
    parent hash identifiers, followed by a "+" if there are
    uncommitted changes in the working directory, a list of tags for
    this revision and a branch name for non-default branches.
    """

    if not repo and not source:
        raise util.Abort(_("There is no Mercurial repository here "
                           "(.hg not found)"))

    hexfunc = ui.debugflag and hex or short
    default = not (num or id or branch or tags)
    output = []

    revs = []
    if source:
        source, revs, checkout = hg.parseurl(ui.expandpath(source), [])
        repo = hg.repository(ui, source)

    if not repo.local():
        if not rev and revs:
            rev = revs[0]
        if not rev:
            rev = "tip"
        if num or branch or tags:
            raise util.Abort(
                "can't query remote revision number, branch, or tags")
        output = [hexfunc(repo.lookup(rev))]
    elif not rev:
        ctx = repo[None]
        parents = ctx.parents()
        changed = False
        if default or id or num:
            changed = ctx.files() + ctx.deleted()
        if default or id:
            output = ["%s%s" % ('+'.join([hexfunc(p.node()) for p in parents]),
                                (changed) and "+" or "")]
        if num:
            output.append("%s%s" % ('+'.join([str(p.rev()) for p in parents]),
                                    (changed) and "+" or ""))
    else:
        ctx = repo[rev]
        if default or id:
            output = [hexfunc(ctx.node())]
        if num:
            output.append(str(ctx.rev()))

    if repo.local() and default and not ui.quiet:
        b = encoding.tolocal(ctx.branch())
        if b != 'default':
            output.append("(%s)" % b)

        # multiple tags for a single parent separated by '/'
        t = "/".join(ctx.tags())
        if t:
            output.append(t)

    if branch:
        output.append(encoding.tolocal(ctx.branch()))

    if tags:
        output.extend(ctx.tags())

    ui.write("%s\n" % ' '.join(output))

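# Illustrative identify output for a local working copy with uncommitted
# changes on a named branch; hash, branch, and tag are hypothetical, but the
# ordering (hash+, branch in parentheses, tags) follows the code above:
#
#   $ hg identify
#   8c19a8e4cbe2+ (stable) tip
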
1632 def import_(ui, repo, patch1, *patches, **opts):
1633 def import_(ui, repo, patch1, *patches, **opts):
1633 """import an ordered set of patches
1634 """import an ordered set of patches
1634
1635
1635 Import a list of patches and commit them individually.
1636 Import a list of patches and commit them individually.
1636
1637
1637 If there are outstanding changes in the working directory, import
1638 If there are outstanding changes in the working directory, import
1638 will abort unless given the -f/--force flag.
1639 will abort unless given the -f/--force flag.
1639
1640
1640 You can import a patch straight from a mail message. Even patches
1641 You can import a patch straight from a mail message. Even patches
1641 as attachments work (body part must be type text/plain or
1642 as attachments work (body part must be type text/plain or
1642 text/x-patch to be used). From and Subject headers of email
1643 text/x-patch to be used). From and Subject headers of email
1643 message are used as default committer and commit message. All
1644 message are used as default committer and commit message. All
1644 text/plain body parts before first diff are added to commit
1645 text/plain body parts before first diff are added to commit
1645 message.
1646 message.
1646
1647
1647 If the imported patch was generated by hg export, user and
1648 If the imported patch was generated by hg export, user and
1648 description from patch override values from message headers and
1649 description from patch override values from message headers and
1649 body. Values given on command line with -m/--message and -u/--user
1650 body. Values given on command line with -m/--message and -u/--user
1650 override these.
1651 override these.
1651
1652
1652 If --exact is specified, import will set the working directory to
1653 If --exact is specified, import will set the working directory to
1653 the parent of each patch before applying it, and will abort if the
1654 the parent of each patch before applying it, and will abort if the
1654 resulting changeset has a different ID than the one recorded in
1655 resulting changeset has a different ID than the one recorded in
1655 the patch. This may happen due to character set problems or other
1656 the patch. This may happen due to character set problems or other
1656 deficiencies in the text patch format.
1657 deficiencies in the text patch format.
1657
1658
1658 With -s/--similarity, hg will attempt to discover renames and
1659 With -s/--similarity, hg will attempt to discover renames and
1659 copies in the patch in the same way as 'addremove'.
1660 copies in the patch in the same way as 'addremove'.
1660
1661
1661 To read a patch from standard input, use patch name "-". See 'hg
1662 To read a patch from standard input, use patch name "-". See 'hg
1662 help dates' for a list of formats valid for -d/--date.
1663 help dates' for a list of formats valid for -d/--date.
1663 """
1664 """
1664 patches = (patch1,) + patches
1665 patches = (patch1,) + patches
1665
1666
1666 date = opts.get('date')
1667 date = opts.get('date')
1667 if date:
1668 if date:
1668 opts['date'] = util.parsedate(date)
1669 opts['date'] = util.parsedate(date)
1669
1670
1670 try:
1671 try:
1671 sim = float(opts.get('similarity') or 0)
1672 sim = float(opts.get('similarity') or 0)
1672 except ValueError:
1673 except ValueError:
1673 raise util.Abort(_('similarity must be a number'))
1674 raise util.Abort(_('similarity must be a number'))
1674 if sim < 0 or sim > 100:
1675 if sim < 0 or sim > 100:
1675 raise util.Abort(_('similarity must be between 0 and 100'))
1676 raise util.Abort(_('similarity must be between 0 and 100'))
1676
1677
1677 if opts.get('exact') or not opts.get('force'):
1678 if opts.get('exact') or not opts.get('force'):
1678 cmdutil.bail_if_changed(repo)
1679 cmdutil.bail_if_changed(repo)
1679
1680
1680 d = opts["base"]
1681 d = opts["base"]
1681 strip = opts["strip"]
1682 strip = opts["strip"]
1682 wlock = lock = None
1683 wlock = lock = None
1683 try:
1684 try:
1684 wlock = repo.wlock()
1685 wlock = repo.wlock()
1685 lock = repo.lock()
1686 lock = repo.lock()
1686 for p in patches:
1687 for p in patches:
1687 pf = os.path.join(d, p)
1688 pf = os.path.join(d, p)
1688
1689
1689 if pf == '-':
1690 if pf == '-':
1690 ui.status(_("applying patch from stdin\n"))
1691 ui.status(_("applying patch from stdin\n"))
1691 pf = sys.stdin
1692 pf = sys.stdin
1692 else:
1693 else:
1693 ui.status(_("applying %s\n") % p)
1694 ui.status(_("applying %s\n") % p)
1694 pf = url.open(ui, pf)
1695 pf = url.open(ui, pf)
1695 data = patch.extract(ui, pf)
1696 data = patch.extract(ui, pf)
1696 tmpname, message, user, date, branch, nodeid, p1, p2 = data
1697 tmpname, message, user, date, branch, nodeid, p1, p2 = data
1697
1698
1698 if tmpname is None:
1699 if tmpname is None:
1699 raise util.Abort(_('no diffs found'))
1700 raise util.Abort(_('no diffs found'))
1700
1701
1701 try:
1702 try:
1702 cmdline_message = cmdutil.logmessage(opts)
1703 cmdline_message = cmdutil.logmessage(opts)
1703 if cmdline_message:
1704 if cmdline_message:
1704 # pickup the cmdline msg
1705 # pickup the cmdline msg
1705 message = cmdline_message
1706 message = cmdline_message
1706 elif message:
1707 elif message:
1707 # pickup the patch msg
1708 # pickup the patch msg
1708 message = message.strip()
1709 message = message.strip()
1709 else:
1710 else:
1710 # launch the editor
1711 # launch the editor
1711 message = None
1712 message = None
1712 ui.debug(_('message:\n%s\n') % message)
1713 ui.debug(_('message:\n%s\n') % message)
1713
1714
1714 wp = repo.parents()
1715 wp = repo.parents()
                if opts.get('exact'):
                    if not nodeid or not p1:
                        raise util.Abort(_('not a mercurial patch'))
                    p1 = repo.lookup(p1)
                    p2 = repo.lookup(p2 or hex(nullid))

                    if p1 != wp[0].node():
                        hg.clean(repo, p1)
                    repo.dirstate.setparents(p1, p2)
                elif p2:
                    try:
                        p1 = repo.lookup(p1)
                        p2 = repo.lookup(p2)
                        if p1 == wp[0].node():
                            repo.dirstate.setparents(p1, p2)
                    except error.RepoError:
                        pass
                if opts.get('exact') or opts.get('import_branch'):
                    repo.dirstate.setbranch(branch or 'default')

                files = {}
                try:
                    patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
                                files=files)
                finally:
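                    # updatedir syncs the dirstate with the files the
                    # patch touched; --similarity arrives as a 0-100
                    # percentage but is passed on as a 0..1 fraction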
                    files = patch.updatedir(ui, repo, files, similarity=sim/100.)
                if not opts.get('no_commit'):
                    n = repo.commit(files, message, opts.get('user') or user,
                                    opts.get('date') or date)
                    if opts.get('exact'):
                        if hex(n) != nodeid:
                            repo.rollback()
                            raise util.Abort(_('patch is damaged'
                                               ' or loses information'))
                # Force a dirstate write so that the next transaction
                # backs up an up-to-date file.
                repo.dirstate.write()
            finally:
                os.unlink(tmpname)
    finally:
        release(lock, wlock)

def incoming(ui, repo, source="default", **opts):
    """show new changesets found in source

    Show new changesets found in the specified path/URL or the default
    pull location. These are the changesets that would be pulled if a
    pull was requested.

    For a remote repository, using --bundle avoids downloading the
    changesets twice if incoming is followed by a pull.

    See pull for valid source format details.
    """
    limit = cmdutil.loglimit(opts)
    source, revs, checkout = hg.parseurl(ui.expandpath(source), opts.get('rev'))
    other = hg.repository(cmdutil.remoteui(repo, opts), source)
    ui.status(_('comparing with %s\n') % url.hidepassword(source))
    if revs:
        revs = [other.lookup(rev) for rev in revs]
    common, incoming, rheads = repo.findcommonincoming(other, heads=revs,
                                                       force=opts["force"])
    if not incoming:
        try:
            os.unlink(opts["bundle"])
        except:
            pass
        ui.status(_("no changes found\n"))
        return 1

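    # with --bundle (or a non-local source) the incoming changesets are
    # written to a bundle file once, so a following 'hg pull' can reuse
    # them instead of downloading them a second time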
    cleanup = None
    try:
        fname = opts["bundle"]
        if fname or not other.local():
            # create a bundle (uncompressed if other repo is not local)

            if revs is None and other.capable('changegroupsubset'):
                revs = rheads

            if revs is None:
                cg = other.changegroup(incoming, "incoming")
            else:
                cg = other.changegroupsubset(incoming, revs, 'incoming')
            bundletype = other.local() and "HG10BZ" or "HG10UN"
            fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
            # keep written bundle?
            if opts["bundle"]:
                cleanup = None
            if not other.local():
                # use the created uncompressed bundlerepo
                other = bundlerepo.bundlerepository(ui, repo.root, fname)

        o = other.changelog.nodesbetween(incoming, revs)[0]
        if opts.get('newest_first'):
            o.reverse()
        displayer = cmdutil.show_changeset(ui, other, opts)
        count = 0
        for n in o:
            if count >= limit:
                break
            parents = [p for p in other.changelog.parents(n) if p != nullid]
            if opts.get('no_merges') and len(parents) == 2:
                continue
            count += 1
            displayer.show(other[n])
    finally:
        if hasattr(other, 'close'):
            other.close()
        if cleanup:
            os.unlink(cleanup)

def init(ui, dest=".", **opts):
    """create a new repository in the given directory

    Initialize a new repository in the given directory. If the given
    directory does not exist, it is created.

    If no directory is given, the current directory is used.

    It is possible to specify an ssh:// URL as the destination.
    See 'hg help urls' for more information.
    """
    hg.repository(cmdutil.remoteui(ui, opts), dest, create=1)

def locate(ui, repo, *pats, **opts):
    """locate files matching specific patterns

    Print all files under Mercurial control whose names match the
    given patterns.

    This command searches the entire repository by default. To search
    just the current directory and its subdirectories, use
    "--include .".

    If no patterns are given to match, this command prints all file
    names.

    If you want to feed the output of this command into the "xargs"
    command, use the -0 option to both this command and "xargs". This
    will avoid the problem of "xargs" treating single filenames that
    contain white space as multiple filenames.
    """
    end = opts.get('print0') and '\0' or '\n'
    rev = opts.get('rev') or None

    ret = 1
    m = cmdutil.match(repo, pats, opts, default='relglob')
    m.bad = lambda x,y: False
    for abs in repo[rev].walk(m):
        if not rev and abs not in repo.dirstate:
            continue
        if opts.get('fullpath'):
            ui.write(repo.wjoin(abs), end)
        else:
            ui.write(((pats and m.rel(abs)) or abs), end)
        ret = 0

    return ret

def log(ui, repo, *pats, **opts):
    """show revision history of entire repository or files

    Print the revision history of the specified files or the entire
    project.

    File history is shown without following rename or copy history of
    files. Use -f/--follow with a file name to follow history across
    renames and copies. --follow without a file name will only show
    ancestors or descendants of the starting revision. --follow-first
    only follows the first parent of merge revisions.

    If no revision range is specified, the default is tip:0 unless
    --follow is set, in which case the working directory parent is
    used as the starting revision.

    See 'hg help dates' for a list of formats valid for -d/--date.

    By default this command outputs: changeset id and hash, tags,
    non-trivial parents, user, date and time, and a summary for each
    commit. When the -v/--verbose switch is used, the list of changed
    files and full commit message is shown.

    NOTE: log -p/--patch may generate unexpected diff output for merge
    changesets, as it will only compare the merge changeset against
    its first parent. Also, the files: list will only reflect files
    that are different from BOTH parents.
    """

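    # repo[r].changeset() returns the tuple (manifest, user, (time, tz),
    # files, description, extra); 'get' memoizes it per revision, and the
    # filters below index into that tuple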
    get = util.cachefunc(lambda r: repo[r].changeset())
    changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)

    limit = cmdutil.loglimit(opts)
    count = 0

    if opts.get('copies') and opts.get('rev'):
        endrev = max(cmdutil.revrange(repo, opts.get('rev'))) + 1
    else:
        endrev = len(repo)
    rcache = {}
    ncache = {}
    def getrenamed(fn, rev):
        '''looks up all renames for a file (up to endrev) the first
        time the file is given. It indexes on the changerev and only
        parses the manifest if linkrev != changerev.
        Returns rename info for fn at changerev rev.'''
        if fn not in rcache:
            rcache[fn] = {}
            ncache[fn] = {}
            fl = repo.file(fn)
            for i in fl:
                node = fl.node(i)
                lr = fl.linkrev(i)
                renamed = fl.renamed(node)
                rcache[fn][lr] = renamed
                if renamed:
                    ncache[fn][node] = renamed
                if lr >= endrev:
                    break
        if rev in rcache[fn]:
            return rcache[fn][rev]

        # If linkrev != rev (i.e. rev not found in rcache) fallback to
        # filectx logic.

        try:
            return repo[rev][fn].renamed()
        except error.LookupError:
            pass
        return None

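    # util.matchdate compiles the -d/--date spec into a predicate over
    # timestamps; it is tested against changes[2][0], the commit time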
    df = False
    if opts["date"]:
        df = util.matchdate(opts["date"])

    only_branches = opts.get('only_branch')

    displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
    for st, rev, fns in changeiter:
        if st == 'add':
            parents = [p for p in repo.changelog.parentrevs(rev)
                       if p != nullrev]
            if opts.get('no_merges') and len(parents) == 2:
                continue
            if opts.get('only_merges') and len(parents) != 2:
                continue

            if only_branches:
                revbranch = get(rev)[5]['branch']
                if revbranch not in only_branches:
                    continue

            if df:
                changes = get(rev)
                if not df(changes[2][0]):
                    continue

            if opts.get('keyword'):
                changes = get(rev)
                miss = 0
                for k in [kw.lower() for kw in opts['keyword']]:
                    if not (k in changes[1].lower() or
                            k in changes[4].lower() or
                            k in " ".join(changes[3]).lower()):
                        miss = 1
                        break
                if miss:
                    continue

            if opts['user']:
                changes = get(rev)
                if not [k for k in opts['user'] if k in changes[1]]:
                    continue

            copies = []
            if opts.get('copies') and rev:
                for fn in get(rev)[3]:
                    rename = getrenamed(fn, rev)
                    if rename:
                        copies.append((fn, rename[0]))
            displayer.show(context.changectx(repo, rev), copies=copies)
        elif st == 'iter':
            if count == limit: break
            if displayer.flush(rev):
                count += 1

def manifest(ui, repo, node=None, rev=None):
    """output the current or given revision of the project manifest

    Print a list of version controlled files for the given revision.
    If no revision is given, the first parent of the working directory
    is used, or the null revision if none is checked out.

    With -v flag, print file permissions, symlink and executable bits.
    With --debug flag, print file revision hashes.
    """

    if rev and node:
        raise util.Abort(_("please specify just one revision"))

    if not node:
        node = rev

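    # -v prefixes: 'l' = symlink shown as '644 @', 'x' = executable
    # shown as '755 *', '' = regular file shown as '644'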
    decor = {'l':'644 @ ', 'x':'755 * ', '':'644 '}
    ctx = repo[node]
    for f in ctx:
        if ui.debugflag:
            ui.write("%40s " % hex(ctx.manifest()[f]))
        if ui.verbose:
            ui.write(decor[ctx.flags(f)])
        ui.write("%s\n" % f)

def merge(ui, repo, node=None, force=None, rev=None):
    """merge working directory with another revision

    The contents of the current working directory are updated with all
    changes made in the requested revision since the last common
    predecessor revision.

    Files that changed between either parent are marked as changed for
    the next commit and a commit must be performed before any further
    updates are allowed. The next commit has two parents.

    If no revision is specified, the working directory's parent is a
    head revision, and the current branch contains exactly one other
    head, the other head is merged with by default. Otherwise, an
    explicit revision to merge with must be provided.
    """

    if rev and node:
        raise util.Abort(_("please specify just one revision"))
    if not node:
        node = rev

    if not node:
        branch = repo.changectx(None).branch()
        bheads = repo.branchheads(branch)
        if len(bheads) > 2:
            raise util.Abort(_("branch '%s' has %d heads - "
                               "please merge with an explicit rev") %
                             (branch, len(bheads)))

        parent = repo.dirstate.parents()[0]
        if len(bheads) == 1:
            if len(repo.heads()) > 1:
                raise util.Abort(_("branch '%s' has one head - "
                                   "please merge with an explicit rev") %
                                 branch)
            msg = _('there is nothing to merge')
            if parent != repo.lookup(repo[None].branch()):
                msg = _('%s - use "hg update" instead') % msg
            raise util.Abort(msg)

        if parent not in bheads:
            raise util.Abort(_('working dir not at a head rev - '
                               'use "hg update" or merge with an explicit rev'))
        node = parent == bheads[0] and bheads[-1] or bheads[0]
    return hg.merge(repo, node, force=force)

def outgoing(ui, repo, dest=None, **opts):
    """show changesets not found in destination

    Show changesets not found in the specified destination repository
    or the default push location. These are the changesets that would
    be pushed if a push was requested.

    See pull for valid destination format details.
    """
    limit = cmdutil.loglimit(opts)
    dest, revs, checkout = hg.parseurl(
        ui.expandpath(dest or 'default-push', dest or 'default'), opts.get('rev'))
    if revs:
        revs = [repo.lookup(rev) for rev in revs]

    other = hg.repository(cmdutil.remoteui(repo, opts), dest)
    ui.status(_('comparing with %s\n') % url.hidepassword(dest))
    o = repo.findoutgoing(other, force=opts.get('force'))
    if not o:
        ui.status(_("no changes found\n"))
        return 1
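    # nodesbetween returns (nodes, roots, heads); [0] restricts the
    # outgoing set to ancestors of the requested revisions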
    o = repo.changelog.nodesbetween(o, revs)[0]
    if opts.get('newest_first'):
        o.reverse()
    displayer = cmdutil.show_changeset(ui, repo, opts)
    count = 0
    for n in o:
        if count >= limit:
            break
        parents = [p for p in repo.changelog.parents(n) if p != nullid]
        if opts.get('no_merges') and len(parents) == 2:
            continue
        count += 1
        displayer.show(repo[n])

def parents(ui, repo, file_=None, **opts):
    """show the parents of the working directory or revision

    Print the working directory's parent revisions. If a revision is
    given via -r/--rev, the parent of that revision will be printed.
    If a file argument is given, the revision in which the file was
    last changed (before the working directory revision or the
    argument to --rev if given) is printed.
    """
    rev = opts.get('rev')
    if rev:
        ctx = repo[rev]
    else:
        ctx = repo[None]

    if file_:
        m = cmdutil.match(repo, (file_,), opts)
        if m.anypats() or len(m.files()) != 1:
            raise util.Abort(_('can only specify an explicit file name'))
        file_ = m.files()[0]
        filenodes = []
        for cp in ctx.parents():
            if not cp:
                continue
            try:
                filenodes.append(cp.filenode(file_))
            except error.LookupError:
                pass
        if not filenodes:
            raise util.Abort(_("'%s' not found in manifest!") % file_)
        fl = repo.file(file_)
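        # fl.rev(fn) maps each file node to its filelog revision;
        # linkrev maps that to the changeset which introduced it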
        p = [repo.lookup(fl.linkrev(fl.rev(fn))) for fn in filenodes]
    else:
        p = [cp.node() for cp in ctx.parents()]

    displayer = cmdutil.show_changeset(ui, repo, opts)
    for n in p:
        if n != nullid:
            displayer.show(repo[n])

def paths(ui, repo, search=None):
    """show aliases for remote repositories

    Show definition of symbolic path name NAME. If no name is given,
    show definition of available names.

    Path names are defined in the [paths] section of /etc/mercurial/hgrc
    and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.

    See 'hg help urls' for more information.
    """
    if search:
        for name, path in ui.configitems("paths"):
            if name == search:
                ui.write("%s\n" % url.hidepassword(path))
                return
        ui.warn(_("not found!\n"))
        return 1
    else:
        for name, path in ui.configitems("paths"):
            ui.write("%s = %s\n" % (name, url.hidepassword(path)))

def postincoming(ui, repo, modheads, optupdate, checkout):
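    # called after changesets come in (e.g. by pull below): 'modheads'
    # is the number of heads the operation added; update the working
    # copy when that is unambiguous, otherwise hint at what to run next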
    if modheads == 0:
        return
    if optupdate:
        if (modheads <= 1 or len(repo.branchheads()) == 1) or checkout:
            return hg.update(repo, checkout)
        else:
            ui.status(_("not updating, since new heads added\n"))
    if modheads > 1:
        ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
    else:
        ui.status(_("(run 'hg update' to get a working copy)\n"))

def pull(ui, repo, source="default", **opts):
    """pull changes from the specified source

    Pull changes from a remote repository to the local one.

    This finds all changes from the repository at the specified path
    or URL and adds them to the local repository. By default, this
    does not update the copy of the project in the working directory.

    Use hg incoming if you want to see what will be added by the next
    pull without actually adding the changes to the repository.

    If SOURCE is omitted, the 'default' path will be used.
    See 'hg help urls' for more information.
    """
    source, revs, checkout = hg.parseurl(ui.expandpath(source), opts.get('rev'))
    other = hg.repository(cmdutil.remoteui(repo, opts), source)
    ui.status(_('pulling from %s\n') % url.hidepassword(source))
    if revs:
        try:
            revs = [other.lookup(rev) for rev in revs]
        except error.CapabilityError:
            err = _("Other repository doesn't support revision lookup, "
                    "so a rev cannot be specified.")
            raise util.Abort(err)

    modheads = repo.pull(other, heads=revs, force=opts.get('force'))
    return postincoming(ui, repo, modheads, opts.get('update'), checkout)

def push(ui, repo, dest=None, **opts):
    """push changes to the specified destination

    Push changes from the local repository to the given destination.

    This is the symmetrical operation for pull. It moves changes from
    the current repository to a different one. If the destination is
    local this is identical to a pull in that directory from the
    current one.

    By default, push will refuse to run if it detects the result would
    increase the number of remote heads. This generally indicates that
    the client has forgotten to pull and merge before pushing.

    If -r/--rev is used, the named revision and all its ancestors will
    be pushed to the remote repository.

    Look at the help text for URLs for important details about ssh://
    URLs. If DESTINATION is omitted, a default path will be used.
    See 'hg help urls' for more information.
    """
    dest, revs, checkout = hg.parseurl(
        ui.expandpath(dest or 'default-push', dest or 'default'), opts.get('rev'))
    other = hg.repository(cmdutil.remoteui(repo, opts), dest)
    ui.status(_('pushing to %s\n') % url.hidepassword(dest))
    if revs:
        revs = [repo.lookup(rev) for rev in revs]
    r = repo.push(other, opts.get('force'), revs=revs)
    return r == 0

def rawcommit(ui, repo, *pats, **opts):
    """raw commit interface (DEPRECATED)

    (DEPRECATED)
    Low-level commit, for use in helper scripts.

    This command is not intended to be used by normal users, as it is
    primarily useful for importing from other SCMs.

    This command is now deprecated and will be removed in a future
    release; please use debugsetparents and commit instead.
    """

    ui.warn(_("(the rawcommit command is deprecated)\n"))

    message = cmdutil.logmessage(opts)

    files = cmdutil.match(repo, pats, opts).files()
    if opts.get('files'):
        files += open(opts['files']).read().splitlines()

    parents = [repo.lookup(p) for p in opts['parent']]

    try:
        repo.rawcommit(files, message, opts['user'], opts['date'], *parents)
    except ValueError, inst:
        raise util.Abort(str(inst))

def recover(ui, repo):
    """roll back an interrupted transaction

    Recover from an interrupted commit or pull.

    This command tries to fix the repository status after an
    interrupted operation. It should only be necessary when Mercurial
    suggests it.
    """
    if repo.recover():
        return hg.verify(repo)
    return 1

def remove(ui, repo, *pats, **opts):
    """remove the specified files on the next commit

    Schedule the indicated files for removal from the repository.

    This only removes files from the current branch, not from the
    entire project history. -A/--after can be used to remove only
    files that have already been deleted, -f/--force can be used to
    force deletion, and -Af can be used to remove files from the next
    revision without deleting them.

    The following table details the behavior of remove for different
    file states (columns) and option combinations (rows). The file
    states are Added, Clean, Modified and Missing (as reported by hg
    status). The actions are Warn, Remove (from branch) and Delete
    (from disk).

           A  C  M  !
    none   W  RD W  R
    -f     R  RD RD R
    -A     W  W  W  R
    -Af    R  R  R  R

    This command schedules the files to be removed at the next commit.
    To undo a remove before that, see hg revert.
    """

    after, force = opts.get('after'), opts.get('force')
    if not pats and not after:
        raise util.Abort(_('no files specified'))

    m = cmdutil.match(repo, pats, opts)
    s = repo.status(match=m, clean=True)
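    # repo.status returns (modified, added, removed, deleted, unknown,
    # ignored, clean); only four of the seven lists are needed here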
    modified, added, deleted, clean = s[0], s[1], s[3], s[6]

    def warn(files, reason):
        for f in files:
            ui.warn(_('not removing %s: file %s (use -f to force removal)\n')
                    % (m.rel(f), reason))

    if force:
        remove, forget = modified + deleted + clean, added
    elif after:
        remove, forget = deleted, []
        warn(modified + added + clean, _('still exists'))
    else:
        remove, forget = deleted + clean, []
        warn(modified, _('is modified'))
        warn(added, _('has been marked for add'))

    for f in sorted(remove + forget):
        if ui.verbose or not m.exact(f):
            ui.status(_('removing %s\n') % m.rel(f))

    repo.forget(forget)
    repo.remove(remove, unlink=not after)

def rename(ui, repo, *pats, **opts):
    """rename files; equivalent of copy + remove

    Mark dest as copies of sources; mark sources for deletion. If dest
    is a directory, copies are put in that directory. If dest is a
    file, there can only be one source.

    By default, this command copies the contents of files as they
    exist in the working directory. If invoked with -A/--after, the
    operation is recorded, but no copying is performed.

    This command takes effect at the next commit. To undo a rename
    before that, see hg revert.
    """
    wlock = repo.wlock(False)
    try:
        return cmdutil.copy(ui, repo, pats, opts, rename=True)
    finally:
        wlock.release()

def resolve(ui, repo, *pats, **opts):
    """retry file merges from a merge or update

    This command will cleanly retry unresolved file merges using file
    revisions preserved from the last update or merge. To attempt to
    resolve all unresolved files, use the -a/--all switch.

    If a conflict is resolved manually, please note that the changes
    will be overwritten if the merge is retried with resolve. The
    -m/--mark switch should be used to mark the file as resolved.

    This command will also allow listing resolved files and manually
    marking and unmarking files as resolved. All files must be marked
    as resolved before new commits are permitted.

    The codes used to show the status of files are:
    U = unresolved
    R = resolved
    """

    all, mark, unmark, show = [opts.get(o) for o in 'all mark unmark list'.split()]

    if (show and (mark or unmark)) or (mark and unmark):
        raise util.Abort(_("too many options specified"))
    if pats and all:
        raise util.Abort(_("can't specify --all and patterns"))
    if not (all or pats or show or mark or unmark):
        raise util.Abort(_('no files or directories specified; '
                           'use --all to remerge all files'))

    ms = merge_.mergestate(repo)
    m = cmdutil.match(repo, pats, opts)

    for f in ms:
        if m(f):
            if show:
                ui.write("%s %s\n" % (ms[f].upper(), f))
            elif mark:
                ms.mark(f, "r")
            elif unmark:
                ms.mark(f, "u")
            else:
                wctx = repo[None]
                mctx = wctx.parents()[-1]

                # backup pre-resolve (merge uses .orig for its own purposes)
                a = repo.wjoin(f)
                util.copyfile(a, a + ".resolve")

                # resolve file
                ms.resolve(f, wctx, mctx)

                # replace filemerge's .orig file with our resolve file
                util.rename(a + ".resolve", a + ".orig")

def revert(ui, repo, *pats, **opts):
    """restore individual files or directories to an earlier state

    (Use update -r to check out earlier revisions; revert does not
    change the working directory parents.)

    With no revision specified, revert the named files or directories
    to the contents they had in the parent of the working directory.
    This restores the contents of the affected files to an unmodified
    state and unschedules adds, removes, copies, and renames. If the
    working directory has two parents, you must explicitly specify the
    revision to revert to.

    Using the -r/--rev option, revert the given files or directories
    to their contents as of a specific revision. This can be helpful
    to "roll back" some or all of an earlier change. See 'hg help
    dates' for a list of formats valid for -d/--date.

    Revert modifies the working directory. It does not commit any
    changes, or change the parent of the working directory. If you
    revert to a revision other than the parent of the working
    directory, the reverted files will thus appear modified
    afterwards.

    If a file has been deleted, it is restored. If the executable mode
    of a file was changed, it is reset.

    If names are given, all files matching the names are reverted.
    If no arguments are given, no files are reverted.

    Modified files are saved with a .orig suffix before reverting.
    To disable these backups, use --no-backup.
    """

2451 if opts["rev"]:
2452 if opts["rev"]:
2452 raise util.Abort(_("you can't specify a revision and a date"))
2453 raise util.Abort(_("you can't specify a revision and a date"))
2453 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
2454 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
2454
2455
2455 if not pats and not opts.get('all'):
2456 if not pats and not opts.get('all'):
2456 raise util.Abort(_('no files or directories specified; '
2457 raise util.Abort(_('no files or directories specified; '
2457 'use --all to revert the whole repo'))
2458 'use --all to revert the whole repo'))
2458
2459
2459 parent, p2 = repo.dirstate.parents()
2460 parent, p2 = repo.dirstate.parents()
2460 if not opts.get('rev') and p2 != nullid:
2461 if not opts.get('rev') and p2 != nullid:
2461 raise util.Abort(_('uncommitted merge - please provide a '
2462 raise util.Abort(_('uncommitted merge - please provide a '
2462 'specific revision'))
2463 'specific revision'))
2463 ctx = repo[opts.get('rev')]
2464 ctx = repo[opts.get('rev')]
2464 node = ctx.node()
2465 node = ctx.node()
2465 mf = ctx.manifest()
2466 mf = ctx.manifest()
2466 if node == parent:
2467 if node == parent:
2467 pmf = mf
2468 pmf = mf
2468 else:
2469 else:
2469 pmf = None
2470 pmf = None
2470
2471
2471 # need all matching names in dirstate and manifest of target rev,
2472 # need all matching names in dirstate and manifest of target rev,
2472 # so have to walk both. do not print errors if files exist in one
2473 # so have to walk both. do not print errors if files exist in one
2473 # but not other.
2474 # but not other.
2474
2475
2475 names = {}
2476 names = {}
2476
2477
2477 wlock = repo.wlock()
2478 wlock = repo.wlock()
2478 try:
2479 try:
        # walk dirstate.

        m = cmdutil.match(repo, pats, opts)
        m.bad = lambda x,y: False
        for abs in repo.walk(m):
            names[abs] = m.rel(abs), m.exact(abs)

        # walk target manifest.

        def badfn(path, msg):
            if path in names:
                return False
            path_ = path + '/'
            for f in names:
                if f.startswith(path_):
                    return False
            repo.ui.warn("%s: %s\n" % (m.rel(path), msg))
            return False

        m = cmdutil.match(repo, pats, opts)
        m.bad = badfn
        for abs in repo[node].walk(m):
            if abs not in names:
                names[abs] = m.rel(abs), m.exact(abs)

        m = cmdutil.matchfiles(repo, names)
        changes = repo.status(match=m)[:4]
        modified, added, removed, deleted = map(set, changes)

        # if f is a rename, also revert the source
        cwd = repo.getcwd()
        for f in added:
            src = repo.dirstate.copied(f)
            if src and src not in names and repo.dirstate[src] == 'r':
                removed.add(src)
                names[src] = (repo.pathto(src, cwd), True)

        def removeforget(abs):
            if repo.dirstate[abs] == 'a':
                return _('forgetting %s\n')
            return _('removing %s\n')

        revert = ([], _('reverting %s\n'))
        add = ([], _('adding %s\n'))
        remove = ([], removeforget)
        undelete = ([], _('undeleting %s\n'))

        disptable = (
            # dispatch table:
            #   file state
            #   action if in target manifest
            #   action if not in target manifest
            #   make backup if in target manifest
            #   make backup if not in target manifest
            (modified, revert, remove, True, True),
            (added, revert, remove, True, False),
            (removed, undelete, None, False, False),
            (deleted, revert, remove, False, False),
            )

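        # classify every matched name via the dispatch table above and
        # queue the resulting action (revert/add/remove/undelete)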
        for abs, (rel, exact) in sorted(names.items()):
            mfentry = mf.get(abs)
            target = repo.wjoin(abs)
            def handle(xlist, dobackup):
                xlist[0].append(abs)
                if dobackup and not opts.get('no_backup') and util.lexists(target):
                    bakname = "%s.orig" % rel
                    ui.note(_('saving current version of %s as %s\n') %
                            (rel, bakname))
                    if not opts.get('dry_run'):
                        util.copyfile(target, bakname)
                if ui.verbose or not exact:
                    msg = xlist[1]
                    if not isinstance(msg, basestring):
                        msg = msg(abs)
                    ui.status(msg % rel)
            for table, hitlist, misslist, backuphit, backupmiss in disptable:
                if abs not in table: continue
                # file has changed in dirstate
                if mfentry:
                    handle(hitlist, backuphit)
                elif misslist is not None:
                    handle(misslist, backupmiss)
                break
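            # Editor's note: the 'else' below belongs to the for loop above
            # (Python's for/else), so it runs only when no disptable entry
            # matched, i.e. the file was not reported as changed by status.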
            else:
                if abs not in repo.dirstate:
                    if mfentry:
                        handle(add, True)
                    elif exact:
                        ui.warn(_('file not managed: %s\n') % rel)
                    continue
                # file has not changed in dirstate
                if node == parent:
                    if exact: ui.warn(_('no changes needed to %s\n') % rel)
                    continue
                if pmf is None:
                    # only need parent manifest in this unlikely case,
                    # so do not read by default
                    pmf = repo[parent].manifest()
                if abs in pmf:
                    if mfentry:
                        # if version of file is same in parent and target
                        # manifests, do nothing
                        if (pmf[abs] != mfentry or
                            pmf.flags(abs) != mf.flags(abs)):
                            handle(revert, False)
                    else:
                        handle(remove, False)

        if not opts.get('dry_run'):
            def checkout(f):
                fc = ctx[f]
                repo.wwrite(f, fc.data(), fc.flags())

            audit_path = util.path_auditor(repo.root)
            for f in remove[0]:
                if repo.dirstate[f] == 'a':
                    repo.dirstate.forget(f)
                    continue
                audit_path(f)
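                # Editor's note: the file may already be missing from the
                # working directory, so a failed unlink is ignored below.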
                try:
                    util.unlink(repo.wjoin(f))
                except OSError:
                    pass
                repo.dirstate.remove(f)

            normal = None
            if node == parent:
                # We're reverting to our parent. If possible, we'd like status
                # to report the file as clean. We have to use normallookup for
                # merges to avoid losing information about merged/dirty files.
                if p2 != nullid:
                    normal = repo.dirstate.normallookup
                else:
                    normal = repo.dirstate.normal
            for f in revert[0]:
                checkout(f)
                if normal:
                    normal(f)

            for f in add[0]:
                checkout(f)
                repo.dirstate.add(f)

            normal = repo.dirstate.normallookup
            if node == parent and p2 == nullid:
                normal = repo.dirstate.normal
            for f in undelete[0]:
                checkout(f)
                normal(f)

    finally:
        wlock.release()

def rollback(ui, repo):
    """roll back the last transaction

    This command should be used with care. There is only one level of
    rollback, and there is no way to undo a rollback. It will also
    restore the dirstate at the time of the last transaction, losing
    any dirstate changes since that time.

    Transactions are used to encapsulate the effects of all commands
    that create new changesets or propagate existing changesets into a
    repository. For example, the following commands are transactional,
    and their effects can be rolled back:

    commit
    import
    pull
    push (with this repository as destination)
    unbundle

    This command is not intended for use on public repositories. Once
    changes are visible for pull by other users, rolling a transaction
    back locally is ineffective (someone else may already have pulled
    the changes). Furthermore, a race is possible with readers of the
    repository; for example an in-progress pull from the repository
    may fail if a rollback is performed.
    """
    repo.rollback()

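# Editor's sketch (not upstream code): a typical session for the rollback
# command above, assuming a commit was just made in the current repository:
#   $ hg commit -m 'oops, committed too early'
#   $ hg rollback
# The changeset is removed, but the file modifications are left in the
# working directory.
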
def root(ui, repo):
    """print the root (top) of the current working directory

    Print the root directory of the current repository.
    """
    ui.write(repo.root + "\n")

def serve(ui, repo, **opts):
    """export the repository via HTTP

    Start a local HTTP repository browser and pull server.

    By default, the server logs accesses to stdout and errors to
    stderr. Use the -A/--accesslog and -E/--errorlog options to log to
    files.
    """

    if opts["stdio"]:
        if repo is None:
            raise error.RepoError(_("There is no Mercurial repository here"
                                    " (.hg not found)"))
        s = sshserver.sshserver(ui, repo)
        s.serve_forever()

    baseui = repo and repo.baseui or ui
    optlist = ("name templates style address port prefix ipv6"
               " accesslog errorlog webdir_conf certificate")
    for o in optlist.split():
        if opts[o]:
            baseui.setconfig("web", o, str(opts[o]))
            if (repo is not None) and (repo.ui != baseui):
                repo.ui.setconfig("web", o, str(opts[o]))

    if repo is None and not ui.config("web", "webdir_conf"):
        raise error.RepoError(_("There is no Mercurial repository here"
                                " (.hg not found)"))

    class service:
        def init(self):
            util.set_signal_handler()
            self.httpd = server.create_server(baseui, repo)

            if not ui.verbose: return

            if self.httpd.prefix:
                prefix = self.httpd.prefix.strip('/') + '/'
            else:
                prefix = ''

            port = ':%d' % self.httpd.port
            if port == ':80':
                port = ''

            bindaddr = self.httpd.addr
            if bindaddr == '0.0.0.0':
                bindaddr = '*'
            elif ':' in bindaddr: # IPv6
                bindaddr = '[%s]' % bindaddr

            fqaddr = self.httpd.fqaddr
            if ':' in fqaddr:
                fqaddr = '[%s]' % fqaddr
            ui.status(_('listening at http://%s%s/%s (bound to %s:%d)\n') %
                      (fqaddr, port, prefix, bindaddr, self.httpd.port))

        def run(self):
            self.httpd.serve_forever()

    service = service()

    cmdutil.service(opts, initfn=service.init, runfn=service.run)

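# Editor's sketch (not upstream code): serving a repository locally with
# the serve command above, logging accesses and errors to files:
#   $ hg serve -p 8000 -A access.log -E error.log
# which prints something like (exact host and bind address depend on the
# machine's configuration):
#   listening at http://hostname:8000/ (bound to *:8000)
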
def status(ui, repo, *pats, **opts):
    """show changed files in the working directory

    Show status of files in the repository. If names are given, only
    files that match are shown. Files that are clean or ignored or
    the source of a copy/move operation are not listed unless
    -c/--clean, -i/--ignored, -C/--copies or -A/--all is given. Unless
    options described with "show only ..." are given, the options
    -mardu are used.

    Option -q/--quiet hides untracked (unknown and ignored) files
    unless explicitly requested with -u/--unknown or -i/--ignored.

    NOTE: status may appear to disagree with diff if permissions have
    changed or a merge has occurred. The standard diff format does not
    report permission changes and diff only reports changes relative
    to one merge parent.

    If one revision is given, it is used as the base revision.
    If two revisions are given, the difference between them is shown.

    The codes used to show the status of files are:
    M = modified
    A = added
    R = removed
    C = clean
    ! = missing (deleted by non-hg command, but still tracked)
    ? = not tracked
    I = ignored
      = the previous added file was copied from here
    """

    node1, node2 = cmdutil.revpair(repo, opts.get('rev'))
    cwd = (pats and repo.getcwd()) or ''
    end = opts.get('print0') and '\0' or '\n'
    copy = {}
    states = 'modified added removed deleted unknown ignored clean'.split()
    show = [k for k in states if opts.get(k)]
    if opts.get('all'):
        show += ui.quiet and (states[:4] + ['clean']) or states
    if not show:
        show = ui.quiet and states[:4] or states[:5]

    stat = repo.status(node1, node2, cmdutil.match(repo, pats, opts),
                       'ignored' in show, 'clean' in show, 'unknown' in show)
    changestates = zip(states, 'MAR!?IC', stat)

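    # Editor's note: zip() pairs each state name with its status letter
    # ('MAR!?IC') and its file list, relying on repo.status() returning
    # its seven lists in exactly that order. The block below then maps
    # added files to their copy sources so -C/--copies can report them.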
    if (opts.get('all') or opts.get('copies')) and not opts.get('no_status'):
        ctxn = repo[nullid]
        ctx1 = repo[node1]
        ctx2 = repo[node2]
        added = stat[1]
        if node2 is None:
            added = stat[0] + stat[1] # merged?

        for k, v in copies.copies(repo, ctx1, ctx2, ctxn)[0].iteritems():
            if k in added:
                copy[k] = v
            elif v in added:
                copy[v] = k

    for state, char, files in changestates:
        if state in show:
            format = "%s %%s%s" % (char, end)
            if opts.get('no_status'):
                format = "%%s%s" % end

            for f in files:
                ui.write(format % repo.pathto(f, cwd))
                if f in copy:
                    ui.write('  %s%s' % (repo.pathto(copy[f], cwd), end))

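# Editor's sketch (not upstream code): with --copies, the loop above
# follows a copied file with its source on an indented line:
#   $ hg status -C
#   A new.py
#     old.py
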
def tag(ui, repo, name1, *names, **opts):
    """add one or more tags for the current or given revision

    Name a particular revision using <name>.

    Tags are used to name particular revisions of the repository and
    are very useful for comparing different revisions, going back to
    significant earlier versions, or marking branch points as
    releases.

    If no revision is given, the parent of the working directory is
    used, or tip if no revision is checked out.

    To facilitate version control, distribution, and merging of tags,
    they are stored as a file named ".hgtags" which is managed
    similarly to other project files and can be hand-edited if
    necessary. The file '.hg/localtags' is used for local tags (not
    shared among repositories).

    See 'hg help dates' for a list of formats valid for -d/--date.
    """

2827 names = (name1,) + names
2828 names = (name1,) + names
2828 if len(names) != len(set(names)):
2829 if len(names) != len(set(names)):
2829 raise util.Abort(_('tag names must be unique'))
2830 raise util.Abort(_('tag names must be unique'))
2830 for n in names:
2831 for n in names:
2831 if n in ['tip', '.', 'null']:
2832 if n in ['tip', '.', 'null']:
2832 raise util.Abort(_('the name \'%s\' is reserved') % n)
2833 raise util.Abort(_('the name \'%s\' is reserved') % n)
2833 if opts.get('rev') and opts.get('remove'):
2834 if opts.get('rev') and opts.get('remove'):
2834 raise util.Abort(_("--rev and --remove are incompatible"))
2835 raise util.Abort(_("--rev and --remove are incompatible"))
2835 if opts.get('rev'):
2836 if opts.get('rev'):
2836 rev_ = opts['rev']
2837 rev_ = opts['rev']
2837 message = opts.get('message')
2838 message = opts.get('message')
2838 if opts.get('remove'):
2839 if opts.get('remove'):
2839 expectedtype = opts.get('local') and 'local' or 'global'
2840 expectedtype = opts.get('local') and 'local' or 'global'
2840 for n in names:
2841 for n in names:
2841 if not repo.tagtype(n):
2842 if not repo.tagtype(n):
2842 raise util.Abort(_('tag \'%s\' does not exist') % n)
2843 raise util.Abort(_('tag \'%s\' does not exist') % n)
2843 if repo.tagtype(n) != expectedtype:
2844 if repo.tagtype(n) != expectedtype:
2844 if expectedtype == 'global':
2845 if expectedtype == 'global':
2845 raise util.Abort(_('tag \'%s\' is not a global tag') % n)
2846 raise util.Abort(_('tag \'%s\' is not a global tag') % n)
2846 else:
2847 else:
2847 raise util.Abort(_('tag \'%s\' is not a local tag') % n)
2848 raise util.Abort(_('tag \'%s\' is not a local tag') % n)
2848 rev_ = nullid
2849 rev_ = nullid
2849 if not message:
2850 if not message:
2850 message = _('Removed tag %s') % ', '.join(names)
2851 message = _('Removed tag %s') % ', '.join(names)
2851 elif not opts.get('force'):
2852 elif not opts.get('force'):
2852 for n in names:
2853 for n in names:
2853 if n in repo.tags():
2854 if n in repo.tags():
2854 raise util.Abort(_('tag \'%s\' already exists '
2855 raise util.Abort(_('tag \'%s\' already exists '
2855 '(use -f to force)') % n)
2856 '(use -f to force)') % n)
2856 if not rev_ and repo.dirstate.parents()[1] != nullid:
2857 if not rev_ and repo.dirstate.parents()[1] != nullid:
2857 raise util.Abort(_('uncommitted merge - please provide a '
2858 raise util.Abort(_('uncommitted merge - please provide a '
2858 'specific revision'))
2859 'specific revision'))
2859 r = repo[rev_].node()
2860 r = repo[rev_].node()
2860
2861
2861 if not message:
2862 if not message:
2862 message = (_('Added tag %s for changeset %s') %
2863 message = (_('Added tag %s for changeset %s') %
2863 (', '.join(names), short(r)))
2864 (', '.join(names), short(r)))
2864
2865
2865 date = opts.get('date')
2866 date = opts.get('date')
2866 if date:
2867 if date:
2867 date = util.parsedate(date)
2868 date = util.parsedate(date)
2868
2869
2869 repo.tag(names, r, message, opts.get('local'), opts.get('user'), date)
2870 repo.tag(names, r, message, opts.get('local'), opts.get('user'), date)
2870
2871
2871 def tags(ui, repo):
2872 def tags(ui, repo):
2872 """list repository tags
2873 """list repository tags
2873
2874
2874 This lists both regular and local tags. When the -v/--verbose
2875 This lists both regular and local tags. When the -v/--verbose
2875 switch is used, a third column "local" is printed for local tags.
2876 switch is used, a third column "local" is printed for local tags.
2876 """
2877 """
2877
2878
2878 hexfunc = ui.debugflag and hex or short
2879 hexfunc = ui.debugflag and hex or short
2879 tagtype = ""
2880 tagtype = ""
2880
2881
2881 for t, n in reversed(repo.tagslist()):
2882 for t, n in reversed(repo.tagslist()):
2882 if ui.quiet:
2883 if ui.quiet:
2883 ui.write("%s\n" % t)
2884 ui.write("%s\n" % t)
2884 continue
2885 continue
2885
2886
2886 try:
2887 try:
2887 hn = hexfunc(n)
2888 hn = hexfunc(n)
2888 r = "%5d:%s" % (repo.changelog.rev(n), hn)
2889 r = "%5d:%s" % (repo.changelog.rev(n), hn)
2889 except error.LookupError:
2890 except error.LookupError:
2890 r = " ?:%s" % hn
2891 r = " ?:%s" % hn
2891 else:
2892 else:
2892 spaces = " " * (30 - encoding.colwidth(t))
2893 spaces = " " * (30 - encoding.colwidth(t))
2893 if ui.verbose:
2894 if ui.verbose:
2894 if repo.tagtype(t) == 'local':
2895 if repo.tagtype(t) == 'local':
2895 tagtype = " local"
2896 tagtype = " local"
2896 else:
2897 else:
2897 tagtype = ""
2898 tagtype = ""
2898 ui.write("%s%s %s%s\n" % (t, spaces, r, tagtype))
2899 ui.write("%s%s %s%s\n" % (t, spaces, r, tagtype))
2899
2900
2900 def tip(ui, repo, **opts):
2901 def tip(ui, repo, **opts):
2901 """show the tip revision
2902 """show the tip revision
2902
2903
2903 The tip revision (usually just called the tip) is the most
2904 The tip revision (usually just called the tip) is the most
2904 recently added changeset in the repository, the most recently
2905 recently added changeset in the repository, the most recently
2905 changed head.
2906 changed head.
2906
2907
2907 If you have just made a commit, that commit will be the tip. If
2908 If you have just made a commit, that commit will be the tip. If
2908 you have just pulled changes from another repository, the tip of
2909 you have just pulled changes from another repository, the tip of
2909 that repository becomes the current tip. The "tip" tag is special
2910 that repository becomes the current tip. The "tip" tag is special
2910 and cannot be renamed or assigned to a different changeset.
2911 and cannot be renamed or assigned to a different changeset.
2911 """
2912 """
2912 cmdutil.show_changeset(ui, repo, opts).show(repo[len(repo) - 1])
2913 cmdutil.show_changeset(ui, repo, opts).show(repo[len(repo) - 1])
2913
2914
2914 def unbundle(ui, repo, fname1, *fnames, **opts):
2915 def unbundle(ui, repo, fname1, *fnames, **opts):
2915 """apply one or more changegroup files
2916 """apply one or more changegroup files
2916
2917
2917 Apply one or more compressed changegroup files generated by the
2918 Apply one or more compressed changegroup files generated by the
2918 bundle command.
2919 bundle command.
2919 """
2920 """
2920 fnames = (fname1,) + fnames
2921 fnames = (fname1,) + fnames
2921
2922
2922 lock = repo.lock()
2923 lock = repo.lock()
2923 try:
2924 try:
2924 for fname in fnames:
2925 for fname in fnames:
2925 f = url.open(ui, fname)
2926 f = url.open(ui, fname)
2926 gen = changegroup.readbundle(f, fname)
2927 gen = changegroup.readbundle(f, fname)
2927 modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
2928 modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
2928 finally:
2929 finally:
2929 lock.release()
2930 lock.release()
2930
2931
2931 return postincoming(ui, repo, modheads, opts.get('update'), None)
2932 return postincoming(ui, repo, modheads, opts.get('update'), None)
2932
2933
2933 def update(ui, repo, node=None, rev=None, clean=False, date=None):
2934 def update(ui, repo, node=None, rev=None, clean=False, date=None):
2934 """update working directory
2935 """update working directory
2935
2936
2936 Update the repository's working directory to the specified
2937 Update the repository's working directory to the specified
2937 revision, or the tip of the current branch if none is specified.
2938 revision, or the tip of the current branch if none is specified.
2938 Use null as the revision to remove the working copy (like 'hg
2939 Use null as the revision to remove the working copy (like 'hg
2939 clone -U').
2940 clone -U').
2940
2941
2941 When the working directory contains no uncommitted changes, it
2942 When the working directory contains no uncommitted changes, it
2942 will be replaced by the state of the requested revision from the
2943 will be replaced by the state of the requested revision from the
2943 repository. When the requested revision is on a different branch,
2944 repository. When the requested revision is on a different branch,
2944 the working directory will additionally be switched to that
2945 the working directory will additionally be switched to that
2945 branch.
2946 branch.
2946
2947
2947 When there are uncommitted changes, use option -C/--clean to
2948 When there are uncommitted changes, use option -C/--clean to
2948 discard them, forcibly replacing the state of the working
2949 discard them, forcibly replacing the state of the working
2949 directory with the requested revision.
2950 directory with the requested revision.
2950
2951
2951 When there are uncommitted changes and option -C/--clean is not
2952 When there are uncommitted changes and option -C/--clean is not
2952 used, and the parent revision and requested revision are on the
2953 used, and the parent revision and requested revision are on the
2953 same branch, and one of them is an ancestor of the other, then the
2954 same branch, and one of them is an ancestor of the other, then the
2954 new working directory will contain the requested revision merged
2955 new working directory will contain the requested revision merged
2955 with the uncommitted changes. Otherwise, the update will fail with
2956 with the uncommitted changes. Otherwise, the update will fail with
2956 a suggestion to use 'merge' or 'update -C' instead.
2957 a suggestion to use 'merge' or 'update -C' instead.
2957
2958
2958 If you want to update just one file to an older revision, use
2959 If you want to update just one file to an older revision, use
2959 revert.
2960 revert.
2960
2961
2961 See 'hg help dates' for a list of formats valid for -d/--date.
2962 See 'hg help dates' for a list of formats valid for -d/--date.
2962 """
2963 """
2963 if rev and node:
2964 if rev and node:
2964 raise util.Abort(_("please specify just one revision"))
2965 raise util.Abort(_("please specify just one revision"))
2965
2966
2966 if not rev:
2967 if not rev:
2967 rev = node
2968 rev = node
2968
2969
2969 if date:
2970 if date:
2970 if rev:
2971 if rev:
2971 raise util.Abort(_("you can't specify a revision and a date"))
2972 raise util.Abort(_("you can't specify a revision and a date"))
2972 rev = cmdutil.finddate(ui, repo, date)
2973 rev = cmdutil.finddate(ui, repo, date)
2973
2974
2974 if clean:
2975 if clean:
2975 return hg.clean(repo, rev)
2976 return hg.clean(repo, rev)
2976 else:
2977 else:
2977 return hg.update(repo, rev)
2978 return hg.update(repo, rev)
2978
2979
2979 def verify(ui, repo):
2980 def verify(ui, repo):
2980 """verify the integrity of the repository
2981 """verify the integrity of the repository
2981
2982
2982 Verify the integrity of the current repository.
2983 Verify the integrity of the current repository.
2983
2984
2984 This will perform an extensive check of the repository's
2985 This will perform an extensive check of the repository's
2985 integrity, validating the hashes and checksums of each entry in
2986 integrity, validating the hashes and checksums of each entry in
2986 the changelog, manifest, and tracked files, as well as the
2987 the changelog, manifest, and tracked files, as well as the
2987 integrity of their crosslinks and indices.
2988 integrity of their crosslinks and indices.
2988 """
2989 """
2989 return hg.verify(repo)
2990 return hg.verify(repo)
2990
2991
2991 def version_(ui):
2992 def version_(ui):
2992 """output version and copyright information"""
2993 """output version and copyright information"""
2993 ui.write(_("Mercurial Distributed SCM (version %s)\n")
2994 ui.write(_("Mercurial Distributed SCM (version %s)\n")
2994 % util.version())
2995 % util.version())
2995 ui.status(_(
2996 ui.status(_(
2996 "\nCopyright (C) 2005-2009 Matt Mackall <mpm@selenic.com> and others\n"
2997 "\nCopyright (C) 2005-2009 Matt Mackall <mpm@selenic.com> and others\n"
2997 "This is free software; see the source for copying conditions. "
2998 "This is free software; see the source for copying conditions. "
2998 "There is NO\nwarranty; "
2999 "There is NO\nwarranty; "
2999 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
3000 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
3000 ))
3001 ))
3001
3002
3002 # Command options and aliases are listed here, alphabetically
3003 # Command options and aliases are listed here, alphabetically
3003
3004
3004 globalopts = [
3005 globalopts = [
3005 ('R', 'repository', '',
3006 ('R', 'repository', '',
3006 _('repository root directory or symbolic path name')),
3007 _('repository root directory or symbolic path name')),
3007 ('', 'cwd', '', _('change working directory')),
3008 ('', 'cwd', '', _('change working directory')),
3008 ('y', 'noninteractive', None,
3009 ('y', 'noninteractive', None,
3009 _('do not prompt, assume \'yes\' for any required answers')),
3010 _('do not prompt, assume \'yes\' for any required answers')),
3010 ('q', 'quiet', None, _('suppress output')),
3011 ('q', 'quiet', None, _('suppress output')),
3011 ('v', 'verbose', None, _('enable additional output')),
3012 ('v', 'verbose', None, _('enable additional output')),
3012 ('', 'config', [], _('set/override config option')),
3013 ('', 'config', [], _('set/override config option')),
3013 ('', 'debug', None, _('enable debugging output')),
3014 ('', 'debug', None, _('enable debugging output')),
3014 ('', 'debugger', None, _('start debugger')),
3015 ('', 'debugger', None, _('start debugger')),
3015 ('', 'encoding', encoding.encoding, _('set the charset encoding')),
3016 ('', 'encoding', encoding.encoding, _('set the charset encoding')),
3016 ('', 'encodingmode', encoding.encodingmode,
3017 ('', 'encodingmode', encoding.encodingmode,
3017 _('set the charset encoding mode')),
3018 _('set the charset encoding mode')),
3018 ('', 'traceback', None, _('print traceback on exception')),
3019 ('', 'traceback', None, _('print traceback on exception')),
3019 ('', 'time', None, _('time how long the command takes')),
3020 ('', 'time', None, _('time how long the command takes')),
3020 ('', 'profile', None, _('print command execution profile')),
3021 ('', 'profile', None, _('print command execution profile')),
3021 ('', 'version', None, _('output version information and exit')),
3022 ('', 'version', None, _('output version information and exit')),
3022 ('h', 'help', None, _('display help and exit')),
3023 ('h', 'help', None, _('display help and exit')),
3023 ]
3024 ]
3024
3025
3025 dryrunopts = [('n', 'dry-run', None,
3026 dryrunopts = [('n', 'dry-run', None,
3026 _('do not perform actions, just print output'))]
3027 _('do not perform actions, just print output'))]
3027
3028
3028 remoteopts = [
3029 remoteopts = [
3029 ('e', 'ssh', '', _('specify ssh command to use')),
3030 ('e', 'ssh', '', _('specify ssh command to use')),
3030 ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
3031 ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
3031 ]
3032 ]
3032
3033
3033 walkopts = [
3034 walkopts = [
3034 ('I', 'include', [], _('include names matching the given patterns')),
3035 ('I', 'include', [], _('include names matching the given patterns')),
3035 ('X', 'exclude', [], _('exclude names matching the given patterns')),
3036 ('X', 'exclude', [], _('exclude names matching the given patterns')),
3036 ]
3037 ]
3037
3038
3038 commitopts = [
3039 commitopts = [
3039 ('m', 'message', '', _('use <text> as commit message')),
3040 ('m', 'message', '', _('use <text> as commit message')),
3040 ('l', 'logfile', '', _('read commit message from <file>')),
3041 ('l', 'logfile', '', _('read commit message from <file>')),
3041 ]
3042 ]
3042
3043
3043 commitopts2 = [
3044 commitopts2 = [
3044 ('d', 'date', '', _('record datecode as commit date')),
3045 ('d', 'date', '', _('record datecode as commit date')),
3045 ('u', 'user', '', _('record user as committer')),
3046 ('u', 'user', '', _('record user as committer')),
3046 ]
3047 ]
3047
3048
3048 templateopts = [
3049 templateopts = [
3049 ('', 'style', '', _('display using template map file')),
3050 ('', 'style', '', _('display using template map file')),
3050 ('', 'template', '', _('display with template')),
3051 ('', 'template', '', _('display with template')),
3051 ]
3052 ]
3052
3053
3053 logopts = [
3054 logopts = [
3054 ('p', 'patch', None, _('show patch')),
3055 ('p', 'patch', None, _('show patch')),
3055 ('g', 'git', None, _('use git extended diff format')),
3056 ('g', 'git', None, _('use git extended diff format')),
3056 ('l', 'limit', '', _('limit number of changes displayed')),
3057 ('l', 'limit', '', _('limit number of changes displayed')),
3057 ('M', 'no-merges', None, _('do not show merges')),
3058 ('M', 'no-merges', None, _('do not show merges')),
3058 ] + templateopts
3059 ] + templateopts
3059
3060
3060 diffopts = [
3061 diffopts = [
3061 ('a', 'text', None, _('treat all files as text')),
3062 ('a', 'text', None, _('treat all files as text')),
3062 ('g', 'git', None, _('use git extended diff format')),
3063 ('g', 'git', None, _('use git extended diff format')),
3063 ('', 'nodates', None, _("don't include dates in diff headers"))
3064 ('', 'nodates', None, _("don't include dates in diff headers"))
3064 ]
3065 ]
3065
3066
3066 diffopts2 = [
3067 diffopts2 = [
3067 ('p', 'show-function', None, _('show which function each change is in')),
3068 ('p', 'show-function', None, _('show which function each change is in')),
3068 ('w', 'ignore-all-space', None,
3069 ('w', 'ignore-all-space', None,
3069 _('ignore white space when comparing lines')),
3070 _('ignore white space when comparing lines')),
3070 ('b', 'ignore-space-change', None,
3071 ('b', 'ignore-space-change', None,
3071 _('ignore changes in the amount of white space')),
3072 _('ignore changes in the amount of white space')),
3072 ('B', 'ignore-blank-lines', None,
3073 ('B', 'ignore-blank-lines', None,
3073 _('ignore changes whose lines are all blank')),
3074 _('ignore changes whose lines are all blank')),
3074 ('U', 'unified', '', _('number of lines of context to show'))
3075 ('U', 'unified', '', _('number of lines of context to show'))
3075 ]
3076 ]
3076
3077
3077 similarityopts = [
3078 similarityopts = [
3078 ('s', 'similarity', '',
3079 ('s', 'similarity', '',
3079 _('guess renamed files by similarity (0<=s<=100)'))
3080 _('guess renamed files by similarity (0<=s<=100)'))
3080 ]
3081 ]
3081
3082
3082 table = {
3083 table = {
3083 "^add": (add, walkopts + dryrunopts, _('[OPTION]... [FILE]...')),
3084 "^add": (add, walkopts + dryrunopts, _('[OPTION]... [FILE]...')),
3084 "addremove":
3085 "addremove":
3085 (addremove, similarityopts + walkopts + dryrunopts,
3086 (addremove, similarityopts + walkopts + dryrunopts,
3086 _('[OPTION]... [FILE]...')),
3087 _('[OPTION]... [FILE]...')),
3087 "^annotate|blame":
3088 "^annotate|blame":
3088 (annotate,
3089 (annotate,
3089 [('r', 'rev', '', _('annotate the specified revision')),
3090 [('r', 'rev', '', _('annotate the specified revision')),
3090 ('f', 'follow', None, _('follow file copies and renames')),
3091 ('f', 'follow', None, _('follow file copies and renames')),
3091 ('a', 'text', None, _('treat all files as text')),
3092 ('a', 'text', None, _('treat all files as text')),
3092 ('u', 'user', None, _('list the author (long with -v)')),
3093 ('u', 'user', None, _('list the author (long with -v)')),
3093 ('d', 'date', None, _('list the date (short with -q)')),
3094 ('d', 'date', None, _('list the date (short with -q)')),
3094 ('n', 'number', None, _('list the revision number (default)')),
3095 ('n', 'number', None, _('list the revision number (default)')),
3095 ('c', 'changeset', None, _('list the changeset')),
3096 ('c', 'changeset', None, _('list the changeset')),
3096 ('l', 'line-number', None,
3097 ('l', 'line-number', None,
3097 _('show line number at the first appearance'))
3098 _('show line number at the first appearance'))
3098 ] + walkopts,
3099 ] + walkopts,
3099 _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')),
3100 _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')),
3100 "archive":
3101 "archive":
3101 (archive,
3102 (archive,
3102 [('', 'no-decode', None, _('do not pass files through decoders')),
3103 [('', 'no-decode', None, _('do not pass files through decoders')),
3103 ('p', 'prefix', '', _('directory prefix for files in archive')),
3104 ('p', 'prefix', '', _('directory prefix for files in archive')),
3104 ('r', 'rev', '', _('revision to distribute')),
3105 ('r', 'rev', '', _('revision to distribute')),
3105 ('t', 'type', '', _('type of distribution to create')),
3106 ('t', 'type', '', _('type of distribution to create')),
3106 ] + walkopts,
3107 ] + walkopts,
3107 _('[OPTION]... DEST')),
3108 _('[OPTION]... DEST')),
3108 "backout":
3109 "backout":
3109 (backout,
3110 (backout,
3110 [('', 'merge', None,
3111 [('', 'merge', None,
3111 _('merge with old dirstate parent after backout')),
3112 _('merge with old dirstate parent after backout')),
3112 ('', 'parent', '', _('parent to choose when backing out merge')),
3113 ('', 'parent', '', _('parent to choose when backing out merge')),
3113 ('r', 'rev', '', _('revision to backout')),
3114 ('r', 'rev', '', _('revision to backout')),
3114 ] + walkopts + commitopts + commitopts2,
3115 ] + walkopts + commitopts + commitopts2,
3115 _('[OPTION]... [-r] REV')),
3116 _('[OPTION]... [-r] REV')),
3116 "bisect":
3117 "bisect":
3117 (bisect,
3118 (bisect,
3118 [('r', 'reset', False, _('reset bisect state')),
3119 [('r', 'reset', False, _('reset bisect state')),
3119 ('g', 'good', False, _('mark changeset good')),
3120 ('g', 'good', False, _('mark changeset good')),
3120 ('b', 'bad', False, _('mark changeset bad')),
3121 ('b', 'bad', False, _('mark changeset bad')),
3121 ('s', 'skip', False, _('skip testing changeset')),
3122 ('s', 'skip', False, _('skip testing changeset')),
3122 ('c', 'command', '', _('use command to check changeset state')),
3123 ('c', 'command', '', _('use command to check changeset state')),
3123 ('U', 'noupdate', False, _('do not update to target'))],
3124 ('U', 'noupdate', False, _('do not update to target'))],
3124 _("[-gbsr] [-c CMD] [REV]")),
3125 _("[-gbsr] [-c CMD] [REV]")),
3125 "branch":
3126 "branch":
3126 (branch,
3127 (branch,
3127 [('f', 'force', None,
3128 [('f', 'force', None,
3128 _('set branch name even if it shadows an existing branch')),
3129 _('set branch name even if it shadows an existing branch')),
3129 ('C', 'clean', None, _('reset branch name to parent branch name'))],
3130 ('C', 'clean', None, _('reset branch name to parent branch name'))],
3130 _('[-fC] [NAME]')),
3131 _('[-fC] [NAME]')),
3131 "branches":
3132 "branches":
3132 (branches,
3133 (branches,
3133 [('a', 'active', False,
3134 [('a', 'active', False,
3134 _('show only branches that have unmerged heads'))],
3135 _('show only branches that have unmerged heads'))],
3135 _('[-a]')),
3136 _('[-a]')),
3136 "bundle":
3137 "bundle":
3137 (bundle,
3138 (bundle,
3138 [('f', 'force', None,
3139 [('f', 'force', None,
3139 _('run even when remote repository is unrelated')),
3140 _('run even when remote repository is unrelated')),
3140 ('r', 'rev', [],
3141 ('r', 'rev', [],
3141 _('a changeset up to which you would like to bundle')),
3142 _('a changeset up to which you would like to bundle')),
3142 ('', 'base', [],
3143 ('', 'base', [],
3143 _('a base changeset to specify instead of a destination')),
3144 _('a base changeset to specify instead of a destination')),
3144 ('a', 'all', None, _('bundle all changesets in the repository')),
3145 ('a', 'all', None, _('bundle all changesets in the repository')),
3145 ('t', 'type', 'bzip2', _('bundle compression type to use')),
3146 ('t', 'type', 'bzip2', _('bundle compression type to use')),
3146 ] + remoteopts,
3147 ] + remoteopts,
3147 _('[-f] [-a] [-r REV]... [--base REV]... FILE [DEST]')),
3148 _('[-f] [-a] [-r REV]... [--base REV]... FILE [DEST]')),
3148 "cat":
3149 "cat":
3149 (cat,
3150 (cat,
3150 [('o', 'output', '', _('print output to file with formatted name')),
3151 [('o', 'output', '', _('print output to file with formatted name')),
3151 ('r', 'rev', '', _('print the given revision')),
3152 ('r', 'rev', '', _('print the given revision')),
3152 ('', 'decode', None, _('apply any matching decode filter')),
3153 ('', 'decode', None, _('apply any matching decode filter')),
3153 ] + walkopts,
3154 ] + walkopts,
3154 _('[OPTION]... FILE...')),
3155 _('[OPTION]... FILE...')),
3155 "^clone":
3156 "^clone":
3156 (clone,
3157 (clone,
3157 [('U', 'noupdate', None,
3158 [('U', 'noupdate', None,
3158 _('the clone will only contain a repository (no working copy)')),
3159 _('the clone will only contain a repository (no working copy)')),
3159 ('r', 'rev', [],
3160 ('r', 'rev', [],
3160 _('a changeset you would like to have after cloning')),
3161 _('a changeset you would like to have after cloning')),
3161 ('', 'pull', None, _('use pull protocol to copy metadata')),
3162 ('', 'pull', None, _('use pull protocol to copy metadata')),
3162 ('', 'uncompressed', None,
3163 ('', 'uncompressed', None,
3163 _('use uncompressed transfer (fast over LAN)')),
3164 _('use uncompressed transfer (fast over LAN)')),
3164 ] + remoteopts,
3165 ] + remoteopts,
3165 _('[OPTION]... SOURCE [DEST]')),
3166 _('[OPTION]... SOURCE [DEST]')),
3166 "^commit|ci":
3167 "^commit|ci":
3167 (commit,
3168 (commit,
3168 [('A', 'addremove', None,
3169 [('A', 'addremove', None,
3169 _('mark new/missing files as added/removed before committing')),
3170 _('mark new/missing files as added/removed before committing')),
3170 ('', 'close-branch', None,
3171 ('', 'close-branch', None,
3171 _('mark a branch as closed, hiding it from the branch list')),
3172 _('mark a branch as closed, hiding it from the branch list')),
3172 ] + walkopts + commitopts + commitopts2,
3173 ] + walkopts + commitopts + commitopts2,
3173 _('[OPTION]... [FILE]...')),
3174 _('[OPTION]... [FILE]...')),
3174 "copy|cp":
3175 "copy|cp":
3175 (copy,
3176 (copy,
3176 [('A', 'after', None, _('record a copy that has already occurred')),
3177 [('A', 'after', None, _('record a copy that has already occurred')),
3177 ('f', 'force', None,
3178 ('f', 'force', None,
3178 _('forcibly copy over an existing managed file')),
3179 _('forcibly copy over an existing managed file')),
3179 ] + walkopts + dryrunopts,
3180 ] + walkopts + dryrunopts,
3180 _('[OPTION]... [SOURCE]... DEST')),
3181 _('[OPTION]... [SOURCE]... DEST')),
3181 "debugancestor": (debugancestor, [], _('[INDEX] REV1 REV2')),
3182 "debugancestor": (debugancestor, [], _('[INDEX] REV1 REV2')),
3182 "debugcheckstate": (debugcheckstate, []),
3183 "debugcheckstate": (debugcheckstate, []),
3183 "debugcommands": (debugcommands, [], _('[COMMAND]')),
3184 "debugcommands": (debugcommands, [], _('[COMMAND]')),
3184 "debugcomplete":
3185 "debugcomplete":
3185 (debugcomplete,
3186 (debugcomplete,
3186 [('o', 'options', None, _('show the command options'))],
3187 [('o', 'options', None, _('show the command options'))],
3187 _('[-o] CMD')),
3188 _('[-o] CMD')),
3188 "debugdate":
3189 "debugdate":
3189 (debugdate,
3190 (debugdate,
3190 [('e', 'extended', None, _('try extended date formats'))],
3191 [('e', 'extended', None, _('try extended date formats'))],
3191 _('[-e] DATE [RANGE]')),
3192 _('[-e] DATE [RANGE]')),
3192 "debugdata": (debugdata, [], _('FILE REV')),
3193 "debugdata": (debugdata, [], _('FILE REV')),
3193 "debugfsinfo": (debugfsinfo, [], _('[PATH]')),
3194 "debugfsinfo": (debugfsinfo, [], _('[PATH]')),
3194 "debugindex": (debugindex, [], _('FILE')),
3195 "debugindex": (debugindex, [], _('FILE')),
3195 "debugindexdot": (debugindexdot, [], _('FILE')),
3196 "debugindexdot": (debugindexdot, [], _('FILE')),
3196 "debuginstall": (debuginstall, []),
3197 "debuginstall": (debuginstall, []),
3197 "debugrawcommit|rawcommit":
3198 "debugrawcommit|rawcommit":
3198 (rawcommit,
3199 (rawcommit,
3199 [('p', 'parent', [], _('parent')),
3200 [('p', 'parent', [], _('parent')),
3200 ('F', 'files', '', _('file list'))
3201 ('F', 'files', '', _('file list'))
3201 ] + commitopts + commitopts2,
3202 ] + commitopts + commitopts2,
3202 _('[OPTION]... [FILE]...')),
3203 _('[OPTION]... [FILE]...')),
3203 "debugrebuildstate":
3204 "debugrebuildstate":
3204 (debugrebuildstate,
3205 (debugrebuildstate,
3205 [('r', 'rev', '', _('revision to rebuild to'))],
3206 [('r', 'rev', '', _('revision to rebuild to'))],
3206 _('[-r REV] [REV]')),
3207 _('[-r REV] [REV]')),
3207 "debugrename":
3208 "debugrename":
3208 (debugrename,
3209 (debugrename,
3209 [('r', 'rev', '', _('revision to debug'))],
3210 [('r', 'rev', '', _('revision to debug'))],
3210 _('[-r REV] FILE')),
3211 _('[-r REV] FILE')),
3211 "debugsetparents":
3212 "debugsetparents":
3212 (debugsetparents, [], _('REV1 [REV2]')),
3213 (debugsetparents, [], _('REV1 [REV2]')),
3213 "debugstate":
3214 "debugstate":
3214 (debugstate,
3215 (debugstate,
3215 [('', 'nodates', None, _('do not display the saved mtime'))],
3216 [('', 'nodates', None, _('do not display the saved mtime'))],
3216 _('[OPTION]...')),
3217 _('[OPTION]...')),
3217 "debugwalk": (debugwalk, walkopts, _('[OPTION]... [FILE]...')),
3218 "debugwalk": (debugwalk, walkopts, _('[OPTION]... [FILE]...')),
3218 "^diff":
3219 "^diff":
3219 (diff,
3220 (diff,
3220 [('r', 'rev', [], _('revision')),
3221 [('r', 'rev', [], _('revision')),
3221 ('c', 'change', '', _('change made by revision'))
3222 ('c', 'change', '', _('change made by revision'))
3222 ] + diffopts + diffopts2 + walkopts,
3223 ] + diffopts + diffopts2 + walkopts,
3223 _('[OPTION]... [-r REV1 [-r REV2]] [FILE]...')),
3224 _('[OPTION]... [-r REV1 [-r REV2]] [FILE]...')),
3224 "^export":
3225 "^export":
3225 (export,
3226 (export,
3226 [('o', 'output', '', _('print output to file with formatted name')),
3227 [('o', 'output', '', _('print output to file with formatted name')),
3227 ('', 'switch-parent', None, _('diff against the second parent'))
3228 ('', 'switch-parent', None, _('diff against the second parent'))
3228 ] + diffopts,
3229 ] + diffopts,
3229 _('[OPTION]... [-o OUTFILESPEC] REV...')),
3230 _('[OPTION]... [-o OUTFILESPEC] REV...')),
3230 "grep":
3231 "grep":
3231 (grep,
3232 (grep,
3232 [('0', 'print0', None, _('end fields with NUL')),
3233 [('0', 'print0', None, _('end fields with NUL')),
3233 ('', 'all', None, _('print all revisions that match')),
3234 ('', 'all', None, _('print all revisions that match')),
3234 ('f', 'follow', None,
3235 ('f', 'follow', None,
3235 _('follow changeset history, or file history across copies and renames')),
3236 _('follow changeset history, or file history across copies and renames')),
3236 ('i', 'ignore-case', None, _('ignore case when matching')),
3237 ('i', 'ignore-case', None, _('ignore case when matching')),
3237 ('l', 'files-with-matches', None,
3238 ('l', 'files-with-matches', None,
3238 _('print only filenames and revisions that match')),
3239 _('print only filenames and revisions that match')),
3239 ('n', 'line-number', None, _('print matching line numbers')),
3240 ('n', 'line-number', None, _('print matching line numbers')),
3240 ('r', 'rev', [], _('search in given revision range')),
3241 ('r', 'rev', [], _('search in given revision range')),
3241 ('u', 'user', None, _('list the author (long with -v)')),
3242 ('u', 'user', None, _('list the author (long with -v)')),
3242 ('d', 'date', None, _('list the date (short with -q)')),
3243 ('d', 'date', None, _('list the date (short with -q)')),
3243 ] + walkopts,
3244 ] + walkopts,
3244 _('[OPTION]... PATTERN [FILE]...')),
3245 _('[OPTION]... PATTERN [FILE]...')),
3245 "heads":
3246 "heads":
3246 (heads,
3247 (heads,
3247 [('r', 'rev', '', _('show only heads which are descendants of REV')),
3248 [('r', 'rev', '', _('show only heads which are descendants of REV')),
3248 ('a', 'active', False,
3249 ('a', 'active', False,
3249 _('show only the active heads from open branches')),
3250 _('show only the active heads from open branches')),
3250 ] + templateopts,
3251 ] + templateopts,
3251 _('[-r REV] [REV]...')),
3252 _('[-r REV] [REV]...')),
3252 "help": (help_, [], _('[TOPIC]')),
3253 "help": (help_, [], _('[TOPIC]')),
3253 "identify|id":
3254 "identify|id":
3254 (identify,
3255 (identify,
3255 [('r', 'rev', '', _('identify the specified revision')),
3256 [('r', 'rev', '', _('identify the specified revision')),
3256 ('n', 'num', None, _('show local revision number')),
3257 ('n', 'num', None, _('show local revision number')),
3257 ('i', 'id', None, _('show global revision id')),
3258 ('i', 'id', None, _('show global revision id')),
3258 ('b', 'branch', None, _('show branch')),
3259 ('b', 'branch', None, _('show branch')),
3259 ('t', 'tags', None, _('show tags'))],
3260 ('t', 'tags', None, _('show tags'))],
3260 _('[-nibt] [-r REV] [SOURCE]')),
3261 _('[-nibt] [-r REV] [SOURCE]')),
3261 "import|patch":
3262 "import|patch":
3262 (import_,
3263 (import_,
3263 [('p', 'strip', 1,
3264 [('p', 'strip', 1,
3264 _('directory strip option for patch. This has the same '
3265 _('directory strip option for patch. This has the same '
3265 'meaning as the corresponding patch option')),
3266 'meaning as the corresponding patch option')),
3266 ('b', 'base', '', _('base path')),
3267 ('b', 'base', '', _('base path')),
3267 ('f', 'force', None,
3268 ('f', 'force', None,
3268 _('skip check for outstanding uncommitted changes')),
3269 _('skip check for outstanding uncommitted changes')),
3269 ('', 'no-commit', None, _("don't commit, just update the working directory")),
3270 ('', 'no-commit', None, _("don't commit, just update the working directory")),
3270 ('', 'exact', None,
3271 ('', 'exact', None,
3271 _('apply patch to the nodes from which it was generated')),
3272 _('apply patch to the nodes from which it was generated')),
3272 ('', 'import-branch', None,
3273 ('', 'import-branch', None,
3273 _('use any branch information in patch (implied by --exact)'))] +
3274 _('use any branch information in patch (implied by --exact)'))] +
3274 commitopts + commitopts2 + similarityopts,
3275 commitopts + commitopts2 + similarityopts,
3275 _('[OPTION]... PATCH...')),
3276 _('[OPTION]... PATCH...')),
3276 "incoming|in":
3277 "incoming|in":
3277 (incoming,
3278 (incoming,
3278 [('f', 'force', None,
3279 [('f', 'force', None,
3279 _('run even when remote repository is unrelated')),
3280 _('run even when remote repository is unrelated')),
3280 ('n', 'newest-first', None, _('show newest record first')),
3281 ('n', 'newest-first', None, _('show newest record first')),
3281 ('', 'bundle', '', _('file to store the bundles into')),
3282 ('', 'bundle', '', _('file to store the bundles into')),
3282 ('r', 'rev', [],
3283 ('r', 'rev', [],
3283 _('a specific revision up to which you would like to pull')),
3284 _('a specific revision up to which you would like to pull')),
3284 ] + logopts + remoteopts,
3285 ] + logopts + remoteopts,
3285 _('[-p] [-n] [-M] [-f] [-r REV]...'
3286 _('[-p] [-n] [-M] [-f] [-r REV]...'
3286 ' [--bundle FILENAME] [SOURCE]')),
3287 ' [--bundle FILENAME] [SOURCE]')),
3287 "^init":
3288 "^init":
3288 (init,
3289 (init,
3289 remoteopts,
3290 remoteopts,
3290 _('[-e CMD] [--remotecmd CMD] [DEST]')),
3291 _('[-e CMD] [--remotecmd CMD] [DEST]')),
3291 "locate":
3292 "locate":
3292 (locate,
3293 (locate,
3293 [('r', 'rev', '', _('search the repository as it stood at REV')),
3294 [('r', 'rev', '', _('search the repository as it stood at REV')),
3294 ('0', 'print0', None,
3295 ('0', 'print0', None,
3295 _('end filenames with NUL, for use with xargs')),
3296 _('end filenames with NUL, for use with xargs')),
3296 ('f', 'fullpath', None,
3297 ('f', 'fullpath', None,
3297 _('print complete paths from the filesystem root')),
3298 _('print complete paths from the filesystem root')),
3298 ] + walkopts,
3299 ] + walkopts,
3299 _('[OPTION]... [PATTERN]...')),
3300 _('[OPTION]... [PATTERN]...')),
3300 "^log|history":
3301 "^log|history":
3301 (log,
3302 (log,
3302 [('f', 'follow', None,
3303 [('f', 'follow', None,
3303 _('follow changeset history, or file history across copies and renames')),
3304 _('follow changeset history, or file history across copies and renames')),
3304 ('', 'follow-first', None,
3305 ('', 'follow-first', None,
3305 _('only follow the first parent of merge changesets')),
3306 _('only follow the first parent of merge changesets')),
3306 ('d', 'date', '', _('show revisions matching date spec')),
3307 ('d', 'date', '', _('show revisions matching date spec')),
3307 ('C', 'copies', None, _('show copied files')),
3308 ('C', 'copies', None, _('show copied files')),
3308 ('k', 'keyword', [], _('do case-insensitive search for a keyword')),
3309 ('k', 'keyword', [], _('do case-insensitive search for a keyword')),
3309 ('r', 'rev', [], _('show the specified revision or range')),
3310 ('r', 'rev', [], _('show the specified revision or range')),
3310 ('', 'removed', None, _('include revisions where files were removed')),
3311 ('', 'removed', None, _('include revisions where files were removed')),
3311 ('m', 'only-merges', None, _('show only merges')),
3312 ('m', 'only-merges', None, _('show only merges')),
3312 ('u', 'user', [], _('revisions committed by user')),
3313 ('u', 'user', [], _('revisions committed by user')),
3313 ('b', 'only-branch', [],
3314 ('b', 'only-branch', [],
3314 _('show only changesets within the given named branch')),
3315 _('show only changesets within the given named branch')),
3315 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
3316 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
3316 ] + logopts + walkopts,
3317 ] + logopts + walkopts,
3317 _('[OPTION]... [FILE]')),
3318 _('[OPTION]... [FILE]')),
3318 "manifest":
3319 "manifest":
3319 (manifest,
3320 (manifest,
3320 [('r', 'rev', '', _('revision to display'))],
3321 [('r', 'rev', '', _('revision to display'))],
3321 _('[-r REV]')),
3322 _('[-r REV]')),
3322 "^merge":
3323 "^merge":
3323 (merge,
3324 (merge,
3324 [('f', 'force', None, _('force a merge with outstanding changes')),
3325 [('f', 'force', None, _('force a merge with outstanding changes')),
3325 ('r', 'rev', '', _('revision to merge')),
3326 ('r', 'rev', '', _('revision to merge')),
3326 ],
3327 ],
3327 _('[-f] [[-r] REV]')),
3328 _('[-f] [[-r] REV]')),
3328 "outgoing|out":
3329 "outgoing|out":
3329 (outgoing,
3330 (outgoing,
3330 [('f', 'force', None,
3331 [('f', 'force', None,
3331 _('run even when remote repository is unrelated')),
3332 _('run even when remote repository is unrelated')),
3332 ('r', 'rev', [],
3333 ('r', 'rev', [],
3333 _('a specific revision up to which you would like to push')),
3334 _('a specific revision up to which you would like to push')),
3334 ('n', 'newest-first', None, _('show newest record first')),
3335 ('n', 'newest-first', None, _('show newest record first')),
3335 ] + logopts + remoteopts,
3336 ] + logopts + remoteopts,
3336 _('[-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
3337 _('[-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
3337 "^parents":
3338 "^parents":
3338 (parents,
3339 (parents,
3339 [('r', 'rev', '', _('show parents from the specified revision')),
3340 [('r', 'rev', '', _('show parents from the specified revision')),
3340 ] + templateopts,
3341 ] + templateopts,
3341 _('hg parents [-r REV] [FILE]')),
3342 _('hg parents [-r REV] [FILE]')),
3342 "paths": (paths, [], _('[NAME]')),
3343 "paths": (paths, [], _('[NAME]')),
3343 "^pull":
3344 "^pull":
3344 (pull,
3345 (pull,
3345 [('u', 'update', None,
3346 [('u', 'update', None,
3346 _('update to new tip if changesets were pulled')),
3347 _('update to new tip if changesets were pulled')),
3347 ('f', 'force', None,
3348 ('f', 'force', None,
3348 _('run even when remote repository is unrelated')),
3349 _('run even when remote repository is unrelated')),
3349 ('r', 'rev', [],
3350 ('r', 'rev', [],
3350 _('a specific revision up to which you would like to pull')),
3351 _('a specific revision up to which you would like to pull')),
3351 ] + remoteopts,
3352 ] + remoteopts,
3352 _('[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
3353 _('[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
3353 "^push":
3354 "^push":
3354 (push,
3355 (push,
3355 [('f', 'force', None, _('force push')),
3356 [('f', 'force', None, _('force push')),
3356 ('r', 'rev', [],
3357 ('r', 'rev', [],
3357 _('a specific revision up to which you would like to push')),
3358 _('a specific revision up to which you would like to push')),
3358 ] + remoteopts,
3359 ] + remoteopts,
3359 _('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
3360 _('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
3360 "recover": (recover, []),
3361 "recover": (recover, []),
3361 "^remove|rm":
3362 "^remove|rm":
3362 (remove,
3363 (remove,
3363 [('A', 'after', None, _('record delete for missing files')),
3364 [('A', 'after', None, _('record delete for missing files')),
3364 ('f', 'force', None,
3365 ('f', 'force', None,
3365 _('remove (and delete) file even if added or modified')),
3366 _('remove (and delete) file even if added or modified')),
3366 ] + walkopts,
3367 ] + walkopts,
3367 _('[OPTION]... FILE...')),
3368 _('[OPTION]... FILE...')),
3368 "rename|mv":
3369 "rename|mv":
3369 (rename,
3370 (rename,
3370 [('A', 'after', None, _('record a rename that has already occurred')),
3371 [('A', 'after', None, _('record a rename that has already occurred')),
3371 ('f', 'force', None,
3372 ('f', 'force', None,
3372 _('forcibly copy over an existing managed file')),
3373 _('forcibly copy over an existing managed file')),
3373 ] + walkopts + dryrunopts,
3374 ] + walkopts + dryrunopts,
3374 _('[OPTION]... SOURCE... DEST')),
3375 _('[OPTION]... SOURCE... DEST')),
3375 "resolve":
3376 "resolve":
3376 (resolve,
3377 (resolve,
3377 [('a', 'all', None, _('remerge all unresolved files')),
3378 [('a', 'all', None, _('remerge all unresolved files')),
3378 ('l', 'list', None, _('list state of files needing merge')),
3379 ('l', 'list', None, _('list state of files needing merge')),
3379 ('m', 'mark', None, _('mark files as resolved')),
3380 ('m', 'mark', None, _('mark files as resolved')),
3380 ('u', 'unmark', None, _('unmark files as resolved'))]
3381 ('u', 'unmark', None, _('unmark files as resolved'))]
3381 + walkopts,
3382 + walkopts,
3382 _('[OPTION]... [FILE]...')),
3383 _('[OPTION]... [FILE]...')),
3383 "revert":
3384 "revert":
3384 (revert,
3385 (revert,
3385 [('a', 'all', None, _('revert all changes when no arguments given')),
3386 [('a', 'all', None, _('revert all changes when no arguments given')),
3386 ('d', 'date', '', _('tipmost revision matching date')),
3387 ('d', 'date', '', _('tipmost revision matching date')),
3387 ('r', 'rev', '', _('revision to revert to')),
3388 ('r', 'rev', '', _('revision to revert to')),
3388 ('', 'no-backup', None, _('do not save backup copies of files')),
3389 ('', 'no-backup', None, _('do not save backup copies of files')),
3389 ] + walkopts + dryrunopts,
3390 ] + walkopts + dryrunopts,
3390 _('[OPTION]... [-r REV] [NAME]...')),
3391 _('[OPTION]... [-r REV] [NAME]...')),
3391 "rollback": (rollback, []),
3392 "rollback": (rollback, []),
3392 "root": (root, []),
3393 "root": (root, []),
3393 "^serve":
3394 "^serve":
3394 (serve,
3395 (serve,
3395 [('A', 'accesslog', '', _('name of access log file to write to')),
3396 [('A', 'accesslog', '', _('name of access log file to write to')),
3396 ('d', 'daemon', None, _('run server in background')),
3397 ('d', 'daemon', None, _('run server in background')),
3397 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3398 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3398 ('E', 'errorlog', '', _('name of error log file to write to')),
3399 ('E', 'errorlog', '', _('name of error log file to write to')),
3399 ('p', 'port', 0, _('port to listen on (default: 8000)')),
3400 ('p', 'port', 0, _('port to listen on (default: 8000)')),
3400 ('a', 'address', '', _('address to listen on (default: all interfaces)')),
3401 ('a', 'address', '', _('address to listen on (default: all interfaces)')),
3401 ('', 'prefix', '', _('prefix path to serve from (default: server root)')),
3402 ('', 'prefix', '', _('prefix path to serve from (default: server root)')),
3402 ('n', 'name', '',
3403 ('n', 'name', '',
3403 _('name to show in web pages (default: working directory)')),
3404 _('name to show in web pages (default: working directory)')),
3404 ('', 'webdir-conf', '', _('name of the webdir config file'
3405 ('', 'webdir-conf', '', _('name of the webdir config file'
3405 ' (serve more than one repository)')),
3406 ' (serve more than one repository)')),
3406 ('', 'pid-file', '', _('name of file to write process ID to')),
3407 ('', 'pid-file', '', _('name of file to write process ID to')),
3407 ('', 'stdio', None, _('for remote clients')),
3408 ('', 'stdio', None, _('for remote clients')),
3408 ('t', 'templates', '', _('web templates to use')),
3409 ('t', 'templates', '', _('web templates to use')),
3409 ('', 'style', '', _('template style to use')),
3410 ('', 'style', '', _('template style to use')),
3410 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
3411 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
3411 ('', 'certificate', '', _('SSL certificate file'))],
3412 ('', 'certificate', '', _('SSL certificate file'))],
3412 _('[OPTION]...')),
3413 _('[OPTION]...')),
3413 "showconfig|debugconfig":
3414 "showconfig|debugconfig":
3414 (showconfig,
3415 (showconfig,
3415 [('u', 'untrusted', None, _('show untrusted configuration options'))],
3416 [('u', 'untrusted', None, _('show untrusted configuration options'))],
3416 _('[-u] [NAME]...')),
3417 _('[-u] [NAME]...')),
3417 "^status|st":
3418 "^status|st":
3418 (status,
3419 (status,
3419 [('A', 'all', None, _('show status of all files')),
3420 [('A', 'all', None, _('show status of all files')),
3420 ('m', 'modified', None, _('show only modified files')),
3421 ('m', 'modified', None, _('show only modified files')),
3421 ('a', 'added', None, _('show only added files')),
3422 ('a', 'added', None, _('show only added files')),
3422 ('r', 'removed', None, _('show only removed files')),
3423 ('r', 'removed', None, _('show only removed files')),
3423 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3424 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3424 ('c', 'clean', None, _('show only files without changes')),
3425 ('c', 'clean', None, _('show only files without changes')),
3425 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3426 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3426 ('i', 'ignored', None, _('show only ignored files')),
3427 ('i', 'ignored', None, _('show only ignored files')),
3427 ('n', 'no-status', None, _('hide status prefix')),
3428 ('n', 'no-status', None, _('hide status prefix')),
3428 ('C', 'copies', None, _('show source of copied files')),
3429 ('C', 'copies', None, _('show source of copied files')),
3429 ('0', 'print0', None,
3430 ('0', 'print0', None,
3430 _('end filenames with NUL, for use with xargs')),
3431 _('end filenames with NUL, for use with xargs')),
3431 ('', 'rev', [], _('show difference from revision')),
3432 ('', 'rev', [], _('show difference from revision')),
3432 ] + walkopts,
3433 ] + walkopts,
3433 _('[OPTION]... [FILE]...')),
3434 _('[OPTION]... [FILE]...')),
3434 "tag":
3435 "tag":
3435 (tag,
3436 (tag,
3436 [('f', 'force', None, _('replace existing tag')),
3437 [('f', 'force', None, _('replace existing tag')),
3437 ('l', 'local', None, _('make the tag local')),
3438 ('l', 'local', None, _('make the tag local')),
3438 ('r', 'rev', '', _('revision to tag')),
3439 ('r', 'rev', '', _('revision to tag')),
3439 ('', 'remove', None, _('remove a tag')),
3440 ('', 'remove', None, _('remove a tag')),
3440 # -l/--local is already there, commitopts cannot be used
3441 # -l/--local is already there, commitopts cannot be used
3441 ('m', 'message', '', _('use <text> as commit message')),
3442 ('m', 'message', '', _('use <text> as commit message')),
3442 ] + commitopts2,
3443 ] + commitopts2,
3443 _('[-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...')),
3444 _('[-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...')),
3444 "tags": (tags, []),
3445 "tags": (tags, []),
3445 "tip":
3446 "tip":
3446 (tip,
3447 (tip,
3447 [('p', 'patch', None, _('show patch')),
3448 [('p', 'patch', None, _('show patch')),
3448 ('g', 'git', None, _('use git extended diff format')),
3449 ('g', 'git', None, _('use git extended diff format')),
3449 ] + templateopts,
3450 ] + templateopts,
3450 _('[-p]')),
3451 _('[-p]')),
3451 "unbundle":
3452 "unbundle":
3452 (unbundle,
3453 (unbundle,
3453 [('u', 'update', None,
3454 [('u', 'update', None,
3454 _('update to new tip if changesets were unbundled'))],
3455 _('update to new tip if changesets were unbundled'))],
3455 _('[-u] FILE...')),
3456 _('[-u] FILE...')),
3456 "^update|up|checkout|co":
3457 "^update|up|checkout|co":
3457 (update,
3458 (update,
3458 [('C', 'clean', None, _('overwrite locally modified files (no backup)')),
3459 [('C', 'clean', None, _('overwrite locally modified files (no backup)')),
3459 ('d', 'date', '', _('tipmost revision matching date')),
3460 ('d', 'date', '', _('tipmost revision matching date')),
3460 ('r', 'rev', '', _('revision'))],
3461 ('r', 'rev', '', _('revision'))],
3461 _('[-C] [-d DATE] [[-r] REV]')),
3462 _('[-C] [-d DATE] [[-r] REV]')),
3462 "verify": (verify, []),
3463 "verify": (verify, []),
3463 "version": (version_, []),
3464 "version": (version_, []),
3464 }
3465 }
3465
3466
3466 norepo = ("clone init version help debugcommands debugcomplete debugdata"
3467 norepo = ("clone init version help debugcommands debugcomplete debugdata"
3467 " debugindex debugindexdot debugdate debuginstall debugfsinfo")
3468 " debugindex debugindexdot debugdate debuginstall debugfsinfo")
3468 optionalrepo = ("identify paths serve showconfig debugancestor")
3469 optionalrepo = ("identify paths serve showconfig debugancestor")
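The table above is Mercurial's command registry: each key is a command name (aliases separated by '|', a leading '^' marks the command for the short help listing), and each value is a tuple of (callback, option list, synopsis), where every option is a (short flag, long name, default, help text) 4-tuple. The 'norepo' and 'optionalrepo' strings name commands that run without, or with an optional, local repository. As a rough sketch of the entry shape (the "hello" command here is hypothetical, not part of Mercurial's command set):

# illustrative sketch, not part of the diff above (Python 2)
_ = lambda s: s   # stand-in for mercurial.i18n._ so the sketch is self-contained

def hello(ui, repo, **opts):
    # command callbacks receive the ui, the repo, and the parsed options
    ui.write("hello from %s\n" % (repo.root,))

table = {
    "hello":                       # no aliases, not in the short help list
        (hello,                    # callback invoked by the dispatcher
         [('g', 'greeting', '',    # -g/--greeting with a string default
           _('text to print instead of "hello"'))],
         _('[-g TEXT]')),          # synopsis shown in the help output
}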
@@ -1,136 +1,137 @@
# config.py - configuration parsing for Mercurial
#
# Copyright 2009 Matt Mackall <mpm@selenic.com> and others
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2, incorporated herein by reference.

from i18n import _
-import re, error, os
+import error
+import re, os

class sortdict(dict):
    'a simple sorted dictionary'
    def __init__(self, data=None):
        self._list = []
        if data:
            self.update(data)
    def copy(self):
        return sortdict(self)
    def __setitem__(self, key, val):
        if key in self:
            self._list.remove(key)
        self._list.append(key)
        dict.__setitem__(self, key, val)
    def __iter__(self):
        return self._list.__iter__()
    def update(self, src):
        for k in src:
            self[k] = src[k]
    def items(self):
        return [(k, self[k]) for k in self._list]
    def __delitem__(self, key):
        dict.__delitem__(self, key)
        self._list.remove(key)

class config(object):
    def __init__(self, data=None):
        self._data = {}
        self._source = {}
        if data:
            for k in data._data:
                self._data[k] = data[k].copy()
            self._source = data._source.copy()
    def copy(self):
        return config(self)
    def __contains__(self, section):
        return section in self._data
    def __getitem__(self, section):
        return self._data.get(section, {})
    def __iter__(self):
        for d in self.sections():
            yield d
    def update(self, src):
        for s in src:
            if s not in self:
                self._data[s] = sortdict()
            self._data[s].update(src._data[s])
        self._source.update(src._source)
    def get(self, section, item, default=None):
        return self._data.get(section, {}).get(item, default)
    def source(self, section, item):
        return self._source.get((section, item), "")
    def sections(self):
        return sorted(self._data.keys())
    def items(self, section):
        return self._data.get(section, {}).items()
    def set(self, section, item, value, source=""):
        if section not in self:
            self._data[section] = sortdict()
        self._data[section][item] = value
        self._source[(section, item)] = source

    def parse(self, src, data, sections=None, remap=None, include=None):
        sectionre = re.compile(r'\[([^\[]+)\]')
        itemre = re.compile(r'([^=\s][^=]*?)\s*=\s*(.*\S|)')
        contre = re.compile(r'\s+(\S.*\S)')
        emptyre = re.compile(r'(;|#|\s*$)')
        unsetre = re.compile(r'%unset\s+(\S+)')
        includere = re.compile(r'%include\s+(\S.*\S)')
        section = ""
        item = None
        line = 0
        cont = 0

        for l in data.splitlines(1):
            line += 1
            if cont:
                m = contre.match(l)
                if m:
                    if sections and section not in sections:
                        continue
                    v = self.get(section, item) + "\n" + m.group(1)
                    self.set(section, item, v, "%s:%d" % (src, line))
                    continue
                item = None
            m = includere.match(l)
            if m:
                inc = m.group(1)
                base = os.path.dirname(src)
                inc = os.path.normpath(os.path.join(base, inc))
                if include:
                    include(inc, remap=remap, sections=sections)
                continue
            if emptyre.match(l):
                continue
            m = sectionre.match(l)
            if m:
                section = m.group(1)
                if remap:
                    section = remap.get(section, section)
                if section not in self:
                    self._data[section] = sortdict()
                continue
            m = itemre.match(l)
            if m:
                item = m.group(1)
                cont = 1
                if sections and section not in sections:
                    continue
                self.set(section, item, m.group(2), "%s:%d" % (src, line))
                continue
            m = unsetre.match(l)
            if m:
                name = m.group(1)
                if sections and section not in sections:
                    continue
                if self.get(section, name) != None:
                    del self._data[section][name]
                continue

            raise error.ConfigError(_('config error at %s:%d: \'%s\'')
                                    % (src, line, l.rstrip()))

    def read(self, path, fp=None, sections=None, remap=None):
        if not fp:
            fp = open(path)
        self.parse(path, fp.read(), sections, remap, self.read)
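For illustration, a minimal usage sketch of the parser above (assuming the mercurial package of this era is importable; Python 2):

# illustrative sketch, not part of the diff above
from mercurial import config

hgrc = "[ui]\nusername = Jane Doe <jane@example.com>\nverbose = True\n"
cfg = config.config()
cfg.parse('<example>', hgrc)        # 'src' is only used for provenance/errors
print cfg.get('ui', 'username')     # -> Jane Doe <jane@example.com>
print cfg.source('ui', 'username')  # -> <example>:2 (source:line of the item)
print cfg.items('ui')               # file order preserved, thanks to sortdict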
@@ -1,799 +1,800 @@
# context.py - changeset and file context objects for mercurial
#
# Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2, incorporated herein by reference.

from node import nullid, nullrev, short, hex
from i18n import _
-import ancestor, bdiff, error, util, os, errno
+import ancestor, bdiff, error, util
+import os, errno

propertycache = util.propertycache

class changectx(object):
    """A changecontext object makes access to data related to a particular
    changeset convenient."""
    def __init__(self, repo, changeid=''):
        """changeid is a revision number, node, or tag"""
        if changeid == '':
            changeid = '.'
        self._repo = repo
        if isinstance(changeid, (long, int)):
            self._rev = changeid
            self._node = self._repo.changelog.node(changeid)
        else:
            self._node = self._repo.lookup(changeid)
            self._rev = self._repo.changelog.rev(self._node)

    def __str__(self):
        return short(self.node())

    def __int__(self):
        return self.rev()

    def __repr__(self):
        return "<changectx %s>" % str(self)

    def __hash__(self):
        try:
            return hash(self._rev)
        except AttributeError:
            return id(self)

    def __eq__(self, other):
        try:
            return self._rev == other._rev
        except AttributeError:
            return False

    def __ne__(self, other):
        return not (self == other)

    def __nonzero__(self):
        return self._rev != nullrev

    @propertycache
    def _changeset(self):
        return self._repo.changelog.read(self.node())

    @propertycache
    def _manifest(self):
        return self._repo.manifest.read(self._changeset[0])

    @propertycache
    def _manifestdelta(self):
        return self._repo.manifest.readdelta(self._changeset[0])

    @propertycache
    def _parents(self):
        p = self._repo.changelog.parentrevs(self._rev)
        if p[1] == nullrev:
            p = p[:-1]
        return [changectx(self._repo, x) for x in p]

    def __contains__(self, key):
        return key in self._manifest

    def __getitem__(self, key):
        return self.filectx(key)

    def __iter__(self):
        for f in sorted(self._manifest):
            yield f

    def changeset(self): return self._changeset
    def manifest(self): return self._manifest

    def rev(self): return self._rev
    def node(self): return self._node
    def hex(self): return hex(self._node)
    def user(self): return self._changeset[1]
    def date(self): return self._changeset[2]
    def files(self): return self._changeset[3]
    def description(self): return self._changeset[4]
    def branch(self): return self._changeset[5].get("branch")
    def extra(self): return self._changeset[5]
    def tags(self): return self._repo.nodetags(self._node)

    def parents(self):
        """return contexts for each parent changeset"""
        return self._parents

    def children(self):
        """return contexts for each child changeset"""
        c = self._repo.changelog.children(self._node)
        return [changectx(self._repo, x) for x in c]

    def ancestors(self):
        for a in self._repo.changelog.ancestors(self._rev):
            yield changectx(self._repo, a)

    def descendants(self):
        for d in self._repo.changelog.descendants(self._rev):
            yield changectx(self._repo, d)

    def _fileinfo(self, path):
        if '_manifest' in self.__dict__:
            try:
                return self._manifest[path], self._manifest.flags(path)
            except KeyError:
                raise error.LookupError(self._node, path,
                                        _('not found in manifest'))
        if '_manifestdelta' in self.__dict__ or path in self.files():
            if path in self._manifestdelta:
                return self._manifestdelta[path], self._manifestdelta.flags(path)
        node, flag = self._repo.manifest.find(self._changeset[0], path)
        if not node:
            raise error.LookupError(self._node, path,
                                    _('not found in manifest'))

        return node, flag

    def filenode(self, path):
        return self._fileinfo(path)[0]

    def flags(self, path):
        try:
            return self._fileinfo(path)[1]
        except error.LookupError:
            return ''

    def filectx(self, path, fileid=None, filelog=None):
        """get a file context from this changeset"""
        if fileid is None:
            fileid = self.filenode(path)
        return filectx(self._repo, path, fileid=fileid,
                       changectx=self, filelog=filelog)

    def ancestor(self, c2):
        """
        return the ancestor context of self and c2
        """
        n = self._repo.changelog.ancestor(self._node, c2._node)
        return changectx(self._repo, n)

    def walk(self, match):
        fdict = dict.fromkeys(match.files())
        # for dirstate.walk, files=['.'] means "walk the whole tree".
        # follow that here, too
        fdict.pop('.', None)
        for fn in self:
            for ffn in fdict:
                # match if the file is the exact name or a directory
                if ffn == fn or fn.startswith("%s/" % ffn):
                    del fdict[ffn]
                    break
            if match(fn):
                yield fn
        for fn in sorted(fdict):
            if match.bad(fn, 'No such file in rev ' + str(self)) and match(fn):
                yield fn
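A rough usage sketch of changectx (assuming a Mercurial repository at '.', using this era's Python 2 API):

# illustrative sketch, not part of context.py
from mercurial import ui, hg
from mercurial.context import changectx

repo = hg.repository(ui.ui(), '.')   # open an existing local repository
ctx = changectx(repo, 'tip')         # changeid may be a rev number, node or tag
print ctx.rev(), ctx.hex()[:12]
print ctx.user(), ctx.branch()
print ctx.files()                    # files touched by this changeset
for p in ctx.parents():
    print 'parent:', p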
class filectx(object):
    """A filecontext object makes access to data related to a particular
    filerevision convenient."""
    def __init__(self, repo, path, changeid=None, fileid=None,
                 filelog=None, changectx=None):
        """changeid can be a changeset revision, node, or tag.
        fileid can be a file revision or node."""
        self._repo = repo
        self._path = path

        assert (changeid is not None
                or fileid is not None
                or changectx is not None)

        if filelog:
            self._filelog = filelog

        if changeid is not None:
            self._changeid = changeid
        if changectx is not None:
            self._changectx = changectx
        if fileid is not None:
            self._fileid = fileid

    @propertycache
    def _changectx(self):
        return changectx(self._repo, self._changeid)

    @propertycache
    def _filelog(self):
        return self._repo.file(self._path)

    @propertycache
    def _changeid(self):
        if '_changectx' in self.__dict__:
            return self._changectx.rev()
        else:
            return self._filelog.linkrev(self._filerev)

    @propertycache
    def _filenode(self):
        if '_fileid' in self.__dict__:
            return self._filelog.lookup(self._fileid)
        else:
            return self._changectx.filenode(self._path)

    @propertycache
    def _filerev(self):
        return self._filelog.rev(self._filenode)

    @propertycache
    def _repopath(self):
        return self._path

    def __nonzero__(self):
        try:
            self._filenode
            return True
        except error.LookupError:
            # file is missing
            return False

    def __str__(self):
        return "%s@%s" % (self.path(), short(self.node()))

    def __repr__(self):
        return "<filectx %s>" % str(self)

    def __hash__(self):
        try:
            return hash((self._path, self._fileid))
        except AttributeError:
            return id(self)

    def __eq__(self, other):
        try:
            return (self._path == other._path
                    and self._fileid == other._fileid)
        except AttributeError:
            return False

    def __ne__(self, other):
        return not (self == other)

    def filectx(self, fileid):
        '''opens an arbitrary revision of the file without
        opening a new filelog'''
        return filectx(self._repo, self._path, fileid=fileid,
                       filelog=self._filelog)

    def filerev(self): return self._filerev
    def filenode(self): return self._filenode
    def flags(self): return self._changectx.flags(self._path)
    def filelog(self): return self._filelog

    def rev(self):
        if '_changectx' in self.__dict__:
            return self._changectx.rev()
        if '_changeid' in self.__dict__:
            return self._changectx.rev()
        return self._filelog.linkrev(self._filerev)

    def linkrev(self): return self._filelog.linkrev(self._filerev)
    def node(self): return self._changectx.node()
    def user(self): return self._changectx.user()
    def date(self): return self._changectx.date()
    def files(self): return self._changectx.files()
    def description(self): return self._changectx.description()
    def branch(self): return self._changectx.branch()
    def manifest(self): return self._changectx.manifest()
    def changectx(self): return self._changectx

    def data(self): return self._filelog.read(self._filenode)
    def path(self): return self._path
    def size(self): return self._filelog.size(self._filerev)

    def cmp(self, text): return self._filelog.cmp(self._filenode, text)

    def renamed(self):
        """check if file was actually renamed in this changeset revision

        If rename logged in file revision, we report copy for changeset only
        if file revisions linkrev points back to the changeset in question
        or both changeset parents contain different file revisions.
        """

        renamed = self._filelog.renamed(self._filenode)
        if not renamed:
            return renamed

        if self.rev() == self.linkrev():
            return renamed

        name = self.path()
        fnode = self._filenode
        for p in self._changectx.parents():
            try:
                if fnode == p.filenode(name):
                    return None
            except error.LookupError:
                pass
        return renamed

    def parents(self):
        p = self._path
        fl = self._filelog
        pl = [(p, n, fl) for n in self._filelog.parents(self._filenode)]

        r = self._filelog.renamed(self._filenode)
        if r:
            pl[0] = (r[0], r[1], None)

        return [filectx(self._repo, p, fileid=n, filelog=l)
                for p,n,l in pl if n != nullid]

    def children(self):
        # hard for renames
        c = self._filelog.children(self._filenode)
        return [filectx(self._repo, self._path, fileid=x,
                        filelog=self._filelog) for x in c]

    def annotate(self, follow=False, linenumber=None):
        '''returns a list of tuples of (ctx, line) for each line
        in the file, where ctx is the filectx of the node where
        that line was last changed.
        This returns tuples of ((ctx, linenumber), line) for each line,
        if "linenumber" parameter is NOT "None".
        In such tuples, linenumber means one at the first appearance
        in the managed file.
        To reduce annotation cost,
        this returns fixed value(False is used) as linenumber,
        if "linenumber" parameter is "False".'''

        def decorate_compat(text, rev):
            return ([rev] * len(text.splitlines()), text)

        def without_linenumber(text, rev):
            return ([(rev, False)] * len(text.splitlines()), text)

        def with_linenumber(text, rev):
            size = len(text.splitlines())
            return ([(rev, i) for i in xrange(1, size + 1)], text)

        decorate = (((linenumber is None) and decorate_compat) or
                    (linenumber and with_linenumber) or
                    without_linenumber)

        def pair(parent, child):
            for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
                child[0][b1:b2] = parent[0][a1:a2]
            return child

        getlog = util.cachefunc(lambda x: self._repo.file(x))
        def getctx(path, fileid):
            log = path == self._path and self._filelog or getlog(path)
            return filectx(self._repo, path, fileid=fileid, filelog=log)
        getctx = util.cachefunc(getctx)

        def parents(f):
            # we want to reuse filectx objects as much as possible
            p = f._path
            if f._filerev is None: # working dir
                pl = [(n.path(), n.filerev()) for n in f.parents()]
            else:
                pl = [(p, n) for n in f._filelog.parentrevs(f._filerev)]

            if follow:
                r = f.renamed()
                if r:
                    pl[0] = (r[0], getlog(r[0]).rev(r[1]))

            return [getctx(p, n) for p, n in pl if n != nullrev]

        # use linkrev to find the first changeset where self appeared
        if self.rev() != self.linkrev():
            base = self.filectx(self.filerev())
        else:
            base = self

        # find all ancestors
        needed = {base: 1}
        visit = [base]
        files = [base._path]
        while visit:
            f = visit.pop(0)
            for p in parents(f):
                if p not in needed:
                    needed[p] = 1
                    visit.append(p)
                    if p._path not in files:
                        files.append(p._path)
                else:
                    # count how many times we'll use this
                    needed[p] += 1

        # sort by revision (per file) which is a topological order
        visit = []
        for f in files:
            fn = [(n.rev(), n) for n in needed if n._path == f]
            visit.extend(fn)

        hist = {}
        for r, f in sorted(visit):
            curr = decorate(f.data(), f)
            for p in parents(f):
                if p != nullid:
                    curr = pair(hist[p], curr)
                    # trim the history of unneeded revs
                    needed[p] -= 1
                    if not needed[p]:
                        del hist[p]
            hist[f] = curr

        return zip(hist[f][0], hist[f][1].splitlines(1))

    def ancestor(self, fc2):
        """
        find the common ancestor file context, if any, of self, and fc2
        """

        acache = {}

        # prime the ancestor cache for the working directory
        for c in (self, fc2):
            if c._filerev == None:
                pl = [(n.path(), n.filenode()) for n in c.parents()]
                acache[(c._path, None)] = pl

        flcache = {self._repopath:self._filelog, fc2._repopath:fc2._filelog}
        def parents(vertex):
            if vertex in acache:
                return acache[vertex]
            f, n = vertex
            if f not in flcache:
                flcache[f] = self._repo.file(f)
            fl = flcache[f]
            pl = [(f, p) for p in fl.parents(n) if p != nullid]
            re = fl.renamed(n)
            if re:
                pl.append(re)
            acache[vertex] = pl
            return pl

        a, b = (self._path, self._filenode), (fc2._path, fc2._filenode)
        v = ancestor.ancestor(a, b, parents)
        if v:
            f, n = v
            return filectx(self._repo, f, fileid=n, filelog=flcache[f])

        return None
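A sketch of reading and annotating one file revision through filectx (the 'README' path is hypothetical; with the default linenumber=None, annotate pairs each line with the filectx that introduced it):

# illustrative sketch, not part of context.py
from mercurial import ui, hg
from mercurial.context import changectx

repo = hg.repository(ui.ui(), '.')
fctx = changectx(repo, 'tip').filectx('README')
print fctx.size(), repr(fctx.data()[:40])
for actx, line in fctx.annotate():
    # actx is the filectx of the revision that last changed this line
    print actx.rev(), line,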
class workingctx(changectx):
    """A workingctx object makes access to data related to
    the current working directory convenient.
    parents - a pair of parent nodeids, or None to use the dirstate.
    date - any valid date string or (unixtime, offset), or None.
    user - username string, or None.
    extra - a dictionary of extra values, or None.
    changes - a list of file lists as returned by localrepo.status()
    or None to use the repository status.
    """
    def __init__(self, repo, parents=None, text="", user=None, date=None,
                 extra=None, changes=None):
        self._repo = repo
        self._rev = None
        self._node = None
        self._text = text
        if date:
            self._date = util.parsedate(date)
        if user:
            self._user = user
        if parents:
            self._parents = [changectx(self._repo, p) for p in parents]
        if changes:
            self._status = list(changes)

        self._extra = {}
        if extra:
            self._extra = extra.copy()
        if 'branch' not in self._extra:
            branch = self._repo.dirstate.branch()
            try:
                branch = branch.decode('UTF-8').encode('UTF-8')
            except UnicodeDecodeError:
                raise util.Abort(_('branch name not in UTF-8!'))
            self._extra['branch'] = branch
        if self._extra['branch'] == '':
            self._extra['branch'] = 'default'

    def __str__(self):
        return str(self._parents[0]) + "+"

    def __nonzero__(self):
        return True

    def __contains__(self, key):
        return self._repo.dirstate[key] not in "?r"

    @propertycache
    def _manifest(self):
        """generate a manifest corresponding to the working directory"""

        man = self._parents[0].manifest().copy()
        copied = self._repo.dirstate.copies()
        cf = lambda x: man.flags(copied.get(x, x))
        ff = self._repo.dirstate.flagfunc(cf)
        modified, added, removed, deleted, unknown = self._status[:5]
        for i, l in (("a", added), ("m", modified), ("u", unknown)):
            for f in l:
                man[f] = man.get(copied.get(f, f), nullid) + i
                try:
                    man.set(f, ff(f))
                except OSError:
                    pass

        for f in deleted + removed:
            if f in man:
                del man[f]

        return man

    @propertycache
    def _status(self):
        return self._repo.status(unknown=True)

    @propertycache
    def _user(self):
        return self._repo.ui.username()

    @propertycache
    def _date(self):
        return util.makedate()

    @propertycache
    def _parents(self):
        p = self._repo.dirstate.parents()
        if p[1] == nullid:
            p = p[:-1]
        self._parents = [changectx(self._repo, x) for x in p]
        return self._parents

    def manifest(self): return self._manifest

    def user(self): return self._user or self._repo.ui.username()
    def date(self): return self._date
    def description(self): return self._text
    def files(self):
        return sorted(self._status[0] + self._status[1] + self._status[2])

    def modified(self): return self._status[0]
    def added(self): return self._status[1]
    def removed(self): return self._status[2]
    def deleted(self): return self._status[3]
    def unknown(self): return self._status[4]
    def clean(self): return self._status[5]
    def branch(self): return self._extra['branch']
    def extra(self): return self._extra

    def tags(self):
        t = []
        [t.extend(p.tags()) for p in self.parents()]
        return t

    def children(self):
        return []

    def flags(self, path):
        if '_manifest' in self.__dict__:
            try:
                return self._manifest.flags(path)
            except KeyError:
                return ''

        pnode = self._parents[0].changeset()[0]
        orig = self._repo.dirstate.copies().get(path, path)
        node, flag = self._repo.manifest.find(pnode, orig)
        try:
            ff = self._repo.dirstate.flagfunc(lambda x: flag or '')
            return ff(path)
        except OSError:
            pass

        if not node or path in self.deleted() or path in self.removed():
            return ''
        return flag

    def filectx(self, path, filelog=None):
        """get a file context from the working directory"""
        return workingfilectx(self._repo, path, workingctx=self,
                              filelog=filelog)

    def ancestor(self, c2):
        """return the ancestor context of self and c2"""
        return self._parents[0].ancestor(c2) # punt on two parents for now

    def walk(self, match):
        return sorted(self._repo.dirstate.walk(match, True, False))
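A short sketch of inspecting the working directory through workingctx (again assuming a repository at '.'; with no arguments, parents and status default to the dirstate):

# illustrative sketch, not part of context.py
from mercurial import ui, hg
from mercurial.context import workingctx

repo = hg.repository(ui.ui(), '.')
wctx = workingctx(repo)
print wctx                            # first parent's short hash plus '+'
print wctx.branch()                   # branch recorded in the dirstate
print wctx.modified(), wctx.added()   # slices of localrepo.status()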
class workingfilectx(filectx):
    """A workingfilectx object makes access to data related to a particular
    file in the working directory convenient."""
    def __init__(self, repo, path, filelog=None, workingctx=None):
        """changeid can be a changeset revision, node, or tag.
        fileid can be a file revision or node."""
        self._repo = repo
        self._path = path
        self._changeid = None
        self._filerev = self._filenode = None

        if filelog:
            self._filelog = filelog
        if workingctx:
            self._changectx = workingctx

    @propertycache
    def _changectx(self):
        return workingctx(self._repo)

    @propertycache
    def _repopath(self):
        return self._repo.dirstate.copied(self._path) or self._path

    @propertycache
    def _filelog(self):
        return self._repo.file(self._repopath)

    def __nonzero__(self):
        return True

    def __str__(self):
        return "%s@%s" % (self.path(), self._changectx)

    def filectx(self, fileid):
        '''opens an arbitrary revision of the file without
        opening a new filelog'''
        return filectx(self._repo, self._repopath, fileid=fileid,
                       filelog=self._filelog)

    def rev(self):
        if '_changectx' in self.__dict__:
            return self._changectx.rev()
        return self._filelog.linkrev(self._filerev)

    def data(self): return self._repo.wread(self._path)
    def renamed(self):
        rp = self._repopath
        if rp == self._path:
            return None
        return rp, self._changectx._parents[0]._manifest.get(rp, nullid)

    def parents(self):
        '''return parent filectxs, following copies if necessary'''
        p = self._path
        rp = self._repopath
        pcl = self._changectx._parents
        fl = self._filelog
        pl = [(rp, pcl[0]._manifest.get(rp, nullid), fl)]
        if len(pcl) > 1:
            if rp != p:
                fl = None
            pl.append((p, pcl[1]._manifest.get(p, nullid), fl))

        return [filectx(self._repo, p, fileid=n, filelog=l)
                for p,n,l in pl if n != nullid]

    def children(self):
        return []

    def size(self): return os.stat(self._repo.wjoin(self._path)).st_size
    def date(self):
        t, tz = self._changectx.date()
        try:
            return (int(os.lstat(self._repo.wjoin(self._path)).st_mtime), tz)
        except OSError, err:
            if err.errno != errno.ENOENT: raise
            return (t, tz)

    def cmp(self, text): return self._repo.wread(self._path) == text

class memctx(object):
    """Use memctx to perform in-memory commits via localrepo.commitctx().

    Revision information is supplied at initialization time while
    related files data and is made available through a callback
    mechanism.  'repo' is the current localrepo, 'parents' is a
    sequence of two parent revisions identifiers (pass None for every
    missing parent), 'text' is the commit message and 'files' lists
    names of files touched by the revision (normalized and relative to
    repository root).

    filectxfn(repo, memctx, path) is a callable receiving the
    repository, the current memctx object and the normalized path of
    requested file, relative to repository root. It is fired by the
    commit function for every file in 'files', but calls order is
    undefined. If the file is available in the revision being
    committed (updated or added), filectxfn returns a memfilectx
    object. If the file was removed, filectxfn raises an
    IOError. Moved files are represented by marking the source file
694
695
695 Revision information is supplied at initialization time while
696 Revision information is supplied at initialization time while
696 related files data and is made available through a callback
697 related files data and is made available through a callback
697 mechanism. 'repo' is the current localrepo, 'parents' is a
698 mechanism. 'repo' is the current localrepo, 'parents' is a
698 sequence of two parent revisions identifiers (pass None for every
699 sequence of two parent revisions identifiers (pass None for every
699 missing parent), 'text' is the commit message and 'files' lists
700 missing parent), 'text' is the commit message and 'files' lists
700 names of files touched by the revision (normalized and relative to
701 names of files touched by the revision (normalized and relative to
701 repository root).
702 repository root).
702
703
703 filectxfn(repo, memctx, path) is a callable receiving the
704 filectxfn(repo, memctx, path) is a callable receiving the
704 repository, the current memctx object and the normalized path of
705 repository, the current memctx object and the normalized path of
705 requested file, relative to repository root. It is fired by the
706 requested file, relative to repository root. It is fired by the
706 commit function for every file in 'files', but calls order is
707 commit function for every file in 'files', but calls order is
707 undefined. If the file is available in the revision being
708 undefined. If the file is available in the revision being
708 committed (updated or added), filectxfn returns a memfilectx
709 committed (updated or added), filectxfn returns a memfilectx
709 object. If the file was removed, filectxfn raises an
710 object. If the file was removed, filectxfn raises an
710 IOError. Moved files are represented by marking the source file
711 IOError. Moved files are represented by marking the source file
711 removed and the new file added with copy information (see
712 removed and the new file added with copy information (see
712 memfilectx).
713 memfilectx).
713
714
714 user receives the committer name and defaults to current
715 user receives the committer name and defaults to current
715 repository username, date is the commit date in any format
716 repository username, date is the commit date in any format
716 supported by util.parsedate() and defaults to current date, extra
717 supported by util.parsedate() and defaults to current date, extra
717 is a dictionary of metadata or is left empty.
718 is a dictionary of metadata or is left empty.
718 """
719 """
719 def __init__(self, repo, parents, text, files, filectxfn, user=None,
720 def __init__(self, repo, parents, text, files, filectxfn, user=None,
720 date=None, extra=None):
721 date=None, extra=None):
721 self._repo = repo
722 self._repo = repo
722 self._rev = None
723 self._rev = None
723 self._node = None
724 self._node = None
724 self._text = text
725 self._text = text
725 self._date = date and util.parsedate(date) or util.makedate()
726 self._date = date and util.parsedate(date) or util.makedate()
726 self._user = user
727 self._user = user
727 parents = [(p or nullid) for p in parents]
728 parents = [(p or nullid) for p in parents]
728 p1, p2 = parents
729 p1, p2 = parents
729 self._parents = [changectx(self._repo, p) for p in (p1, p2)]
730 self._parents = [changectx(self._repo, p) for p in (p1, p2)]
730 files = sorted(set(files))
731 files = sorted(set(files))
731 self._status = [files, [], [], [], []]
732 self._status = [files, [], [], [], []]
732 self._filectxfn = filectxfn
733 self._filectxfn = filectxfn
733
734
734 self._extra = extra and extra.copy() or {}
735 self._extra = extra and extra.copy() or {}
735 if 'branch' not in self._extra:
736 if 'branch' not in self._extra:
736 self._extra['branch'] = 'default'
737 self._extra['branch'] = 'default'
737 elif self._extra.get('branch') == '':
738 elif self._extra.get('branch') == '':
738 self._extra['branch'] = 'default'
739 self._extra['branch'] = 'default'
739
740
740 def __str__(self):
741 def __str__(self):
741 return str(self._parents[0]) + "+"
742 return str(self._parents[0]) + "+"
742
743
743 def __int__(self):
744 def __int__(self):
744 return self._rev
745 return self._rev
745
746
746 def __nonzero__(self):
747 def __nonzero__(self):
747 return True
748 return True
748
749
749 def user(self): return self._user or self._repo.ui.username()
750 def user(self): return self._user or self._repo.ui.username()
750 def date(self): return self._date
751 def date(self): return self._date
751 def description(self): return self._text
752 def description(self): return self._text
752 def files(self): return self.modified()
753 def files(self): return self.modified()
753 def modified(self): return self._status[0]
754 def modified(self): return self._status[0]
754 def added(self): return self._status[1]
755 def added(self): return self._status[1]
755 def removed(self): return self._status[2]
756 def removed(self): return self._status[2]
756 def deleted(self): return self._status[3]
757 def deleted(self): return self._status[3]
757 def unknown(self): return self._status[4]
758 def unknown(self): return self._status[4]
758 def clean(self): return self._status[5]
759 def clean(self): return self._status[5]
759 def branch(self): return self._extra['branch']
760 def branch(self): return self._extra['branch']
760 def extra(self): return self._extra
761 def extra(self): return self._extra
761 def flags(self, f): return self[f].flags()
762 def flags(self, f): return self[f].flags()
762
763
763 def parents(self):
764 def parents(self):
764 """return contexts for each parent changeset"""
765 """return contexts for each parent changeset"""
765 return self._parents
766 return self._parents
766
767
767 def filectx(self, path, filelog=None):
768 def filectx(self, path, filelog=None):
768 """get a file context from the working directory"""
769 """get a file context from the working directory"""
769 return self._filectxfn(self._repo, self, path)
770 return self._filectxfn(self._repo, self, path)
770
771
771 class memfilectx(object):
772 class memfilectx(object):
772 """memfilectx represents an in-memory file to commit.
773 """memfilectx represents an in-memory file to commit.
773
774
774 See memctx for more details.
775 See memctx for more details.
775 """
776 """
776 def __init__(self, path, data, islink, isexec, copied):
777 def __init__(self, path, data, islink, isexec, copied):
777 """
778 """
778 path is the normalized file path relative to repository root.
779 path is the normalized file path relative to repository root.
779 data is the file content as a string.
780 data is the file content as a string.
780 islink is True if the file is a symbolic link.
781 islink is True if the file is a symbolic link.
781 isexec is True if the file is executable.
782 isexec is True if the file is executable.
782 copied is the source file path if current file was copied in the
783 copied is the source file path if current file was copied in the
783 revision being committed, or None."""
784 revision being committed, or None."""
784 self._path = path
785 self._path = path
785 self._data = data
786 self._data = data
786 self._flags = (islink and 'l' or '') + (isexec and 'x' or '')
787 self._flags = (islink and 'l' or '') + (isexec and 'x' or '')
787 self._copied = None
788 self._copied = None
788 if copied:
789 if copied:
789 self._copied = (copied, nullid)
790 self._copied = (copied, nullid)
790
791
791 def __nonzero__(self): return True
792 def __nonzero__(self): return True
792 def __str__(self): return "%s@%s" % (self.path(), self._changectx)
793 def __str__(self): return "%s@%s" % (self.path(), self._changectx)
793 def path(self): return self._path
794 def path(self): return self._path
794 def data(self): return self._data
795 def data(self): return self._data
795 def flags(self): return self._flags
796 def flags(self): return self._flags
796 def isexec(self): return 'x' in self._flags
797 def isexec(self): return 'x' in self._flags
797 def islink(self): return 'l' in self._flags
798 def islink(self): return 'l' in self._flags
798 def renamed(self): return self._copied
799 def renamed(self): return self._copied
799
800
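The memctx/memfilectx pair above is the API for building a changeset entirely in memory. A rough usage sketch follows; it is not part of this changeset, the repository object and file contents are invented, and only the signatures shown above plus the localrepo.commitctx() entry point named in the docstring are assumed:

    # hedged sketch: commit one file in memory, no working directory involved
    def getfilectx(repo, mctx, path):
        # fired by the commit for every name in 'files'; call order undefined
        return memfilectx(path, 'new contents\n',
                          islink=False, isexec=False, copied=None)

    mctx = memctx(repo, parents=(repo['tip'].node(), None),
                  text='example: one-file in-memory commit',
                  files=['a.txt'], filectxfn=getfilectx)
    new_node = repo.commitctx(mctx)

Passing None as the second parent stands in for a missing parent, which __init__ maps to nullid.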
@@ -1,232 +1,233 @@
# copies.py - copy detection for Mercurial
#
# Copyright 2008 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2, incorporated herein by reference.

from i18n import _
-import util, heapq
+import util
+import heapq

def _nonoverlap(d1, d2, d3):
    "Return list of elements in d1 not in d2 or d3"
    return sorted([d for d in d1 if d not in d3 and d not in d2])

def _dirname(f):
    s = f.rfind("/")
    if s == -1:
        return ""
    return f[:s]

def _dirs(files):
    d = {}
    for f in files:
        f = _dirname(f)
        while f not in d:
            d[f] = True
            f = _dirname(f)
    return d

def _findoldnames(fctx, limit):
    "find files that path was copied from, back to linkrev limit"
    old = {}
    seen = {}
    orig = fctx.path()
    visit = [(fctx, 0)]
    while visit:
        fc, depth = visit.pop()
        s = str(fc)
        if s in seen:
            continue
        seen[s] = 1
        if fc.path() != orig and fc.path() not in old:
            old[fc.path()] = (depth, fc.path()) # remember depth
        if fc.rev() < limit and fc.rev() is not None:
            continue
        visit += [(p, depth - 1) for p in fc.parents()]

    # return old names sorted by depth
    return [o[1] for o in sorted(old.values())]

def _findlimit(repo, a, b):
    "find the earliest revision that's an ancestor of a or b but not both"
    # basic idea:
    # - mark a and b with different sides
    # - if a parent's children are all on the same side, the parent is
    #   on that side, otherwise it is on no side
    # - walk the graph in topological order with the help of a heap;
    #   - add unseen parents to side map
    #   - clear side of any parent that has children on different sides
    #   - track number of interesting revs that might still be on a side
    #   - track the lowest interesting rev seen
    #   - quit when interesting revs is zero

    cl = repo.changelog
    working = len(cl) # pseudo rev for the working directory
    if a is None:
        a = working
    if b is None:
        b = working

    side = {a: -1, b: 1}
    visit = [-a, -b]
    heapq.heapify(visit)
    interesting = len(visit)
    limit = working

    while interesting:
        r = -heapq.heappop(visit)
        if r == working:
            parents = [cl.rev(p) for p in repo.dirstate.parents()]
        else:
            parents = cl.parentrevs(r)
        for p in parents:
            if p not in side:
                # first time we see p; add it to visit
                side[p] = side[r]
                if side[p]:
                    interesting += 1
                heapq.heappush(visit, -p)
            elif side[p] and side[p] != side[r]:
                # p was interesting but now we know better
                side[p] = 0
                interesting -= 1
        if side[r]:
            limit = r # lowest rev visited
            interesting -= 1
    return limit

def copies(repo, c1, c2, ca, checkdirs=False):
    """
    Find moves and copies between context c1 and c2
    """
    # avoid silly behavior for update from empty dir
    if not c1 or not c2 or c1 == c2:
        return {}, {}

    # avoid silly behavior for parent -> working dir
    if c2.node() == None and c1.node() == repo.dirstate.parents()[0]:
        return repo.dirstate.copies(), {}

    limit = _findlimit(repo, c1.rev(), c2.rev())
    m1 = c1.manifest()
    m2 = c2.manifest()
    ma = ca.manifest()

    def makectx(f, n):
        if len(n) != 20: # in a working context?
            if c1.rev() is None:
                return c1.filectx(f)
            return c2.filectx(f)
        return repo.filectx(f, fileid=n)
    ctx = util.cachefunc(makectx)

    copy = {}
    fullcopy = {}
    diverge = {}

    def checkcopies(f, m1, m2):
        '''check possible copies of f from m1 to m2'''
        c1 = ctx(f, m1[f])
        for of in _findoldnames(c1, limit):
            fullcopy[f] = of # remember for dir rename detection
            if of in m2: # original file not in other manifest?
                # if the original file is unchanged on the other branch,
                # no merge needed
                if m2[of] != ma.get(of):
                    c2 = ctx(of, m2[of])
                    ca = c1.ancestor(c2)
                    # related and named changed on only one side?
                    if ca and (ca.path() == f or ca.path() == c2.path()):
                        if c1 != ca or c2 != ca: # merge needed?
                            copy[f] = of
            elif of in ma:
                diverge.setdefault(of, []).append(f)

    repo.ui.debug(_(" searching for copies back to rev %d\n") % limit)

    u1 = _nonoverlap(m1, m2, ma)
    u2 = _nonoverlap(m2, m1, ma)

    if u1:
        repo.ui.debug(_(" unmatched files in local:\n %s\n")
                      % "\n ".join(u1))
    if u2:
        repo.ui.debug(_(" unmatched files in other:\n %s\n")
                      % "\n ".join(u2))

    for f in u1:
        checkcopies(f, m1, m2)
    for f in u2:
        checkcopies(f, m2, m1)

    diverge2 = set()
    for of, fl in diverge.items():
        if len(fl) == 1:
            del diverge[of] # not actually divergent
        else:
            diverge2.update(fl) # reverse map for below

    if fullcopy:
        repo.ui.debug(_(" all copies found (* = to merge, ! = divergent):\n"))
        for f in fullcopy:
            note = ""
            if f in copy: note += "*"
            if f in diverge2: note += "!"
            repo.ui.debug(_(" %s -> %s %s\n") % (f, fullcopy[f], note))
    del diverge2

    if not fullcopy or not checkdirs:
        return copy, diverge

    repo.ui.debug(_(" checking for directory renames\n"))

    # generate a directory move map
    d1, d2 = _dirs(m1), _dirs(m2)
    invalid = {}
    dirmove = {}

    # examine each file copy for a potential directory move, which is
    # when all the files in a directory are moved to a new directory
    for dst, src in fullcopy.iteritems():
        dsrc, ddst = _dirname(src), _dirname(dst)
        if dsrc in invalid:
            # already seen to be uninteresting
            continue
        elif dsrc in d1 and ddst in d1:
            # directory wasn't entirely moved locally
            invalid[dsrc] = True
        elif dsrc in d2 and ddst in d2:
            # directory wasn't entirely moved remotely
            invalid[dsrc] = True
        elif dsrc in dirmove and dirmove[dsrc] != ddst:
            # files from the same directory moved to two different places
            invalid[dsrc] = True
        else:
            # looks good so far
            dirmove[dsrc + "/"] = ddst + "/"

    for i in invalid:
        if i in dirmove:
            del dirmove[i]
    del d1, d2, invalid

    if not dirmove:
        return copy, diverge

    for d in dirmove:
        repo.ui.debug(_(" dir %s -> %s\n") % (d, dirmove[d]))

    # check unaccounted nonoverlapping files against directory moves
    for f in u1 + u2:
        if f not in fullcopy:
            for d in dirmove:
                if f.startswith(d):
                    # new file added in a directory that was moved, move it
                    df = dirmove[d] + f[len(d):]
                    if df not in copy:
                        copy[f] = df
                        repo.ui.debug(_(" file %s -> %s\n") % (f, copy[f]))
                    break

    return copy, diverge
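To make the helpers at the top of copies.py concrete, here is a doctest-style illustration with invented inputs (not part of the changeset): _nonoverlap is a sorted three-way difference, _dirname strips the last path component, and _dirs collects every ancestor directory of a set of files.

    >>> _nonoverlap({'a': 1, 'b': 1, 'c': 1}, {'b': 1}, {'c': 1})
    ['a']
    >>> _dirname('dir/sub/file.txt')
    'dir/sub'
    >>> _dirname('file.txt')
    ''
    >>> sorted(_dirs(['dir/sub/f', 'dir/g']))
    ['', 'dir', 'dir/sub']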
@@ -1,591 +1,592 @@
# dirstate.py - working directory tracking for mercurial
#
# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2, incorporated herein by reference.

from node import nullid
from i18n import _
-import struct, os, stat, util, errno, ignore
-import cStringIO, osutil, sys, parsers
+import util, ignore, osutil, parsers
+import struct, os, stat, errno
+import cStringIO, sys

_unknown = ('?', 0, 0, 0)
_format = ">cllll"
propertycache = util.propertycache

def _finddirs(path):
    pos = path.rfind('/')
    while pos != -1:
        yield path[:pos]
        pos = path.rfind('/', 0, pos)

def _incdirs(dirs, path):
    for base in _finddirs(path):
        if base in dirs:
            dirs[base] += 1
            return
        dirs[base] = 1

def _decdirs(dirs, path):
    for base in _finddirs(path):
        if dirs[base] > 1:
            dirs[base] -= 1
            return
        del dirs[base]

class dirstate(object):

    def __init__(self, opener, ui, root):
        self._opener = opener
        self._root = root
        self._rootdir = os.path.join(root, '')
        self._dirty = False
        self._dirtypl = False
        self._ui = ui

    @propertycache
    def _map(self):
        self._read()
        return self._map

    @propertycache
    def _copymap(self):
        self._read()
        return self._copymap

    @propertycache
    def _foldmap(self):
        f = {}
        for name in self._map:
            f[os.path.normcase(name)] = name
        return f

    @propertycache
    def _branch(self):
        try:
            return self._opener("branch").read().strip() or "default"
        except IOError:
            return "default"

    @propertycache
    def _pl(self):
        try:
            st = self._opener("dirstate").read(40)
            if len(st) == 40:
                return st[:20], st[20:40]
        except IOError, err:
            if err.errno != errno.ENOENT: raise
        return [nullid, nullid]

    @propertycache
    def _dirs(self):
        dirs = {}
        for f,s in self._map.iteritems():
            if s[0] != 'r':
                _incdirs(dirs, f)
        return dirs

    @propertycache
    def _ignore(self):
        files = [self._join('.hgignore')]
        for name, path in self._ui.configitems("ui"):
            if name == 'ignore' or name.startswith('ignore.'):
                files.append(os.path.expanduser(path))
        return ignore.ignore(self._root, files, self._ui.warn)

    @propertycache
    def _slash(self):
        return self._ui.configbool('ui', 'slash') and os.sep != '/'

    @propertycache
    def _checklink(self):
        return util.checklink(self._root)

    @propertycache
    def _checkexec(self):
        return util.checkexec(self._root)

    @propertycache
    def _checkcase(self):
        return not util.checkcase(self._join('.hg'))

    @propertycache
    def normalize(self):
        if self._checkcase:
            return self._normalize
        return lambda x, y=False: x

    def _join(self, f):
        # much faster than os.path.join()
        # it's safe because f is always a relative path
        return self._rootdir + f

    def flagfunc(self, fallback):
        if self._checklink:
            if self._checkexec:
                def f(x):
                    p = self._join(x)
                    if os.path.islink(p):
                        return 'l'
                    if util.is_exec(p):
                        return 'x'
                    return ''
                return f
            def f(x):
                if os.path.islink(self._join(x)):
                    return 'l'
                if 'x' in fallback(x):
                    return 'x'
                return ''
            return f
        if self._checkexec:
            def f(x):
                if 'l' in fallback(x):
                    return 'l'
                if util.is_exec(self._join(x)):
                    return 'x'
                return ''
            return f
        return fallback

    def getcwd(self):
        cwd = os.getcwd()
        if cwd == self._root: return ''
        # self._root ends with a path separator if self._root is '/' or 'C:\'
        rootsep = self._root
        if not util.endswithsep(rootsep):
            rootsep += os.sep
        if cwd.startswith(rootsep):
            return cwd[len(rootsep):]
        else:
            # we're outside the repo. return an absolute path.
            return cwd

    def pathto(self, f, cwd=None):
        if cwd is None:
            cwd = self.getcwd()
        path = util.pathto(self._root, cwd, f)
        if self._slash:
            return util.normpath(path)
        return path

    def __getitem__(self, key):
        ''' current states:
           n normal
           m needs merging
           r marked for removal
           a marked for addition
           ? not tracked'''
        return self._map.get(key, ("?",))[0]

    def __contains__(self, key):
        return key in self._map

    def __iter__(self):
        for x in sorted(self._map):
            yield x

    def parents(self):
        return self._pl

    def branch(self):
        return self._branch

    def setparents(self, p1, p2=nullid):
        self._dirty = self._dirtypl = True
        self._pl = p1, p2

    def setbranch(self, branch):
        self._branch = branch
        self._opener("branch", "w").write(branch + '\n')

    def _read(self):
        self._map = {}
        self._copymap = {}
        try:
            st = self._opener("dirstate").read()
        except IOError, err:
            if err.errno != errno.ENOENT: raise
            return
        if not st:
            return

        p = parsers.parse_dirstate(self._map, self._copymap, st)
        if not self._dirtypl:
            self._pl = p

    def invalidate(self):
        for a in "_map _copymap _foldmap _branch _pl _dirs _ignore".split():
            if a in self.__dict__:
                delattr(self, a)
        self._dirty = False

    def copy(self, source, dest):
        """Mark dest as a copy of source. Unmark dest if source is None.
        """
        if source == dest:
            return
        self._dirty = True
        if source is not None:
            self._copymap[dest] = source
        elif dest in self._copymap:
            del self._copymap[dest]

    def copied(self, file):
        return self._copymap.get(file, None)

    def copies(self):
        return self._copymap

    def _droppath(self, f):
        if self[f] not in "?r" and "_dirs" in self.__dict__:
            _decdirs(self._dirs, f)

    def _addpath(self, f, check=False):
        oldstate = self[f]
        if check or oldstate == "r":
            if '\r' in f or '\n' in f:
                raise util.Abort(
                    _("'\\n' and '\\r' disallowed in filenames: %r") % f)
            if f in self._dirs:
                raise util.Abort(_('directory %r already in dirstate') % f)
            # shadows
            for d in _finddirs(f):
                if d in self._dirs:
                    break
                if d in self._map and self[d] != 'r':
                    raise util.Abort(
                        _('file %r in dirstate clashes with %r') % (d, f))
        if oldstate in "?r" and "_dirs" in self.__dict__:
            _incdirs(self._dirs, f)

    def normal(self, f):
        'mark a file normal and clean'
        self._dirty = True
        self._addpath(f)
        s = os.lstat(self._join(f))
        self._map[f] = ('n', s.st_mode, s.st_size, int(s.st_mtime))
        if f in self._copymap:
            del self._copymap[f]

    def normallookup(self, f):
        'mark a file normal, but possibly dirty'
        if self._pl[1] != nullid and f in self._map:
            # if there is a merge going on and the file was either
            # in state 'm' or dirty before being removed, restore that state.
            entry = self._map[f]
            if entry[0] == 'r' and entry[2] in (-1, -2):
                source = self._copymap.get(f)
                if entry[2] == -1:
                    self.merge(f)
                elif entry[2] == -2:
                    self.normaldirty(f)
                if source:
                    self.copy(source, f)
                return
            if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2:
                return
        self._dirty = True
        self._addpath(f)
        self._map[f] = ('n', 0, -1, -1)
        if f in self._copymap:
            del self._copymap[f]

    def normaldirty(self, f):
        'mark a file normal, but dirty'
        self._dirty = True
        self._addpath(f)
        self._map[f] = ('n', 0, -2, -1)
        if f in self._copymap:
            del self._copymap[f]

    def add(self, f):
        'mark a file added'
        self._dirty = True
        self._addpath(f, True)
        self._map[f] = ('a', 0, -1, -1)
        if f in self._copymap:
            del self._copymap[f]

    def remove(self, f):
        'mark a file removed'
        self._dirty = True
        self._droppath(f)
        size = 0
        if self._pl[1] != nullid and f in self._map:
            entry = self._map[f]
            if entry[0] == 'm':
                size = -1
            elif entry[0] == 'n' and entry[2] == -2:
                size = -2
        self._map[f] = ('r', 0, size, 0)
        if size == 0 and f in self._copymap:
            del self._copymap[f]

    def merge(self, f):
        'mark a file merged'
        self._dirty = True
        s = os.lstat(self._join(f))
        self._addpath(f)
        self._map[f] = ('m', s.st_mode, s.st_size, int(s.st_mtime))
        if f in self._copymap:
            del self._copymap[f]

    def forget(self, f):
        'forget a file'
        self._dirty = True
        try:
            self._droppath(f)
            del self._map[f]
        except KeyError:
            self._ui.warn(_("not in dirstate: %s\n") % f)

    def _normalize(self, path, knownpath=False):
        norm_path = os.path.normcase(path)
        fold_path = self._foldmap.get(norm_path, None)
        if fold_path is None:
            if knownpath or not os.path.exists(os.path.join(self._root, path)):
                fold_path = path
            else:
                fold_path = self._foldmap.setdefault(norm_path,
                                util.fspath(path, self._root))
        return fold_path

    def clear(self):
        self._map = {}
        if "_dirs" in self.__dict__:
            delattr(self, "_dirs");
        self._copymap = {}
        self._pl = [nullid, nullid]
        self._dirty = True

    def rebuild(self, parent, files):
        self.clear()
        for f in files:
            if 'x' in files.flags(f):
                self._map[f] = ('n', 0777, -1, 0)
            else:
                self._map[f] = ('n', 0666, -1, 0)
        self._pl = (parent, nullid)
        self._dirty = True

    def write(self):
        if not self._dirty:
            return
        st = self._opener("dirstate", "w", atomictemp=True)

        try:
            gran = int(self._ui.config('dirstate', 'granularity', 1))
        except ValueError:
            gran = 1
        limit = sys.maxint
        if gran > 0:
            limit = util.fstat(st).st_mtime - gran

        cs = cStringIO.StringIO()
        copymap = self._copymap
        pack = struct.pack
        write = cs.write
        write("".join(self._pl))
        for f, e in self._map.iteritems():
            if f in copymap:
                f = "%s\0%s" % (f, copymap[f])
            if e[3] > limit and e[0] == 'n':
                e = (e[0], 0, -1, -1)
            e = pack(_format, e[0], e[1], e[2], e[3], len(f))
            write(e)
            write(f)
        st.write(cs.getvalue())
        st.rename()
        self._dirty = self._dirtypl = False

    def _dirignore(self, f):
        if f == '.':
            return False
        if self._ignore(f):
            return True
        for p in _finddirs(f):
            if self._ignore(p):
                return True
        return False

    def walk(self, match, unknown, ignored):
        '''
        walk recursively through the directory tree, finding all files
        matched by the match function

        results are yielded in a tuple (filename, stat), where stat
        and st is the stat result if the file was found in the directory.
        '''

        def fwarn(f, msg):
            self._ui.warn('%s: %s\n' % (self.pathto(f), msg))
            return False
        badfn = fwarn
        if hasattr(match, 'bad'):
            badfn = match.bad

        def badtype(f, mode):
            kind = _('unknown')
            if stat.S_ISCHR(mode): kind = _('character device')
            elif stat.S_ISBLK(mode): kind = _('block device')
            elif stat.S_ISFIFO(mode): kind = _('fifo')
            elif stat.S_ISSOCK(mode): kind = _('socket')
            elif stat.S_ISDIR(mode): kind = _('directory')
            self._ui.warn(_('%s: unsupported file type (type is %s)\n')
                          % (self.pathto(f), kind))

        ignore = self._ignore
        dirignore = self._dirignore
        if ignored:
            ignore = util.never
            dirignore = util.never
        elif not unknown:
            # if unknown and ignored are False, skip step 2
            ignore = util.always
            dirignore = util.always

        matchfn = match.matchfn
        dmap = self._map
        normpath = util.normpath
        normalize = self.normalize
        listdir = osutil.listdir
        lstat = os.lstat
        getkind = stat.S_IFMT
        dirkind = stat.S_IFDIR
        regkind = stat.S_IFREG
        lnkkind = stat.S_IFLNK
        join = self._join
        work = []
        wadd = work.append

        files = set(match.files())
        if not files or '.' in files:
            files = ['']
        results = {'.hg': None}

        # step 1: find all explicit files
        for ff in sorted(files):
            nf = normalize(normpath(ff))
            if nf in results:
                continue

            try:
                st = lstat(join(nf))
                kind = getkind(st.st_mode)
                if kind == dirkind:
                    if not dirignore(nf):
                        wadd(nf)
                elif kind == regkind or kind == lnkkind:
                    results[nf] = st
                else:
                    badtype(ff, kind)
                    if nf in dmap:
                        results[nf] = None
            except OSError, inst:
                keep = False
                prefix = nf + "/"
                for fn in dmap:
                    if nf == fn or fn.startswith(prefix):
                        keep = True
                        break
                if not keep:
                    if inst.errno != errno.ENOENT:
                        fwarn(ff, inst.strerror)
                    elif badfn(ff, inst.strerror):
                        if (nf in dmap or not ignore(nf)) and matchfn(nf):
                            results[nf] = None

        # step 2: visit subdirectories
        while work:
            nd = work.pop()
            if hasattr(match, 'dir'):
                match.dir(nd)
            skip = None
            if nd == '.':
                nd = ''
            else:
                skip = '.hg'
            try:
                entries = listdir(join(nd), stat=True, skip=skip)
            except OSError, inst:
                if inst.errno == errno.EACCES:
                    fwarn(nd, inst.strerror)
                    continue
                raise
            for f, kind, st in entries:
                nf = normalize(nd and (nd + "/" + f) or f, True)
                if nf not in results:
                    if kind == dirkind:
                        if not ignore(nf):
                            wadd(nf)
                        if nf in dmap and matchfn(nf):
                            results[nf] = None
                    elif kind == regkind or kind == lnkkind:
                        if nf in dmap:
                            if matchfn(nf):
                                results[nf] = st
                        elif matchfn(nf) and not ignore(nf):
                            results[nf] = st
                    elif nf in dmap and matchfn(nf):
                        results[nf] = None

        # step 3: report unseen items in the dmap hash
        visit = sorted([f for f in dmap if f not in results and match(f)])
        for nf, st in zip(visit, util.statfiles([join(i) for i in visit])):
            if not st is None and not getkind(st.st_mode) in (regkind, lnkkind):
                st = None
            results[nf] = st

        del results['.hg']
        return results

    def status(self, match, ignored, clean, unknown):
        listignored, listclean, listunknown = ignored, clean, unknown
        lookup, modified, added, unknown, ignored = [], [], [], [], []
        removed, deleted, clean = [], [], []

        dmap = self._map
        ladd = lookup.append
        madd = modified.append
        aadd = added.append
        uadd = unknown.append
        iadd = ignored.append
        radd = removed.append
        dadd = deleted.append
        cadd = clean.append

        for fn, st in self.walk(match, listunknown, listignored).iteritems():
            if fn not in dmap:
                if (listignored or match.exact(fn)) and self._dirignore(fn):
                    if listignored:
                        iadd(fn)
                elif listunknown:
                    uadd(fn)
                continue

            state, mode, size, time = dmap[fn]

            if not st and state in "nma":
                dadd(fn)
            elif state == 'n':
                if (size >= 0 and
                    (size != st.st_size
                     or ((mode ^ st.st_mode) & 0100 and self._checkexec))
                    or size == -2
                    or fn in self._copymap):
                    madd(fn)
                elif time != int(st.st_mtime):
                    ladd(fn)
                elif listclean:
                    cadd(fn)
            elif state == 'm':
                madd(fn)
            elif state == 'a':
                aadd(fn)
            elif state == 'r':
                radd(fn)

        return (lookup, modified, added, removed, deleted, unknown, ignored,
                clean)
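_finddirs, _incdirs and _decdirs above maintain the reference-counted directory map behind the _dirs property; the early return in _incdirs works because once a base directory is already counted, all of its ancestors are too. A doctest-style sketch with invented paths (not from the changeset):

    >>> list(_finddirs('a/b/c'))
    ['a/b', 'a']
    >>> d = {}
    >>> _incdirs(d, 'a/b/c')
    >>> _incdirs(d, 'a/d')
    >>> sorted(d.items())
    [('a', 2), ('a/b', 1)]
    >>> _decdirs(d, 'a/d')
    >>> sorted(d.items())
    [('a', 1), ('a/b', 1)]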
@@ -1,74 +1,75 @@
1 # encoding.py - character transcoding support for Mercurial
1 # encoding.py - character transcoding support for Mercurial
2 #
2 #
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2, incorporated herein by reference.
6 # GNU General Public License version 2, incorporated herein by reference.
7
7
8 import sys, unicodedata, locale, os, error
8 import error
9 import sys, unicodedata, locale, os
9
10
10 _encodingfixup = {'646': 'ascii', 'ANSI_X3.4-1968': 'ascii'}
11 _encodingfixup = {'646': 'ascii', 'ANSI_X3.4-1968': 'ascii'}
11
12
12 try:
13 try:
13 encoding = os.environ.get("HGENCODING")
14 encoding = os.environ.get("HGENCODING")
14 if sys.platform == 'darwin' and not encoding:
15 if sys.platform == 'darwin' and not encoding:
15 # On darwin, getpreferredencoding ignores the locale environment and
16 # On darwin, getpreferredencoding ignores the locale environment and
16 # always returns mac-roman. We override this if the environment is
17 # always returns mac-roman. We override this if the environment is
17 # not C (has been customized by the user).
18 # not C (has been customized by the user).
18 locale.setlocale(locale.LC_CTYPE, '')
19 locale.setlocale(locale.LC_CTYPE, '')
19 encoding = locale.getlocale()[1]
20 encoding = locale.getlocale()[1]
20 if not encoding:
21 if not encoding:
21 encoding = locale.getpreferredencoding() or 'ascii'
22 encoding = locale.getpreferredencoding() or 'ascii'
22 encoding = _encodingfixup.get(encoding, encoding)
23 encoding = _encodingfixup.get(encoding, encoding)
23 except locale.Error:
24 except locale.Error:
24 encoding = 'ascii'
25 encoding = 'ascii'
25 encodingmode = os.environ.get("HGENCODINGMODE", "strict")
26 encodingmode = os.environ.get("HGENCODINGMODE", "strict")
26 fallbackencoding = 'ISO-8859-1'
27 fallbackencoding = 'ISO-8859-1'
27
28
28 def tolocal(s):
29 def tolocal(s):
29 """
30 """
30 Convert a string from internal UTF-8 to local encoding
31 Convert a string from internal UTF-8 to local encoding
31
32
32 All internal strings should be UTF-8 but some repos before the
33 All internal strings should be UTF-8 but some repos before the
33 implementation of locale support may contain latin1 or possibly
34 implementation of locale support may contain latin1 or possibly
34 other character sets. We attempt to decode everything strictly
35 other character sets. We attempt to decode everything strictly
35 using UTF-8, then Latin-1, and failing that, we use UTF-8 and
36 using UTF-8, then Latin-1, and failing that, we use UTF-8 and
36 replace unknown characters.
37 replace unknown characters.
37 """
38 """
38 for e in ('UTF-8', fallbackencoding):
39 for e in ('UTF-8', fallbackencoding):
39 try:
40 try:
40 u = s.decode(e) # attempt strict decoding
41 u = s.decode(e) # attempt strict decoding
41 return u.encode(encoding, "replace")
42 return u.encode(encoding, "replace")
42 except LookupError, k:
43 except LookupError, k:
43 raise error.Abort("%s, please check your locale settings" % k)
44 raise error.Abort("%s, please check your locale settings" % k)
44 except UnicodeDecodeError:
45 except UnicodeDecodeError:
45 pass
46 pass
46 u = s.decode("utf-8", "replace") # last ditch
47 u = s.decode("utf-8", "replace") # last ditch
47 return u.encode(encoding, "replace")
48 return u.encode(encoding, "replace")
48
49
49 def fromlocal(s):
50 def fromlocal(s):
50 """
51 """
51 Convert a string from the local character encoding to UTF-8
52 Convert a string from the local character encoding to UTF-8
52
53
53 We attempt to decode strings using the encoding mode set by
54 We attempt to decode strings using the encoding mode set by
54 HGENCODINGMODE, which defaults to 'strict'. In this mode, unknown
55 HGENCODINGMODE, which defaults to 'strict'. In this mode, unknown
55 characters will cause an error message. Other modes include
56 characters will cause an error message. Other modes include
56 'replace', which replaces unknown characters with a special
57 'replace', which replaces unknown characters with a special
57 Unicode character, and 'ignore', which drops the character.
58 Unicode character, and 'ignore', which drops the character.
58 """
59 """
59 try:
60 try:
60 return s.decode(encoding, encodingmode).encode("utf-8")
61 return s.decode(encoding, encodingmode).encode("utf-8")
61 except UnicodeDecodeError, inst:
62 except UnicodeDecodeError, inst:
62 sub = s[max(0, inst.start-10):inst.start+10]
63 sub = s[max(0, inst.start-10):inst.start+10]
63 raise error.Abort("decoding near '%s': %s!" % (sub, inst))
64 raise error.Abort("decoding near '%s': %s!" % (sub, inst))
64 except LookupError, k:
65 except LookupError, k:
65 raise error.Abort("%s, please check your locale settings" % k)
66 raise error.Abort("%s, please check your locale settings" % k)
66
67
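The three HGENCODINGMODE values map directly onto Python codec error handlers; a sketch with an ascii locale and hypothetical latin-1 input bytes:

    s = 'na\xefve'  # latin-1 bytes, not valid ascii
    assert s.decode('ascii', 'replace').encode('utf-8') == 'na\xef\xbf\xbdve'
    assert s.decode('ascii', 'ignore').encode('utf-8') == 'nave'
    try:
        s.decode('ascii', 'strict')  # the default mode: hard failure
    except UnicodeDecodeError:
        pass
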
67 def colwidth(s):
68 def colwidth(s):
68 "Find the column width of a UTF-8 string for display"
69 "Find the column width of a UTF-8 string for display"
69 d = s.decode(encoding, 'replace')
70 d = s.decode(encoding, 'replace')
70 if hasattr(unicodedata, 'east_asian_width'):
71 if hasattr(unicodedata, 'east_asian_width'):
71 w = unicodedata.east_asian_width
72 w = unicodedata.east_asian_width
72 return sum([w(c) in 'WF' and 2 or 1 for c in d])
73 return sum([w(c) in 'WF' and 2 or 1 for c in d])
73 return len(d)
74 return len(d)
74
75
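east_asian_width reports 'W' (wide) or 'F' (fullwidth) for characters that occupy two terminal columns; a Python 2 sketch of the counting rule above:

    import unicodedata
    u = u'\u6f22\u5b57'  # two CJK ideographs
    widths = [unicodedata.east_asian_width(c) for c in u]  # ['W', 'W']
    assert sum(w in 'WF' and 2 or 1 for w in widths) == 4
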
@@ -1,221 +1,222 b''
1 # filemerge.py - file-level merge handling for Mercurial
1 # filemerge.py - file-level merge handling for Mercurial
2 #
2 #
3 # Copyright 2006, 2007, 2008 Matt Mackall <mpm@selenic.com>
3 # Copyright 2006, 2007, 2008 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2, incorporated herein by reference.
6 # GNU General Public License version 2, incorporated herein by reference.
7
7
8 from node import short
8 from node import short
9 from i18n import _
9 from i18n import _
10 import util, os, tempfile, simplemerge, re, filecmp
10 import util, simplemerge
11 import os, tempfile, re, filecmp
11
12
12 def _toolstr(ui, tool, part, default=""):
13 def _toolstr(ui, tool, part, default=""):
13 return ui.config("merge-tools", tool + "." + part, default)
14 return ui.config("merge-tools", tool + "." + part, default)
14
15
15 def _toolbool(ui, tool, part, default=False):
16 def _toolbool(ui, tool, part, default=False):
16 return ui.configbool("merge-tools", tool + "." + part, default)
17 return ui.configbool("merge-tools", tool + "." + part, default)
17
18
18 def _findtool(ui, tool):
19 def _findtool(ui, tool):
19 if tool in ("internal:fail", "internal:local", "internal:other"):
20 if tool in ("internal:fail", "internal:local", "internal:other"):
20 return tool
21 return tool
21 k = _toolstr(ui, tool, "regkey")
22 k = _toolstr(ui, tool, "regkey")
22 if k:
23 if k:
23 p = util.lookup_reg(k, _toolstr(ui, tool, "regname"))
24 p = util.lookup_reg(k, _toolstr(ui, tool, "regname"))
24 if p:
25 if p:
25 p = util.find_exe(p + _toolstr(ui, tool, "regappend"))
26 p = util.find_exe(p + _toolstr(ui, tool, "regappend"))
26 if p:
27 if p:
27 return p
28 return p
28 return util.find_exe(_toolstr(ui, tool, "executable", tool))
29 return util.find_exe(_toolstr(ui, tool, "executable", tool))
29
30
30 def _picktool(repo, ui, path, binary, symlink):
31 def _picktool(repo, ui, path, binary, symlink):
31 def check(tool, pat, symlink, binary):
32 def check(tool, pat, symlink, binary):
32 tmsg = tool
33 tmsg = tool
33 if pat:
34 if pat:
34 tmsg += " specified for " + pat
35 tmsg += " specified for " + pat
35 if not _findtool(ui, tool):
36 if not _findtool(ui, tool):
36 if pat: # explicitly requested tool deserves a warning
37 if pat: # explicitly requested tool deserves a warning
37 ui.warn(_("couldn't find merge tool %s\n") % tmsg)
38 ui.warn(_("couldn't find merge tool %s\n") % tmsg)
38 else: # configured but non-existing tools are more silent
39 else: # configured but non-existing tools are more silent
39 ui.note(_("couldn't find merge tool %s\n") % tmsg)
40 ui.note(_("couldn't find merge tool %s\n") % tmsg)
40 elif symlink and not _toolbool(ui, tool, "symlink"):
41 elif symlink and not _toolbool(ui, tool, "symlink"):
41 ui.warn(_("tool %s can't handle symlinks\n") % tmsg)
42 ui.warn(_("tool %s can't handle symlinks\n") % tmsg)
42 elif binary and not _toolbool(ui, tool, "binary"):
43 elif binary and not _toolbool(ui, tool, "binary"):
43 ui.warn(_("tool %s can't handle binary\n") % tmsg)
44 ui.warn(_("tool %s can't handle binary\n") % tmsg)
44 elif not util.gui() and _toolbool(ui, tool, "gui"):
45 elif not util.gui() and _toolbool(ui, tool, "gui"):
45 ui.warn(_("tool %s requires a GUI\n") % tmsg)
46 ui.warn(_("tool %s requires a GUI\n") % tmsg)
46 else:
47 else:
47 return True
48 return True
48 return False
49 return False
49
50
50 # HGMERGE takes precedence
51 # HGMERGE takes precedence
51 hgmerge = os.environ.get("HGMERGE")
52 hgmerge = os.environ.get("HGMERGE")
52 if hgmerge:
53 if hgmerge:
53 return (hgmerge, hgmerge)
54 return (hgmerge, hgmerge)
54
55
55 # then patterns
56 # then patterns
56 for pat, tool in ui.configitems("merge-patterns"):
57 for pat, tool in ui.configitems("merge-patterns"):
57 mf = util.matcher(repo.root, "", [pat], [], [])[1]
58 mf = util.matcher(repo.root, "", [pat], [], [])[1]
58 if mf(path) and check(tool, pat, symlink, False):
59 if mf(path) and check(tool, pat, symlink, False):
59 toolpath = _findtool(ui, tool)
60 toolpath = _findtool(ui, tool)
60 return (tool, '"' + toolpath + '"')
61 return (tool, '"' + toolpath + '"')
61
62
62 # then merge tools
63 # then merge tools
63 tools = {}
64 tools = {}
64 for k,v in ui.configitems("merge-tools"):
65 for k,v in ui.configitems("merge-tools"):
65 t = k.split('.')[0]
66 t = k.split('.')[0]
66 if t not in tools:
67 if t not in tools:
67 tools[t] = int(_toolstr(ui, t, "priority", "0"))
68 tools[t] = int(_toolstr(ui, t, "priority", "0"))
68 names = tools.keys()
69 names = tools.keys()
69 tools = sorted([(-p,t) for t,p in tools.items()])
70 tools = sorted([(-p,t) for t,p in tools.items()])
70 uimerge = ui.config("ui", "merge")
71 uimerge = ui.config("ui", "merge")
71 if uimerge:
72 if uimerge:
72 if uimerge not in names:
73 if uimerge not in names:
73 return (uimerge, uimerge)
74 return (uimerge, uimerge)
74 tools.insert(0, (None, uimerge)) # highest priority
75 tools.insert(0, (None, uimerge)) # highest priority
75 tools.append((None, "hgmerge")) # the old default, if found
76 tools.append((None, "hgmerge")) # the old default, if found
76 for p,t in tools:
77 for p,t in tools:
77 if check(t, None, symlink, binary):
78 if check(t, None, symlink, binary):
78 toolpath = _findtool(ui, t)
79 toolpath = _findtool(ui, t)
79 return (t, '"' + toolpath + '"')
80 return (t, '"' + toolpath + '"')
80 # internal merge as last resort
81 # internal merge as last resort
81 return (not (symlink or binary) and "internal:merge" or None, None)
82 return (not (symlink or binary) and "internal:merge" or None, None)
82
83
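Tool ordering sorts negated priorities, so a larger merge-tools.<tool>.priority is tried first and ties break alphabetically; a standalone sketch with made-up tool names:

    tools = {'kdiff3': 7, 'meld': 0, 'vimdiff': -10}  # tool -> priority
    ordered = [t for p, t in sorted((-p, t) for t, p in tools.items())]
    assert ordered == ['kdiff3', 'meld', 'vimdiff']
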
83 def _eoltype(data):
84 def _eoltype(data):
84 "Guess the EOL type of a file"
85 "Guess the EOL type of a file"
85 if '\0' in data: # binary
86 if '\0' in data: # binary
86 return None
87 return None
87 if '\r\n' in data: # Windows
88 if '\r\n' in data: # Windows
88 return '\r\n'
89 return '\r\n'
89 if '\r' in data: # Old Mac
90 if '\r' in data: # Old Mac
90 return '\r'
91 return '\r'
91 if '\n' in data: # UNIX
92 if '\n' in data: # UNIX
92 return '\n'
93 return '\n'
93 return None # unknown
94 return None # unknown
94
95
95 def _matcheol(file, origfile):
96 def _matcheol(file, origfile):
96 "Convert EOL markers in a file to match origfile"
97 "Convert EOL markers in a file to match origfile"
97 tostyle = _eoltype(open(origfile, "rb").read())
98 tostyle = _eoltype(open(origfile, "rb").read())
98 if tostyle:
99 if tostyle:
99 data = open(file, "rb").read()
100 data = open(file, "rb").read()
100 style = _eoltype(data)
101 style = _eoltype(data)
101 if style:
102 if style:
102 newdata = data.replace(style, tostyle)
103 newdata = data.replace(style, tostyle)
103 if newdata != data:
104 if newdata != data:
104 open(file, "wb").write(newdata)
105 open(file, "wb").write(newdata)
105
106
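With fixeol enabled, _matcheol rewrites the merge result to the original file's ending style; the conversion step reduces to a single replace (data here is hypothetical):

    data = 'one\ntwo\n'            # merge tool produced UNIX endings
    style, tostyle = '\n', '\r\n'  # detected result style vs. original style
    assert data.replace(style, tostyle) == 'one\r\ntwo\r\n'
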
106 def filemerge(repo, mynode, orig, fcd, fco, fca):
107 def filemerge(repo, mynode, orig, fcd, fco, fca):
107 """perform a 3-way merge in the working directory
108 """perform a 3-way merge in the working directory
108
109
109 mynode = parent node before merge
110 mynode = parent node before merge
110 orig = original local filename before merge
111 orig = original local filename before merge
111 fco = other file context
112 fco = other file context
112 fca = ancestor file context
113 fca = ancestor file context
113 fcd = local file context for current/destination file
114 fcd = local file context for current/destination file
114 """
115 """
115
116
116 def temp(prefix, ctx):
117 def temp(prefix, ctx):
117 pre = "%s~%s." % (os.path.basename(ctx.path()), prefix)
118 pre = "%s~%s." % (os.path.basename(ctx.path()), prefix)
118 (fd, name) = tempfile.mkstemp(prefix=pre)
119 (fd, name) = tempfile.mkstemp(prefix=pre)
119 data = repo.wwritedata(ctx.path(), ctx.data())
120 data = repo.wwritedata(ctx.path(), ctx.data())
120 f = os.fdopen(fd, "wb")
121 f = os.fdopen(fd, "wb")
121 f.write(data)
122 f.write(data)
122 f.close()
123 f.close()
123 return name
124 return name
124
125
125 def isbin(ctx):
126 def isbin(ctx):
126 try:
127 try:
127 return util.binary(ctx.data())
128 return util.binary(ctx.data())
128 except IOError:
129 except IOError:
129 return False
130 return False
130
131
131 if not fco.cmp(fcd.data()): # files identical?
132 if not fco.cmp(fcd.data()): # files identical?
132 return None
133 return None
133
134
134 ui = repo.ui
135 ui = repo.ui
135 fd = fcd.path()
136 fd = fcd.path()
136 binary = isbin(fcd) or isbin(fco) or isbin(fca)
137 binary = isbin(fcd) or isbin(fco) or isbin(fca)
137 symlink = 'l' in fcd.flags() + fco.flags()
138 symlink = 'l' in fcd.flags() + fco.flags()
138 tool, toolpath = _picktool(repo, ui, fd, binary, symlink)
139 tool, toolpath = _picktool(repo, ui, fd, binary, symlink)
139 ui.debug(_("picked tool '%s' for %s (binary %s symlink %s)\n") %
140 ui.debug(_("picked tool '%s' for %s (binary %s symlink %s)\n") %
140 (tool, fd, binary, symlink))
141 (tool, fd, binary, symlink))
141
142
142 if not tool:
143 if not tool:
143 tool = "internal:local"
144 tool = "internal:local"
144 if ui.prompt(_(" no tool found to merge %s\n"
145 if ui.prompt(_(" no tool found to merge %s\n"
145 "keep (l)ocal or take (o)ther?") % fd,
146 "keep (l)ocal or take (o)ther?") % fd,
146 (_("&Local"), _("&Other")), _("l")) != _("l"):
147 (_("&Local"), _("&Other")), _("l")) != _("l"):
147 tool = "internal:other"
148 tool = "internal:other"
148 if tool == "internal:local":
149 if tool == "internal:local":
149 return 0
150 return 0
150 if tool == "internal:other":
151 if tool == "internal:other":
151 repo.wwrite(fd, fco.data(), fco.flags())
152 repo.wwrite(fd, fco.data(), fco.flags())
152 return 0
153 return 0
153 if tool == "internal:fail":
154 if tool == "internal:fail":
154 return 1
155 return 1
155
156
156 # do the actual merge
157 # do the actual merge
157 a = repo.wjoin(fd)
158 a = repo.wjoin(fd)
158 b = temp("base", fca)
159 b = temp("base", fca)
159 c = temp("other", fco)
160 c = temp("other", fco)
160 out = ""
161 out = ""
161 back = a + ".orig"
162 back = a + ".orig"
162 util.copyfile(a, back)
163 util.copyfile(a, back)
163
164
164 if orig != fco.path():
165 if orig != fco.path():
165 repo.ui.status(_("merging %s and %s to %s\n") % (orig, fco.path(), fd))
166 repo.ui.status(_("merging %s and %s to %s\n") % (orig, fco.path(), fd))
166 else:
167 else:
167 repo.ui.status(_("merging %s\n") % fd)
168 repo.ui.status(_("merging %s\n") % fd)
168
169
169 repo.ui.debug(_("my %s other %s ancestor %s\n") % (fcd, fco, fca))
170 repo.ui.debug(_("my %s other %s ancestor %s\n") % (fcd, fco, fca))
170
171
171 # do we attempt to simplemerge first?
172 # do we attempt to simplemerge first?
172 if _toolbool(ui, tool, "premerge", not (binary or symlink)):
173 if _toolbool(ui, tool, "premerge", not (binary or symlink)):
173 r = simplemerge.simplemerge(ui, a, b, c, quiet=True)
174 r = simplemerge.simplemerge(ui, a, b, c, quiet=True)
174 if not r:
175 if not r:
175 ui.debug(_(" premerge successful\n"))
176 ui.debug(_(" premerge successful\n"))
176 os.unlink(back)
177 os.unlink(back)
177 os.unlink(b)
178 os.unlink(b)
178 os.unlink(c)
179 os.unlink(c)
179 return 0
180 return 0
180 util.copyfile(back, a) # restore from backup and try again
181 util.copyfile(back, a) # restore from backup and try again
181
182
182 env = dict(HG_FILE=fd,
183 env = dict(HG_FILE=fd,
183 HG_MY_NODE=short(mynode),
184 HG_MY_NODE=short(mynode),
184 HG_OTHER_NODE=str(fco.changectx()),
185 HG_OTHER_NODE=str(fco.changectx()),
185 HG_MY_ISLINK='l' in fcd.flags(),
186 HG_MY_ISLINK='l' in fcd.flags(),
186 HG_OTHER_ISLINK='l' in fco.flags(),
187 HG_OTHER_ISLINK='l' in fco.flags(),
187 HG_BASE_ISLINK='l' in fca.flags())
188 HG_BASE_ISLINK='l' in fca.flags())
188
189
189 if tool == "internal:merge":
190 if tool == "internal:merge":
190 r = simplemerge.simplemerge(ui, a, b, c, label=['local', 'other'])
191 r = simplemerge.simplemerge(ui, a, b, c, label=['local', 'other'])
191 else:
192 else:
192 args = _toolstr(ui, tool, "args", '$local $base $other')
193 args = _toolstr(ui, tool, "args", '$local $base $other')
193 if "$output" in args:
194 if "$output" in args:
194 out, a = a, back # read input from backup, write to original
195 out, a = a, back # read input from backup, write to original
195 replace = dict(local=a, base=b, other=c, output=out)
196 replace = dict(local=a, base=b, other=c, output=out)
196 args = re.sub("\$(local|base|other|output)",
197 args = re.sub("\$(local|base|other|output)",
197 lambda x: '"%s"' % replace[x.group()[1:]], args)
198 lambda x: '"%s"' % replace[x.group()[1:]], args)
198 r = util.system(toolpath + ' ' + args, cwd=repo.root, environ=env)
199 r = util.system(toolpath + ' ' + args, cwd=repo.root, environ=env)
199
200
200 if not r and _toolbool(ui, tool, "checkconflicts"):
201 if not r and _toolbool(ui, tool, "checkconflicts"):
201 if re.search("^(<<<<<<< .*|=======|>>>>>>> .*)$", fcd.data(), re.MULTILINE):
202 if re.search("^(<<<<<<< .*|=======|>>>>>>> .*)$", fcd.data(), re.MULTILINE):
202 r = 1
203 r = 1
203
204
204 if not r and _toolbool(ui, tool, "checkchanged"):
205 if not r and _toolbool(ui, tool, "checkchanged"):
205 if filecmp.cmp(repo.wjoin(fd), back):
206 if filecmp.cmp(repo.wjoin(fd), back):
206 if ui.prompt(_(" output file %s appears unchanged\n"
207 if ui.prompt(_(" output file %s appears unchanged\n"
207 "was merge successful (yn)?") % fd,
208 "was merge successful (yn)?") % fd,
208 (_("&Yes"), _("&No")), _("n")) != _("y"):
209 (_("&Yes"), _("&No")), _("n")) != _("y"):
209 r = 1
210 r = 1
210
211
211 if _toolbool(ui, tool, "fixeol"):
212 if _toolbool(ui, tool, "fixeol"):
212 _matcheol(repo.wjoin(fd), back)
213 _matcheol(repo.wjoin(fd), back)
213
214
214 if r:
215 if r:
215 repo.ui.warn(_("merging %s failed!\n") % fd)
216 repo.ui.warn(_("merging %s failed!\n") % fd)
216 else:
217 else:
217 os.unlink(back)
218 os.unlink(back)
218
219
219 os.unlink(b)
220 os.unlink(b)
220 os.unlink(c)
221 os.unlink(c)
221 return r
222 return r
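The $local/$base/$other/$output placeholders in a tool's args template are expanded by the re.sub seen above; the substitution in isolation (file names are hypothetical):

    import re
    replace = dict(local='a', base='a~base', other='a~other', output='a.orig')
    args = '$local $base $other -o $output'
    cmd = re.sub(r'\$(local|base|other|output)',
                 lambda m: '"%s"' % replace[m.group()[1:]], args)
    assert cmd == '"a" "a~base" "a~other" -o "a.orig"'
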
@@ -1,298 +1,299 b''
1 # hg.py - repository classes for mercurial
1 # hg.py - repository classes for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2, incorporated herein by reference.
7 # GNU General Public License version 2, incorporated herein by reference.
8
8
9 from i18n import _
9 from i18n import _
10 from lock import release
10 from lock import release
11 import localrepo, bundlerepo, httprepo, sshrepo, statichttprepo
11 import localrepo, bundlerepo, httprepo, sshrepo, statichttprepo
12 import errno, lock, os, shutil, util, extensions, error
12 import lock, util, extensions, error
13 import merge as _merge
13 import merge as _merge
14 import verify as _verify
14 import verify as _verify
15 import errno, os, shutil
15
16
16 def _local(path):
17 def _local(path):
17 return (os.path.isfile(util.drop_scheme('file', path)) and
18 return (os.path.isfile(util.drop_scheme('file', path)) and
18 bundlerepo or localrepo)
19 bundlerepo or localrepo)
19
20
20 def parseurl(url, revs=[]):
21 def parseurl(url, revs=[]):
21 '''parse url#branch, returning url, branch + revs'''
22 '''parse url#branch, returning url, branch + revs'''
22
23
23 if '#' not in url:
24 if '#' not in url:
24 return url, (revs or None), revs and revs[-1] or None
25 return url, (revs or None), revs and revs[-1] or None
25
26
26 url, branch = url.split('#', 1)
27 url, branch = url.split('#', 1)
27 checkout = revs and revs[-1] or branch
28 checkout = revs and revs[-1] or branch
28 return url, (revs or []) + [branch], checkout
29 return url, (revs or []) + [branch], checkout
29
30
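For example, assuming this module is importable as mercurial.hg (a sketch; the URL is hypothetical):

    from mercurial.hg import parseurl
    url, revs, checkout = parseurl('http://example.com/repo#stable')
    # url == 'http://example.com/repo', revs == ['stable'], checkout == 'stable'
    url, revs, checkout = parseurl('http://example.com/repo')
    # no fragment: revs is None, checkout is None
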
30 schemes = {
31 schemes = {
31 'bundle': bundlerepo,
32 'bundle': bundlerepo,
32 'file': _local,
33 'file': _local,
33 'http': httprepo,
34 'http': httprepo,
34 'https': httprepo,
35 'https': httprepo,
35 'ssh': sshrepo,
36 'ssh': sshrepo,
36 'static-http': statichttprepo,
37 'static-http': statichttprepo,
37 }
38 }
38
39
39 def _lookup(path):
40 def _lookup(path):
40 scheme = 'file'
41 scheme = 'file'
41 if path:
42 if path:
42 c = path.find(':')
43 c = path.find(':')
43 if c > 0:
44 if c > 0:
44 scheme = path[:c]
45 scheme = path[:c]
45 thing = schemes.get(scheme) or schemes['file']
46 thing = schemes.get(scheme) or schemes['file']
46 try:
47 try:
47 return thing(path)
48 return thing(path)
48 except TypeError:
49 except TypeError:
49 return thing
50 return thing
50
51
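Scheme resolution is a prefix lookup on the text before the first ':', defaulting to 'file'; mirrored in a standalone sketch:

    path = 'static-http://example.com/repo'
    c = path.find(':')
    scheme = c > 0 and path[:c] or 'file'
    assert scheme == 'static-http'  # -> statichttprepo in the schemes table
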
51 def islocal(repo):
52 def islocal(repo):
52 '''return true if repo or path is local'''
53 '''return true if repo or path is local'''
53 if isinstance(repo, str):
54 if isinstance(repo, str):
54 try:
55 try:
55 return _lookup(repo).islocal(repo)
56 return _lookup(repo).islocal(repo)
56 except AttributeError:
57 except AttributeError:
57 return False
58 return False
58 return repo.local()
59 return repo.local()
59
60
60 def repository(ui, path='', create=False):
61 def repository(ui, path='', create=False):
61 """return a repository object for the specified path"""
62 """return a repository object for the specified path"""
62 repo = _lookup(path).instance(ui, path, create)
63 repo = _lookup(path).instance(ui, path, create)
63 ui = getattr(repo, "ui", ui)
64 ui = getattr(repo, "ui", ui)
64 for name, module in extensions.extensions():
65 for name, module in extensions.extensions():
65 hook = getattr(module, 'reposetup', None)
66 hook = getattr(module, 'reposetup', None)
66 if hook:
67 if hook:
67 hook(ui, repo)
68 hook(ui, repo)
68 return repo
69 return repo
69
70
70 def defaultdest(source):
71 def defaultdest(source):
71 '''return default destination of clone if none is given'''
72 '''return default destination of clone if none is given'''
72 return os.path.basename(os.path.normpath(source))
73 return os.path.basename(os.path.normpath(source))
73
74
74 def localpath(path):
75 def localpath(path):
75 if path.startswith('file://localhost/'):
76 if path.startswith('file://localhost/'):
76 return path[16:]
77 return path[16:]
77 if path.startswith('file://'):
78 if path.startswith('file://'):
78 return path[7:]
79 return path[7:]
79 if path.startswith('file:'):
80 if path.startswith('file:'):
80 return path[5:]
81 return path[5:]
81 return path
82 return path
82
83
83 def clone(ui, source, dest=None, pull=False, rev=None, update=True,
84 def clone(ui, source, dest=None, pull=False, rev=None, update=True,
84 stream=False):
85 stream=False):
85 """Make a copy of an existing repository.
86 """Make a copy of an existing repository.
86
87
87 Create a copy of an existing repository in a new directory. The
88 Create a copy of an existing repository in a new directory. The
88 source and destination are URLs, as passed to the repository
89 source and destination are URLs, as passed to the repository
89 function. Returns a pair of repository objects, the source and
90 function. Returns a pair of repository objects, the source and
90 newly created destination.
91 newly created destination.
91
92
92 The location of the source is added to the new repository's
93 The location of the source is added to the new repository's
93 .hg/hgrc file, as the default to be used for future pulls and
94 .hg/hgrc file, as the default to be used for future pulls and
94 pushes.
95 pushes.
95
96
96 If an exception is raised, the partly cloned/updated destination
97 If an exception is raised, the partly cloned/updated destination
97 repository will be deleted.
98 repository will be deleted.
98
99
99 Arguments:
100 Arguments:
100
101
101 source: repository object or URL
102 source: repository object or URL
102
103
103 dest: URL of destination repository to create (defaults to base
104 dest: URL of destination repository to create (defaults to base
104 name of source repository)
105 name of source repository)
105
106
106 pull: always pull from source repository, even in local case
107 pull: always pull from source repository, even in local case
107
108
108 stream: stream raw data uncompressed from repository (fast over
109 stream: stream raw data uncompressed from repository (fast over
109 LAN, slow over WAN)
110 LAN, slow over WAN)
110
111
111 rev: revision to clone up to (implies pull=True)
112 rev: revision to clone up to (implies pull=True)
112
113
113 update: update working directory after clone completes, if
114 update: update working directory after clone completes, if
114 destination is local repository (True means update to default rev,
115 destination is local repository (True means update to default rev,
115 anything else is treated as a revision)
116 anything else is treated as a revision)
116 """
117 """
117
118
118 if isinstance(source, str):
119 if isinstance(source, str):
119 origsource = ui.expandpath(source)
120 origsource = ui.expandpath(source)
120 source, rev, checkout = parseurl(origsource, rev)
121 source, rev, checkout = parseurl(origsource, rev)
121 src_repo = repository(ui, source)
122 src_repo = repository(ui, source)
122 else:
123 else:
123 src_repo = source
124 src_repo = source
124 origsource = source = src_repo.url()
125 origsource = source = src_repo.url()
125 checkout = rev and rev[-1] or None
126 checkout = rev and rev[-1] or None
126
127
127 if dest is None:
128 if dest is None:
128 dest = defaultdest(source)
129 dest = defaultdest(source)
129 ui.status(_("destination directory: %s\n") % dest)
130 ui.status(_("destination directory: %s\n") % dest)
130
131
131 dest = localpath(dest)
132 dest = localpath(dest)
132 source = localpath(source)
133 source = localpath(source)
133
134
134 if os.path.exists(dest):
135 if os.path.exists(dest):
135 if not os.path.isdir(dest):
136 if not os.path.isdir(dest):
136 raise util.Abort(_("destination '%s' already exists") % dest)
137 raise util.Abort(_("destination '%s' already exists") % dest)
137 elif os.listdir(dest):
138 elif os.listdir(dest):
138 raise util.Abort(_("destination '%s' is not empty") % dest)
139 raise util.Abort(_("destination '%s' is not empty") % dest)
139
140
140 class DirCleanup(object):
141 class DirCleanup(object):
141 def __init__(self, dir_):
142 def __init__(self, dir_):
142 self.rmtree = shutil.rmtree
143 self.rmtree = shutil.rmtree
143 self.dir_ = dir_
144 self.dir_ = dir_
144 def close(self):
145 def close(self):
145 self.dir_ = None
146 self.dir_ = None
146 def cleanup(self):
147 def cleanup(self):
147 if self.dir_:
148 if self.dir_:
148 self.rmtree(self.dir_, True)
149 self.rmtree(self.dir_, True)
149
150
150 src_lock = dest_lock = dir_cleanup = None
151 src_lock = dest_lock = dir_cleanup = None
151 try:
152 try:
152 if islocal(dest):
153 if islocal(dest):
153 dir_cleanup = DirCleanup(dest)
154 dir_cleanup = DirCleanup(dest)
154
155
155 abspath = origsource
156 abspath = origsource
156 copy = False
157 copy = False
157 if src_repo.cancopy() and islocal(dest):
158 if src_repo.cancopy() and islocal(dest):
158 abspath = os.path.abspath(util.drop_scheme('file', origsource))
159 abspath = os.path.abspath(util.drop_scheme('file', origsource))
159 copy = not pull and not rev
160 copy = not pull and not rev
160
161
161 if copy:
162 if copy:
162 try:
163 try:
163 # we use a lock here because if we race with commit, we
164 # we use a lock here because if we race with commit, we
164 # can end up with extra data in the cloned revlogs that's
165 # can end up with extra data in the cloned revlogs that's
165 # not pointed to by changesets, thus causing verify to
166 # not pointed to by changesets, thus causing verify to
166 # fail
167 # fail
167 src_lock = src_repo.lock()
168 src_lock = src_repo.lock()
168 except error.LockError:
169 except error.LockError:
169 copy = False
170 copy = False
170
171
171 if copy:
172 if copy:
172 hgdir = os.path.realpath(os.path.join(dest, ".hg"))
173 hgdir = os.path.realpath(os.path.join(dest, ".hg"))
173 if not os.path.exists(dest):
174 if not os.path.exists(dest):
174 os.mkdir(dest)
175 os.mkdir(dest)
175 else:
176 else:
176 # only clean up directories we create ourselves
177 # only clean up directories we create ourselves
177 dir_cleanup.dir_ = hgdir
178 dir_cleanup.dir_ = hgdir
178 try:
179 try:
179 dest_path = hgdir
180 dest_path = hgdir
180 os.mkdir(dest_path)
181 os.mkdir(dest_path)
181 except OSError, inst:
182 except OSError, inst:
182 if inst.errno == errno.EEXIST:
183 if inst.errno == errno.EEXIST:
183 dir_cleanup.close()
184 dir_cleanup.close()
184 raise util.Abort(_("destination '%s' already exists")
185 raise util.Abort(_("destination '%s' already exists")
185 % dest)
186 % dest)
186 raise
187 raise
187
188
188 for f in src_repo.store.copylist():
189 for f in src_repo.store.copylist():
189 src = os.path.join(src_repo.path, f)
190 src = os.path.join(src_repo.path, f)
190 dst = os.path.join(dest_path, f)
191 dst = os.path.join(dest_path, f)
191 dstbase = os.path.dirname(dst)
192 dstbase = os.path.dirname(dst)
192 if dstbase and not os.path.exists(dstbase):
193 if dstbase and not os.path.exists(dstbase):
193 os.mkdir(dstbase)
194 os.mkdir(dstbase)
194 if os.path.exists(src):
195 if os.path.exists(src):
195 if dst.endswith('data'):
196 if dst.endswith('data'):
196 # lock to avoid premature writing to the target
197 # lock to avoid premature writing to the target
197 dest_lock = lock.lock(os.path.join(dstbase, "lock"))
198 dest_lock = lock.lock(os.path.join(dstbase, "lock"))
198 util.copyfiles(src, dst)
199 util.copyfiles(src, dst)
199
200
200 # we need to re-init the repo after manually copying the data
201 # we need to re-init the repo after manually copying the data
201 # into it
202 # into it
202 dest_repo = repository(ui, dest)
203 dest_repo = repository(ui, dest)
203
204
204 else:
205 else:
205 try:
206 try:
206 dest_repo = repository(ui, dest, create=True)
207 dest_repo = repository(ui, dest, create=True)
207 except OSError, inst:
208 except OSError, inst:
208 if inst.errno == errno.EEXIST:
209 if inst.errno == errno.EEXIST:
209 dir_cleanup.close()
210 dir_cleanup.close()
210 raise util.Abort(_("destination '%s' already exists")
211 raise util.Abort(_("destination '%s' already exists")
211 % dest)
212 % dest)
212 raise
213 raise
213
214
214 revs = None
215 revs = None
215 if rev:
216 if rev:
216 if 'lookup' not in src_repo.capabilities:
217 if 'lookup' not in src_repo.capabilities:
217 raise util.Abort(_("src repository does not support revision "
218 raise util.Abort(_("src repository does not support revision "
218 "lookup and so doesn't support clone by "
219 "lookup and so doesn't support clone by "
219 "revision"))
220 "revision"))
220 revs = [src_repo.lookup(r) for r in rev]
221 revs = [src_repo.lookup(r) for r in rev]
221
222
222 if dest_repo.local():
223 if dest_repo.local():
223 dest_repo.clone(src_repo, heads=revs, stream=stream)
224 dest_repo.clone(src_repo, heads=revs, stream=stream)
224 elif src_repo.local():
225 elif src_repo.local():
225 src_repo.push(dest_repo, revs=revs)
226 src_repo.push(dest_repo, revs=revs)
226 else:
227 else:
227 raise util.Abort(_("clone from remote to remote not supported"))
228 raise util.Abort(_("clone from remote to remote not supported"))
228
229
229 if dir_cleanup:
230 if dir_cleanup:
230 dir_cleanup.close()
231 dir_cleanup.close()
231
232
232 if dest_repo.local():
233 if dest_repo.local():
233 fp = dest_repo.opener("hgrc", "w", text=True)
234 fp = dest_repo.opener("hgrc", "w", text=True)
234 fp.write("[paths]\n")
235 fp.write("[paths]\n")
235 fp.write("default = %s\n" % abspath)
236 fp.write("default = %s\n" % abspath)
236 fp.close()
237 fp.close()
237
238
238 if update:
239 if update:
239 dest_repo.ui.status(_("updating working directory\n"))
240 dest_repo.ui.status(_("updating working directory\n"))
240 if update is not True:
241 if update is not True:
241 checkout = update
242 checkout = update
242 for test in (checkout, 'default', 'tip'):
243 for test in (checkout, 'default', 'tip'):
243 try:
244 try:
244 uprev = dest_repo.lookup(test)
245 uprev = dest_repo.lookup(test)
245 break
246 break
246 except:
247 except:
247 continue
248 continue
248 _update(dest_repo, uprev)
249 _update(dest_repo, uprev)
249
250
250 return src_repo, dest_repo
251 return src_repo, dest_repo
251 finally:
252 finally:
252 release(src_lock, dest_lock)
253 release(src_lock, dest_lock)
253 if dir_cleanup is not None:
254 if dir_cleanup is not None:
254 dir_cleanup.cleanup()
255 dir_cleanup.cleanup()
255
256
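A typical programmatic use, mirroring 'hg clone' (a sketch; assumes mercurial.ui is importable and the hypothetical source URL is reachable):

    from mercurial import ui as uimod, hg
    myui = uimod.ui()
    src_repo, dest_repo = hg.clone(myui, 'http://example.com/repo', 'repo-copy')
    # repo-copy/.hg/hgrc now contains: [paths] default = http://example.com/repo
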
256 def _showstats(repo, stats):
257 def _showstats(repo, stats):
257 stats = ((stats[0], _("updated")),
258 stats = ((stats[0], _("updated")),
258 (stats[1], _("merged")),
259 (stats[1], _("merged")),
259 (stats[2], _("removed")),
260 (stats[2], _("removed")),
260 (stats[3], _("unresolved")))
261 (stats[3], _("unresolved")))
261 note = ", ".join([_("%d files %s") % s for s in stats])
262 note = ", ".join([_("%d files %s") % s for s in stats])
262 repo.ui.status("%s\n" % note)
263 repo.ui.status("%s\n" % note)
263
264
264 def update(repo, node):
265 def update(repo, node):
265 """update the working directory to node, merging linear changes"""
266 """update the working directory to node, merging linear changes"""
266 stats = _merge.update(repo, node, False, False, None)
267 stats = _merge.update(repo, node, False, False, None)
267 _showstats(repo, stats)
268 _showstats(repo, stats)
268 if stats[3]:
269 if stats[3]:
269 repo.ui.status(_("use 'hg resolve' to retry unresolved file merges\n"))
270 repo.ui.status(_("use 'hg resolve' to retry unresolved file merges\n"))
270 return stats[3] > 0
271 return stats[3] > 0
271
272
272 # naming conflict in clone()
273 # naming conflict in clone()
273 _update = update
274 _update = update
274
275
275 def clean(repo, node, show_stats=True):
276 def clean(repo, node, show_stats=True):
276 """forcibly switch the working directory to node, clobbering changes"""
277 """forcibly switch the working directory to node, clobbering changes"""
277 stats = _merge.update(repo, node, False, True, None)
278 stats = _merge.update(repo, node, False, True, None)
278 if show_stats: _showstats(repo, stats)
279 if show_stats: _showstats(repo, stats)
279 return stats[3] > 0
280 return stats[3] > 0
280
281
281 def merge(repo, node, force=None, remind=True):
282 def merge(repo, node, force=None, remind=True):
282 """branch merge with node, resolving changes"""
283 """branch merge with node, resolving changes"""
283 stats = _merge.update(repo, node, True, force, False)
284 stats = _merge.update(repo, node, True, force, False)
284 _showstats(repo, stats)
285 _showstats(repo, stats)
285 if stats[3]:
286 if stats[3]:
286 repo.ui.status(_("use 'hg resolve' to retry unresolved file merges "
287 repo.ui.status(_("use 'hg resolve' to retry unresolved file merges "
287 "or 'hg up --clean' to abandon\n"))
288 "or 'hg up --clean' to abandon\n"))
288 elif remind:
289 elif remind:
289 repo.ui.status(_("(branch merge, don't forget to commit)\n"))
290 repo.ui.status(_("(branch merge, don't forget to commit)\n"))
290 return stats[3] > 0
291 return stats[3] > 0
291
292
292 def revert(repo, node, choose):
293 def revert(repo, node, choose):
293 """revert changes to revision in node without updating dirstate"""
294 """revert changes to revision in node without updating dirstate"""
294 return _merge.update(repo, node, False, True, choose)[3] > 0
295 return _merge.update(repo, node, False, True, choose)[3] > 0
295
296
296 def verify(repo):
297 def verify(repo):
297 """verify the consistency of a repository"""
298 """verify the consistency of a repository"""
298 return _verify.verify(repo)
299 return _verify.verify(repo)
@@ -1,127 +1,127 b''
1 # hook.py - hook support for mercurial
1 # hook.py - hook support for mercurial
2 #
2 #
3 # Copyright 2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2, incorporated herein by reference.
6 # GNU General Public License version 2, incorporated herein by reference.
7
7
8 from i18n import _
8 from i18n import _
9 import util, os, sys
9 import os, sys
10 from mercurial import extensions
10 import extensions, util
11
11
12 def _pythonhook(ui, repo, name, hname, funcname, args, throw):
12 def _pythonhook(ui, repo, name, hname, funcname, args, throw):
13 '''call python hook. hook is callable object, looked up as
13 '''call python hook. hook is callable object, looked up as
14 name in python module. if callable returns "true", hook
14 name in python module. if callable returns "true", hook
15 fails, else passes. if hook raises exception, treated as
15 fails, else passes. if hook raises exception, treated as
16 hook failure. exception propagates if throw is "true".
16 hook failure. exception propagates if throw is "true".
17
17
18 reason for "true" meaning "hook failed" is so that
18 reason for "true" meaning "hook failed" is so that
19 unmodified commands (e.g. mercurial.commands.update) can
19 unmodified commands (e.g. mercurial.commands.update) can
20 be run as hooks without wrappers to convert return values.'''
20 be run as hooks without wrappers to convert return values.'''
21
21
22 ui.note(_("calling hook %s: %s\n") % (hname, funcname))
22 ui.note(_("calling hook %s: %s\n") % (hname, funcname))
23 obj = funcname
23 obj = funcname
24 if not callable(obj):
24 if not callable(obj):
25 d = funcname.rfind('.')
25 d = funcname.rfind('.')
26 if d == -1:
26 if d == -1:
27 raise util.Abort(_('%s hook is invalid ("%s" not in '
27 raise util.Abort(_('%s hook is invalid ("%s" not in '
28 'a module)') % (hname, funcname))
28 'a module)') % (hname, funcname))
29 modname = funcname[:d]
29 modname = funcname[:d]
30 try:
30 try:
31 obj = __import__(modname)
31 obj = __import__(modname)
32 except ImportError:
32 except ImportError:
33 try:
33 try:
34 # extensions are loaded with hgext_ prefix
34 # extensions are loaded with hgext_ prefix
35 obj = __import__("hgext_%s" % modname)
35 obj = __import__("hgext_%s" % modname)
36 except ImportError:
36 except ImportError:
37 raise util.Abort(_('%s hook is invalid '
37 raise util.Abort(_('%s hook is invalid '
38 '(import of "%s" failed)') %
38 '(import of "%s" failed)') %
39 (hname, modname))
39 (hname, modname))
40 try:
40 try:
41 for p in funcname.split('.')[1:]:
41 for p in funcname.split('.')[1:]:
42 obj = getattr(obj, p)
42 obj = getattr(obj, p)
43 except AttributeError:
43 except AttributeError:
44 raise util.Abort(_('%s hook is invalid '
44 raise util.Abort(_('%s hook is invalid '
45 '("%s" is not defined)') %
45 '("%s" is not defined)') %
46 (hname, funcname))
46 (hname, funcname))
47 if not callable(obj):
47 if not callable(obj):
48 raise util.Abort(_('%s hook is invalid '
48 raise util.Abort(_('%s hook is invalid '
49 '("%s" is not callable)') %
49 '("%s" is not callable)') %
50 (hname, funcname))
50 (hname, funcname))
51 try:
51 try:
52 r = obj(ui=ui, repo=repo, hooktype=name, **args)
52 r = obj(ui=ui, repo=repo, hooktype=name, **args)
53 except KeyboardInterrupt:
53 except KeyboardInterrupt:
54 raise
54 raise
55 except Exception, exc:
55 except Exception, exc:
56 if isinstance(exc, util.Abort):
56 if isinstance(exc, util.Abort):
57 ui.warn(_('error: %s hook failed: %s\n') %
57 ui.warn(_('error: %s hook failed: %s\n') %
58 (hname, exc.args[0]))
58 (hname, exc.args[0]))
59 else:
59 else:
60 ui.warn(_('error: %s hook raised an exception: '
60 ui.warn(_('error: %s hook raised an exception: '
61 '%s\n') % (hname, exc))
61 '%s\n') % (hname, exc))
62 if throw:
62 if throw:
63 raise
63 raise
64 ui.traceback()
64 ui.traceback()
65 return True
65 return True
66 if r:
66 if r:
67 if throw:
67 if throw:
68 raise util.Abort(_('%s hook failed') % hname)
68 raise util.Abort(_('%s hook failed') % hname)
69 ui.warn(_('warning: %s hook failed\n') % hname)
69 ui.warn(_('warning: %s hook failed\n') % hname)
70 return r
70 return r
71
71
72 def _exthook(ui, repo, name, cmd, args, throw):
72 def _exthook(ui, repo, name, cmd, args, throw):
73 ui.note(_("running hook %s: %s\n") % (name, cmd))
73 ui.note(_("running hook %s: %s\n") % (name, cmd))
74
74
75 env = {}
75 env = {}
76 for k, v in args.iteritems():
76 for k, v in args.iteritems():
77 if callable(v):
77 if callable(v):
78 v = v()
78 v = v()
79 env['HG_' + k.upper()] = v
79 env['HG_' + k.upper()] = v
80
80
81 if repo:
81 if repo:
82 cwd = repo.root
82 cwd = repo.root
83 else:
83 else:
84 cwd = os.getcwd()
84 cwd = os.getcwd()
85 r = util.system(cmd, environ=env, cwd=cwd)
85 r = util.system(cmd, environ=env, cwd=cwd)
86 if r:
86 if r:
87 desc, r = util.explain_exit(r)
87 desc, r = util.explain_exit(r)
88 if throw:
88 if throw:
89 raise util.Abort(_('%s hook %s') % (name, desc))
89 raise util.Abort(_('%s hook %s') % (name, desc))
90 ui.warn(_('warning: %s hook %s\n') % (name, desc))
90 ui.warn(_('warning: %s hook %s\n') % (name, desc))
91 return r
91 return r
92
92
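Every hook argument is exported to the child process with an HG_ prefix, so a shell hook reads e.g. $HG_NODE; the mapping amounts to (sample values are hypothetical):

    args = {'node': '0123abcd', 'source': 'push'}
    env = dict(('HG_' + k.upper(), v) for k, v in args.iteritems())
    assert env == {'HG_NODE': '0123abcd', 'HG_SOURCE': 'push'}
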
93 _redirect = False
93 _redirect = False
94 def redirect(state):
94 def redirect(state):
95 global _redirect
95 global _redirect
96 _redirect = state
96 _redirect = state
97
97
98 def hook(ui, repo, name, throw=False, **args):
98 def hook(ui, repo, name, throw=False, **args):
99 r = False
99 r = False
100
100
101 if _redirect:
101 if _redirect:
102 # temporarily redirect stdout to stderr
102 # temporarily redirect stdout to stderr
103 oldstdout = os.dup(sys.__stdout__.fileno())
103 oldstdout = os.dup(sys.__stdout__.fileno())
104 os.dup2(sys.__stderr__.fileno(), sys.__stdout__.fileno())
104 os.dup2(sys.__stderr__.fileno(), sys.__stdout__.fileno())
105
105
106 try:
106 try:
107 for hname, cmd in ui.configitems('hooks'):
107 for hname, cmd in ui.configitems('hooks'):
108 if hname.split('.')[0] != name or not cmd:
108 if hname.split('.')[0] != name or not cmd:
109 continue
109 continue
110 if callable(cmd):
110 if callable(cmd):
111 r = _pythonhook(ui, repo, name, hname, cmd, args, throw) or r
111 r = _pythonhook(ui, repo, name, hname, cmd, args, throw) or r
112 elif cmd.startswith('python:'):
112 elif cmd.startswith('python:'):
113 if cmd.count(':') == 2:
113 if cmd.count(':') == 2:
114 path, cmd = cmd[7:].split(':')
114 path, cmd = cmd[7:].split(':')
115 mod = extensions.loadpath(path, 'hghook.%s' % hname)
115 mod = extensions.loadpath(path, 'hghook.%s' % hname)
116 hookfn = getattr(mod, cmd)
116 hookfn = getattr(mod, cmd)
117 else:
117 else:
118 hookfn = cmd[7:].strip()
118 hookfn = cmd[7:].strip()
119 r = _pythonhook(ui, repo, name, hname, hookfn, args, throw) or r
119 r = _pythonhook(ui, repo, name, hname, hookfn, args, throw) or r
120 else:
120 else:
121 r = _exthook(ui, repo, hname, cmd, args, throw) or r
121 r = _exthook(ui, repo, hname, cmd, args, throw) or r
122 finally:
122 finally:
123 if _redirect:
123 if _redirect:
124 os.dup2(oldstdout, sys.__stdout__.fileno())
124 os.dup2(oldstdout, sys.__stdout__.fileno())
125 os.close(oldstdout)
125 os.close(oldstdout)
126
126
127 return r
127 return r
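A [hooks] value may be a shell command, 'python:module.func', or, per the loadpath branch above, 'python:/path/file.py:func'; an in-process hook is any callable of this shape (name and body are illustrative):

    def myhook(ui, repo, hooktype, **kwargs):
        # a true return value marks the hook, and hence the command, as failed
        ui.warn('%s hook vetoed in %s\n' % (hooktype, repo.root))
        return True
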
@@ -1,244 +1,245 b''
1 # httprepo.py - HTTP repository proxy classes for mercurial
1 # httprepo.py - HTTP repository proxy classes for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2, incorporated herein by reference.
7 # GNU General Public License version 2, incorporated herein by reference.
8
8
9 from node import bin, hex, nullid
9 from node import bin, hex, nullid
10 from i18n import _
10 from i18n import _
11 import repo, os, urllib, urllib2, urlparse, zlib, util, httplib
11 import repo, changegroup, statichttprepo, error, url, util
12 import errno, socket, changegroup, statichttprepo, error, url
12 import os, urllib, urllib2, urlparse, zlib, httplib
13 import errno, socket
13
14
14 def zgenerator(f):
15 def zgenerator(f):
15 zd = zlib.decompressobj()
16 zd = zlib.decompressobj()
16 try:
17 try:
17 for chunk in util.filechunkiter(f):
18 for chunk in util.filechunkiter(f):
18 yield zd.decompress(chunk)
19 yield zd.decompress(chunk)
19 except httplib.HTTPException:
20 except httplib.HTTPException:
20 raise IOError(None, _('connection ended unexpectedly'))
21 raise IOError(None, _('connection ended unexpectedly'))
21 yield zd.flush()
22 yield zd.flush()
22
23
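The same inflate-in-chunks pattern outside the HTTP plumbing (Python 2 sketch):

    import zlib
    blob = zlib.compress('chunk one, chunk two')
    zd = zlib.decompressobj()
    parts = [zd.decompress(blob[i:i + 8]) for i in xrange(0, len(blob), 8)]
    parts.append(zd.flush())
    assert ''.join(parts) == 'chunk one, chunk two'
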
23 class httprepository(repo.repository):
24 class httprepository(repo.repository):
24 def __init__(self, ui, path):
25 def __init__(self, ui, path):
25 self.path = path
26 self.path = path
26 self.caps = None
27 self.caps = None
27 self.handler = None
28 self.handler = None
28 scheme, netloc, urlpath, query, frag = urlparse.urlsplit(path)
29 scheme, netloc, urlpath, query, frag = urlparse.urlsplit(path)
29 if query or frag:
30 if query or frag:
30 raise util.Abort(_('unsupported URL component: "%s"') %
31 raise util.Abort(_('unsupported URL component: "%s"') %
31 (query or frag))
32 (query or frag))
32
33
33 # urllib cannot handle URLs with embedded user or passwd
34 # urllib cannot handle URLs with embedded user or passwd
34 self._url, authinfo = url.getauthinfo(path)
35 self._url, authinfo = url.getauthinfo(path)
35
36
36 self.ui = ui
37 self.ui = ui
37 self.ui.debug(_('using %s\n') % self._url)
38 self.ui.debug(_('using %s\n') % self._url)
38
39
39 self.urlopener = url.opener(ui, authinfo)
40 self.urlopener = url.opener(ui, authinfo)
40
41
41 def __del__(self):
42 def __del__(self):
42 for h in self.urlopener.handlers:
43 for h in self.urlopener.handlers:
43 h.close()
44 h.close()
44 if hasattr(h, "close_all"):
45 if hasattr(h, "close_all"):
45 h.close_all()
46 h.close_all()
46
47
47 def url(self):
48 def url(self):
48 return self.path
49 return self.path
49
50
50 # look up capabilities only when needed
51 # look up capabilities only when needed
51
52
52 def get_caps(self):
53 def get_caps(self):
53 if self.caps is None:
54 if self.caps is None:
54 try:
55 try:
55 self.caps = set(self.do_read('capabilities').split())
56 self.caps = set(self.do_read('capabilities').split())
56 except error.RepoError:
57 except error.RepoError:
57 self.caps = set()
58 self.caps = set()
58 self.ui.debug(_('capabilities: %s\n') %
59 self.ui.debug(_('capabilities: %s\n') %
59 (' '.join(self.caps or ['none'])))
60 (' '.join(self.caps or ['none'])))
60 return self.caps
61 return self.caps
61
62
62 capabilities = property(get_caps)
63 capabilities = property(get_caps)
63
64
64 def lock(self):
65 def lock(self):
65 raise util.Abort(_('operation not supported over http'))
66 raise util.Abort(_('operation not supported over http'))
66
67
67 def do_cmd(self, cmd, **args):
68 def do_cmd(self, cmd, **args):
68 data = args.pop('data', None)
69 data = args.pop('data', None)
69 headers = args.pop('headers', {})
70 headers = args.pop('headers', {})
70 self.ui.debug(_("sending %s command\n") % cmd)
71 self.ui.debug(_("sending %s command\n") % cmd)
71 q = {"cmd": cmd}
72 q = {"cmd": cmd}
72 q.update(args)
73 q.update(args)
73 qs = '?%s' % urllib.urlencode(q)
74 qs = '?%s' % urllib.urlencode(q)
74 cu = "%s%s" % (self._url, qs)
75 cu = "%s%s" % (self._url, qs)
75 try:
76 try:
76 if data:
77 if data:
77 self.ui.debug(_("sending %s bytes\n") % len(data))
78 self.ui.debug(_("sending %s bytes\n") % len(data))
78 resp = self.urlopener.open(urllib2.Request(cu, data, headers))
79 resp = self.urlopener.open(urllib2.Request(cu, data, headers))
79 except urllib2.HTTPError, inst:
80 except urllib2.HTTPError, inst:
80 if inst.code == 401:
81 if inst.code == 401:
81 raise util.Abort(_('authorization failed'))
82 raise util.Abort(_('authorization failed'))
82 raise
83 raise
83 except httplib.HTTPException, inst:
84 except httplib.HTTPException, inst:
84 self.ui.debug(_('http error while sending %s command\n') % cmd)
85 self.ui.debug(_('http error while sending %s command\n') % cmd)
85 self.ui.traceback()
86 self.ui.traceback()
86 raise IOError(None, inst)
87 raise IOError(None, inst)
87 except IndexError:
88 except IndexError:
88 # this only happens with Python 2.3, later versions raise URLError
89 # this only happens with Python 2.3, later versions raise URLError
89 raise util.Abort(_('http error, possibly caused by proxy setting'))
90 raise util.Abort(_('http error, possibly caused by proxy setting'))
90 # record the url we got redirected to
91 # record the url we got redirected to
91 resp_url = resp.geturl()
92 resp_url = resp.geturl()
92 if resp_url.endswith(qs):
93 if resp_url.endswith(qs):
93 resp_url = resp_url[:-len(qs)]
94 resp_url = resp_url[:-len(qs)]
94 if self._url != resp_url:
95 if self._url != resp_url:
95 self.ui.status(_('real URL is %s\n') % resp_url)
96 self.ui.status(_('real URL is %s\n') % resp_url)
96 self._url = resp_url
97 self._url = resp_url
97 try:
98 try:
98 proto = resp.getheader('content-type')
99 proto = resp.getheader('content-type')
99 except AttributeError:
100 except AttributeError:
100 proto = resp.headers['content-type']
101 proto = resp.headers['content-type']
101
102
102 safeurl = url.hidepassword(self._url)
103 safeurl = url.hidepassword(self._url)
103 # accept old "text/plain" and "application/hg-changegroup" for now
104 # accept old "text/plain" and "application/hg-changegroup" for now
104 if not (proto.startswith('application/mercurial-') or
105 if not (proto.startswith('application/mercurial-') or
105 proto.startswith('text/plain') or
106 proto.startswith('text/plain') or
106 proto.startswith('application/hg-changegroup')):
107 proto.startswith('application/hg-changegroup')):
107 self.ui.debug(_("requested URL: '%s'\n") % url.hidepassword(cu))
108 self.ui.debug(_("requested URL: '%s'\n") % url.hidepassword(cu))
108 raise error.RepoError(_("'%s' does not appear to be an hg repository")
109 raise error.RepoError(_("'%s' does not appear to be an hg repository")
109 % safeurl)
110 % safeurl)
110
111
111 if proto.startswith('application/mercurial-'):
112 if proto.startswith('application/mercurial-'):
112 try:
113 try:
113 version = proto.split('-', 1)[1]
114 version = proto.split('-', 1)[1]
114 version_info = tuple([int(n) for n in version.split('.')])
115 version_info = tuple([int(n) for n in version.split('.')])
115 except ValueError:
116 except ValueError:
116 raise error.RepoError(_("'%s' sent a broken Content-Type "
117 raise error.RepoError(_("'%s' sent a broken Content-Type "
117 "header (%s)") % (safeurl, proto))
118 "header (%s)") % (safeurl, proto))
118 if version_info > (0, 1):
119 if version_info > (0, 1):
119 raise error.RepoError(_("'%s' uses newer protocol %s") %
120 raise error.RepoError(_("'%s' uses newer protocol %s") %
120 (safeurl, version))
121 (safeurl, version))
121
122
122 return resp
123 return resp
123
124
124 def do_read(self, cmd, **args):
125 def do_read(self, cmd, **args):
125 fp = self.do_cmd(cmd, **args)
126 fp = self.do_cmd(cmd, **args)
126 try:
127 try:
127 return fp.read()
128 return fp.read()
128 finally:
129 finally:
129 # if using keepalive, allow connection to be reused
130 # if using keepalive, allow connection to be reused
130 fp.close()
131 fp.close()
131
132
132 def lookup(self, key):
133 def lookup(self, key):
133 self.requirecap('lookup', _('look up remote revision'))
134 self.requirecap('lookup', _('look up remote revision'))
134 d = self.do_cmd("lookup", key = key).read()
135 d = self.do_cmd("lookup", key = key).read()
135 success, data = d[:-1].split(' ', 1)
136 success, data = d[:-1].split(' ', 1)
136 if int(success):
137 if int(success):
137 return bin(data)
138 return bin(data)
138 raise error.RepoError(data)
139 raise error.RepoError(data)
139
140
140 def heads(self):
141 def heads(self):
141 d = self.do_read("heads")
142 d = self.do_read("heads")
142 try:
143 try:
143 return map(bin, d[:-1].split(" "))
144 return map(bin, d[:-1].split(" "))
144 except:
145 except:
145 raise error.ResponseError(_("unexpected response:"), d)
146 raise error.ResponseError(_("unexpected response:"), d)
146
147
147 def branches(self, nodes):
148 def branches(self, nodes):
148 n = " ".join(map(hex, nodes))
149 n = " ".join(map(hex, nodes))
149 d = self.do_read("branches", nodes=n)
150 d = self.do_read("branches", nodes=n)
150 try:
151 try:
151 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
152 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
152 return br
153 return br
153 except:
154 except:
154 raise error.ResponseError(_("unexpected response:"), d)
155 raise error.ResponseError(_("unexpected response:"), d)
155
156
156 def between(self, pairs):
157 def between(self, pairs):
157 batch = 8 # avoid giant requests
158 batch = 8 # avoid giant requests
158 r = []
159 r = []
159 for i in xrange(0, len(pairs), batch):
160 for i in xrange(0, len(pairs), batch):
160 n = " ".join(["-".join(map(hex, p)) for p in pairs[i:i + batch]])
161 n = " ".join(["-".join(map(hex, p)) for p in pairs[i:i + batch]])
161 d = self.do_read("between", pairs=n)
162 d = self.do_read("between", pairs=n)
162 try:
163 try:
163 r += [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
164 r += [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
164 except:
165 except:
165 raise error.ResponseError(_("unexpected response:"), d)
166 raise error.ResponseError(_("unexpected response:"), d)
166 return r
167 return r
167
168
168 def changegroup(self, nodes, kind):
169 def changegroup(self, nodes, kind):
169 n = " ".join(map(hex, nodes))
170 n = " ".join(map(hex, nodes))
170 f = self.do_cmd("changegroup", roots=n)
171 f = self.do_cmd("changegroup", roots=n)
171 return util.chunkbuffer(zgenerator(f))
172 return util.chunkbuffer(zgenerator(f))
172
173
173 def changegroupsubset(self, bases, heads, source):
174 def changegroupsubset(self, bases, heads, source):
174 self.requirecap('changegroupsubset', _('look up remote changes'))
175 self.requirecap('changegroupsubset', _('look up remote changes'))
175 baselst = " ".join([hex(n) for n in bases])
176 baselst = " ".join([hex(n) for n in bases])
176 headlst = " ".join([hex(n) for n in heads])
177 headlst = " ".join([hex(n) for n in heads])
177 f = self.do_cmd("changegroupsubset", bases=baselst, heads=headlst)
178 f = self.do_cmd("changegroupsubset", bases=baselst, heads=headlst)
178 return util.chunkbuffer(zgenerator(f))
179 return util.chunkbuffer(zgenerator(f))
179
180
180 def unbundle(self, cg, heads, source):
181 def unbundle(self, cg, heads, source):
181 # have to stream bundle to a temp file because we do not have
182 # have to stream bundle to a temp file because we do not have
182 # http 1.1 chunked transfer.
183 # http 1.1 chunked transfer.
183
184
184 type = ""
185 type = ""
185 types = self.capable('unbundle')
186 types = self.capable('unbundle')
186 # servers older than d1b16a746db6 will send 'unbundle' as a
187 # servers older than d1b16a746db6 will send 'unbundle' as a
187 # boolean capability
188 # boolean capability
188 try:
189 try:
189 types = types.split(',')
190 types = types.split(',')
190 except AttributeError:
191 except AttributeError:
191 types = [""]
192 types = [""]
192 if types:
193 if types:
193 for x in types:
194 for x in types:
194 if x in changegroup.bundletypes:
195 if x in changegroup.bundletypes:
195 type = x
196 type = x
196 break
197 break
197
198
198 tempname = changegroup.writebundle(cg, None, type)
199 tempname = changegroup.writebundle(cg, None, type)
199 fp = url.httpsendfile(tempname, "rb")
200 fp = url.httpsendfile(tempname, "rb")
200 try:
201 try:
201 try:
202 try:
202 resp = self.do_read(
203 resp = self.do_read(
203 'unbundle', data=fp,
204 'unbundle', data=fp,
204 headers={'Content-Type': 'application/octet-stream'},
205 headers={'Content-Type': 'application/octet-stream'},
205 heads=' '.join(map(hex, heads)))
206 heads=' '.join(map(hex, heads)))
206 resp_code, output = resp.split('\n', 1)
207 resp_code, output = resp.split('\n', 1)
207 try:
208 try:
208 ret = int(resp_code)
209 ret = int(resp_code)
209 except ValueError, err:
210 except ValueError, err:
210 raise error.ResponseError(
211 raise error.ResponseError(
211 _('push failed (unexpected response):'), resp)
212 _('push failed (unexpected response):'), resp)
212 self.ui.write(output)
213 self.ui.write(output)
213 return ret
214 return ret
214 except socket.error, err:
215 except socket.error, err:
215 if err[0] in (errno.ECONNRESET, errno.EPIPE):
216 if err[0] in (errno.ECONNRESET, errno.EPIPE):
216 raise util.Abort(_('push failed: %s') % err[1])
217 raise util.Abort(_('push failed: %s') % err[1])
217 raise util.Abort(err[1])
218 raise util.Abort(err[1])
218 finally:
219 finally:
219 fp.close()
220 fp.close()
220 os.unlink(tempname)
221 os.unlink(tempname)
221
222
222 def stream_out(self):
223 def stream_out(self):
223 return self.do_cmd('stream_out')
224 return self.do_cmd('stream_out')
224
225
225 class httpsrepository(httprepository):
226 class httpsrepository(httprepository):
226 def __init__(self, ui, path):
227 def __init__(self, ui, path):
227 if not url.has_https:
228 if not url.has_https:
228 raise util.Abort(_('Python support for SSL and HTTPS '
229 raise util.Abort(_('Python support for SSL and HTTPS '
229 'is not installed'))
230 'is not installed'))
230 httprepository.__init__(self, ui, path)
231 httprepository.__init__(self, ui, path)
231
232
232 def instance(ui, path, create):
233 def instance(ui, path, create):
233 if create:
234 if create:
234 raise util.Abort(_('cannot create new http repository'))
235 raise util.Abort(_('cannot create new http repository'))
235 try:
236 try:
236 if path.startswith('https:'):
237 if path.startswith('https:'):
237 inst = httpsrepository(ui, path)
238 inst = httpsrepository(ui, path)
238 else:
239 else:
239 inst = httprepository(ui, path)
240 inst = httprepository(ui, path)
240 inst.between([(nullid, nullid)])
241 inst.between([(nullid, nullid)])
241 return inst
242 return inst
242 except error.RepoError:
243 except error.RepoError:
243 ui.note('(falling back to static-http)\n')
244 ui.note('(falling back to static-http)\n')
244 return statichttprepo.instance(ui, "static-" + path, create)
245 return statichttprepo.instance(ui, "static-" + path, create)
@@ -1,47 +1,48 b''
1 # i18n.py - internationalization support for mercurial
1 # i18n.py - internationalization support for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2, incorporated herein by reference.
6 # GNU General Public License version 2, incorporated herein by reference.
7
7
8 import gettext, sys, os, encoding
8 import encoding
9 import gettext, sys, os
9
10
10 # modelled after templater.templatepath:
11 # modelled after templater.templatepath:
11 if hasattr(sys, 'frozen'):
12 if hasattr(sys, 'frozen'):
12 module = sys.executable
13 module = sys.executable
13 else:
14 else:
14 module = __file__
15 module = __file__
15
16
16 base = os.path.dirname(module)
17 base = os.path.dirname(module)
17 for dir in ('.', '..'):
18 for dir in ('.', '..'):
18 localedir = os.path.normpath(os.path.join(base, dir, 'locale'))
19 localedir = os.path.normpath(os.path.join(base, dir, 'locale'))
19 if os.path.isdir(localedir):
20 if os.path.isdir(localedir):
20 break
21 break
21
22
22 t = gettext.translation('hg', localedir, fallback=True)
23 t = gettext.translation('hg', localedir, fallback=True)
23
24
24 def gettext(message):
25 def gettext(message):
25 """Translate message.
26 """Translate message.
26
27
27 The message is looked up in the catalog to get a Unicode string,
28 The message is looked up in the catalog to get a Unicode string,
28 which is encoded in the local encoding before being returned.
29 which is encoded in the local encoding before being returned.
29
30
30 Important: message is restricted to characters in the encoding
31 Important: message is restricted to characters in the encoding
31 given by sys.getdefaultencoding() which is most likely 'ascii'.
32 given by sys.getdefaultencoding() which is most likely 'ascii'.
32 """
33 """
33 # If message is None, t.ugettext will return u'None' as the
34 # If message is None, t.ugettext will return u'None' as the
34 # translation whereas our callers expect us to return None.
35 # translation whereas our callers expect us to return None.
35 if message is None:
36 if message is None:
36 return message
37 return message
37
38
38 # We cannot just run the text through encoding.tolocal since that
39 # We cannot just run the text through encoding.tolocal since that
39 # leads to infinite recursion when encoding._encoding is invalid.
40 # leads to infinite recursion when encoding._encoding is invalid.
40 try:
41 try:
41 u = t.ugettext(message)
42 u = t.ugettext(message)
42 return u.encode(encoding.encoding, "replace")
43 return u.encode(encoding.encoding, "replace")
43 except LookupError:
44 except LookupError:
44 return message
45 return message
45
46
46 _ = gettext
47 _ = gettext
47
48
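Because the translation object above is created with fallback=True, a missing 'hg' catalog degrades to gettext.NullTranslations instead of raising IOError, so untranslated messages pass through unchanged. A small self-contained illustration (the locale path is deliberately bogus):

import gettext
t = gettext.translation('hg', '/no/such/locale', fallback=True)
# NullTranslations hands back its input untouched
assert t.gettext('abort: %s\n') == 'abort: %s\n'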
@@ -1,90 +1,91 b''
1 # ignore.py - ignored file handling for mercurial
1 # ignore.py - ignored file handling for mercurial
2 #
2 #
3 # Copyright 2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2, incorporated herein by reference.
6 # GNU General Public License version 2, incorporated herein by reference.
7
7
8 from i18n import _
8 from i18n import _
9 import util, re
9 import util
10 import re
10
11
11 _commentre = None
12 _commentre = None
12
13
13 def _parselines(fp):
14 def _parselines(fp):
14 for line in fp:
15 for line in fp:
15 if "#" in line:
16 if "#" in line:
16 global _commentre
17 global _commentre
17 if not _commentre:
18 if not _commentre:
18 _commentre = re.compile(r'((^|[^\\])(\\\\)*)#.*')
19 _commentre = re.compile(r'((^|[^\\])(\\\\)*)#.*')
19 # remove comments prefixed by an even number of escapes
20 # remove comments prefixed by an even number of escapes
20 line = _commentre.sub(r'\1', line)
21 line = _commentre.sub(r'\1', line)
21 # fixup properly escaped comments that survived the above
22 # fixup properly escaped comments that survived the above
22 line = line.replace("\\#", "#")
23 line = line.replace("\\#", "#")
23 line = line.rstrip()
24 line = line.rstrip()
24 if line:
25 if line:
25 yield line
26 yield line
26
27
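The _commentre pattern strips a '#' comment only when the '#' is preceded by an even number of backslashes, that is, when it is not itself escaped; the follow-up replace then unescapes any '\#' that survived. A short demonstration of both branches:

import re
_commentre = re.compile(r'((^|[^\\])(\\\\)*)#.*')

def strip_comment(line):
    # drop an unescaped '#' comment, keep an escaped '\#' as a literal '#'
    line = _commentre.sub(r'\1', line)
    return line.replace('\\#', '#').rstrip()

assert strip_comment('*.pyc  # compiled files') == '*.pyc'
assert strip_comment('literal\\#hash') == 'literal#hash'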
27 def ignore(root, files, warn):
28 def ignore(root, files, warn):
28 '''return the contents of .hgignore files as a list of patterns.
29 '''return the contents of .hgignore files as a list of patterns.
29
30
30 the files parsed for patterns include:
31 the files parsed for patterns include:
31 .hgignore in the repository root
32 .hgignore in the repository root
32 any additional files specified in the [ui] section of ~/.hgrc
33 any additional files specified in the [ui] section of ~/.hgrc
33
34
34 trailing white space is dropped.
35 trailing white space is dropped.
35 the escape character is backslash.
36 the escape character is backslash.
36 comments start with #.
37 comments start with #.
37 empty lines are skipped.
38 empty lines are skipped.
38
39
39 lines can be of the following formats:
40 lines can be of the following formats:
40
41
41 syntax: regexp # defaults following lines to non-rooted regexps
42 syntax: regexp # defaults following lines to non-rooted regexps
42 syntax: glob # defaults following lines to non-rooted globs
43 syntax: glob # defaults following lines to non-rooted globs
43 re:pattern # non-rooted regular expression
44 re:pattern # non-rooted regular expression
44 glob:pattern # non-rooted glob
45 glob:pattern # non-rooted glob
45 pattern # pattern of the current default type'''
46 pattern # pattern of the current default type'''
46
47
47 syntaxes = {'re': 'relre:', 'regexp': 'relre:', 'glob': 'relglob:'}
48 syntaxes = {'re': 'relre:', 'regexp': 'relre:', 'glob': 'relglob:'}
48 pats = {}
49 pats = {}
49 for f in files:
50 for f in files:
50 try:
51 try:
51 pats[f] = []
52 pats[f] = []
52 fp = open(f)
53 fp = open(f)
53 syntax = 'relre:'
54 syntax = 'relre:'
54 for line in _parselines(fp):
55 for line in _parselines(fp):
55 if line.startswith('syntax:'):
56 if line.startswith('syntax:'):
56 s = line[7:].strip()
57 s = line[7:].strip()
57 try:
58 try:
58 syntax = syntaxes[s]
59 syntax = syntaxes[s]
59 except KeyError:
60 except KeyError:
60 warn(_("%s: ignoring invalid syntax '%s'\n") % (f, s))
61 warn(_("%s: ignoring invalid syntax '%s'\n") % (f, s))
61 continue
62 continue
62 pat = syntax + line
63 pat = syntax + line
63 for s, rels in syntaxes.iteritems():
64 for s, rels in syntaxes.iteritems():
64 if line.startswith(rels):
65 if line.startswith(rels):
65 pat = line
66 pat = line
66 break
67 break
67 elif line.startswith(s+':'):
68 elif line.startswith(s+':'):
68 pat = rels + line[len(s)+1:]
69 pat = rels + line[len(s)+1:]
69 break
70 break
70 pats[f].append(pat)
71 pats[f].append(pat)
71 except IOError, inst:
72 except IOError, inst:
72 if f != files[0]:
73 if f != files[0]:
73 warn(_("skipping unreadable ignore file '%s': %s\n") %
74 warn(_("skipping unreadable ignore file '%s': %s\n") %
74 (f, inst.strerror))
75 (f, inst.strerror))
75
76
76 allpats = []
77 allpats = []
77 [allpats.extend(patlist) for patlist in pats.values()]
78 [allpats.extend(patlist) for patlist in pats.values()]
78 if not allpats:
79 if not allpats:
79 return util.never
80 return util.never
80
81
81 try:
82 try:
82 files, ignorefunc, anypats = (
83 files, ignorefunc, anypats = (
83 util.matcher(root, inc=allpats, src='.hgignore'))
84 util.matcher(root, inc=allpats, src='.hgignore'))
84 except util.Abort:
85 except util.Abort:
85 # Re-raise an exception where the src is the right file
86 # Re-raise an exception where the src is the right file
86 for f, patlist in pats.iteritems():
87 for f, patlist in pats.iteritems():
87 files, ignorefunc, anypats = (
88 files, ignorefunc, anypats = (
88 util.matcher(root, inc=patlist, src=f))
89 util.matcher(root, inc=patlist, src=f))
89
90
90 return ignorefunc
91 return ignorefunc
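As a concrete illustration of the line formats the ignore() docstring lists, a hypothetical .hgignore can mix syntax: defaults with explicit per-line prefixes:

syntax: glob          # bare lines below are treated as non-rooted globs
*.orig
*.rej
re:\.py[co]$          # explicit prefix, independent of the current default
syntax: regexp        # switch the default for the lines that follow
build/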
@@ -1,2172 +1,2172 b''
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2, incorporated herein by reference.
6 # GNU General Public License version 2, incorporated herein by reference.
7
7
8 from node import bin, hex, nullid, nullrev, short
8 from node import bin, hex, nullid, nullrev, short
9 from i18n import _
9 from i18n import _
10 import repo, changegroup
10 import repo, changegroup
11 import changelog, dirstate, filelog, manifest, context, weakref
11 import changelog, dirstate, filelog, manifest, context
12 import lock, transaction, stat, errno, ui, store, encoding
12 import lock, transaction, ui, store, encoding
13 import os, time, util, extensions, hook, inspect, error
13 import util, extensions, hook, error
14 import match as match_
14 import match as match_
15 import merge as merge_
15 import merge as merge_
16
17 from lock import release
16 from lock import release
17 import weakref, stat, errno, os, time, inspect
18 propertycache = util.propertycache
18 propertycache = util.propertycache
19
19
20 class localrepository(repo.repository):
20 class localrepository(repo.repository):
21 capabilities = set(('lookup', 'changegroupsubset'))
21 capabilities = set(('lookup', 'changegroupsubset'))
22 supported = set('revlogv1 store fncache'.split())
22 supported = set('revlogv1 store fncache'.split())
23
23
24 def __init__(self, baseui, path=None, create=0):
24 def __init__(self, baseui, path=None, create=0):
25 repo.repository.__init__(self)
25 repo.repository.__init__(self)
26 self.root = os.path.realpath(path)
26 self.root = os.path.realpath(path)
27 self.path = os.path.join(self.root, ".hg")
27 self.path = os.path.join(self.root, ".hg")
28 self.origroot = path
28 self.origroot = path
29 self.opener = util.opener(self.path)
29 self.opener = util.opener(self.path)
30 self.wopener = util.opener(self.root)
30 self.wopener = util.opener(self.root)
31
31
32 if not os.path.isdir(self.path):
32 if not os.path.isdir(self.path):
33 if create:
33 if create:
34 if not os.path.exists(path):
34 if not os.path.exists(path):
35 os.mkdir(path)
35 os.mkdir(path)
36 os.mkdir(self.path)
36 os.mkdir(self.path)
37 requirements = ["revlogv1"]
37 requirements = ["revlogv1"]
38 if baseui.configbool('format', 'usestore', True):
38 if baseui.configbool('format', 'usestore', True):
39 os.mkdir(os.path.join(self.path, "store"))
39 os.mkdir(os.path.join(self.path, "store"))
40 requirements.append("store")
40 requirements.append("store")
41 if baseui.configbool('format', 'usefncache', True):
41 if baseui.configbool('format', 'usefncache', True):
42 requirements.append("fncache")
42 requirements.append("fncache")
43 # create an invalid changelog
43 # create an invalid changelog
44 self.opener("00changelog.i", "a").write(
44 self.opener("00changelog.i", "a").write(
45 '\0\0\0\2' # represents revlogv2
45 '\0\0\0\2' # represents revlogv2
46 ' dummy changelog to prevent using the old repo layout'
46 ' dummy changelog to prevent using the old repo layout'
47 )
47 )
48 reqfile = self.opener("requires", "w")
48 reqfile = self.opener("requires", "w")
49 for r in requirements:
49 for r in requirements:
50 reqfile.write("%s\n" % r)
50 reqfile.write("%s\n" % r)
51 reqfile.close()
51 reqfile.close()
52 else:
52 else:
53 raise error.RepoError(_("repository %s not found") % path)
53 raise error.RepoError(_("repository %s not found") % path)
54 elif create:
54 elif create:
55 raise error.RepoError(_("repository %s already exists") % path)
55 raise error.RepoError(_("repository %s already exists") % path)
56 else:
56 else:
57 # find requirements
57 # find requirements
58 requirements = set()
58 requirements = set()
59 try:
59 try:
60 requirements = set(self.opener("requires").read().splitlines())
60 requirements = set(self.opener("requires").read().splitlines())
61 except IOError, inst:
61 except IOError, inst:
62 if inst.errno != errno.ENOENT:
62 if inst.errno != errno.ENOENT:
63 raise
63 raise
64 for r in requirements - self.supported:
64 for r in requirements - self.supported:
65 raise error.RepoError(_("requirement '%s' not supported") % r)
65 raise error.RepoError(_("requirement '%s' not supported") % r)
66
66
67 self.store = store.store(requirements, self.path, util.opener)
67 self.store = store.store(requirements, self.path, util.opener)
68 self.spath = self.store.path
68 self.spath = self.store.path
69 self.sopener = self.store.opener
69 self.sopener = self.store.opener
70 self.sjoin = self.store.join
70 self.sjoin = self.store.join
71 self.opener.createmode = self.store.createmode
71 self.opener.createmode = self.store.createmode
72
72
73 self.baseui = baseui
73 self.baseui = baseui
74 self.ui = baseui.copy()
74 self.ui = baseui.copy()
75 try:
75 try:
76 self.ui.readconfig(self.join("hgrc"), self.root)
76 self.ui.readconfig(self.join("hgrc"), self.root)
77 extensions.loadall(self.ui)
77 extensions.loadall(self.ui)
78 except IOError:
78 except IOError:
79 pass
79 pass
80
80
81 self.tagscache = None
81 self.tagscache = None
82 self._tagstypecache = None
82 self._tagstypecache = None
83 self.branchcache = None
83 self.branchcache = None
84 self._ubranchcache = None # UTF-8 version of branchcache
84 self._ubranchcache = None # UTF-8 version of branchcache
85 self._branchcachetip = None
85 self._branchcachetip = None
86 self.nodetagscache = None
86 self.nodetagscache = None
87 self.filterpats = {}
87 self.filterpats = {}
88 self._datafilters = {}
88 self._datafilters = {}
89 self._transref = self._lockref = self._wlockref = None
89 self._transref = self._lockref = self._wlockref = None
90
90
91 @propertycache
91 @propertycache
92 def changelog(self):
92 def changelog(self):
93 c = changelog.changelog(self.sopener)
93 c = changelog.changelog(self.sopener)
94 if 'HG_PENDING' in os.environ:
94 if 'HG_PENDING' in os.environ:
95 p = os.environ['HG_PENDING']
95 p = os.environ['HG_PENDING']
96 if p.startswith(self.root):
96 if p.startswith(self.root):
97 c.readpending('00changelog.i.a')
97 c.readpending('00changelog.i.a')
98 self.sopener.defversion = c.version
98 self.sopener.defversion = c.version
99 return c
99 return c
100
100
101 @propertycache
101 @propertycache
102 def manifest(self):
102 def manifest(self):
103 return manifest.manifest(self.sopener)
103 return manifest.manifest(self.sopener)
104
104
105 @propertycache
105 @propertycache
106 def dirstate(self):
106 def dirstate(self):
107 return dirstate.dirstate(self.opener, self.ui, self.root)
107 return dirstate.dirstate(self.opener, self.ui, self.root)
108
108
109 def __getitem__(self, changeid):
109 def __getitem__(self, changeid):
110 if changeid is None:
110 if changeid is None:
111 return context.workingctx(self)
111 return context.workingctx(self)
112 return context.changectx(self, changeid)
112 return context.changectx(self, changeid)
113
113
114 def __nonzero__(self):
114 def __nonzero__(self):
115 return True
115 return True
116
116
117 def __len__(self):
117 def __len__(self):
118 return len(self.changelog)
118 return len(self.changelog)
119
119
120 def __iter__(self):
120 def __iter__(self):
121 for i in xrange(len(self)):
121 for i in xrange(len(self)):
122 yield i
122 yield i
123
123
124 def url(self):
124 def url(self):
125 return 'file:' + self.root
125 return 'file:' + self.root
126
126
127 def hook(self, name, throw=False, **args):
127 def hook(self, name, throw=False, **args):
128 return hook.hook(self.ui, self, name, throw, **args)
128 return hook.hook(self.ui, self, name, throw, **args)
129
129
130 tag_disallowed = ':\r\n'
130 tag_disallowed = ':\r\n'
131
131
132 def _tag(self, names, node, message, local, user, date, parent=None,
132 def _tag(self, names, node, message, local, user, date, parent=None,
133 extra={}):
133 extra={}):
134 use_dirstate = parent is None
134 use_dirstate = parent is None
135
135
136 if isinstance(names, str):
136 if isinstance(names, str):
137 allchars = names
137 allchars = names
138 names = (names,)
138 names = (names,)
139 else:
139 else:
140 allchars = ''.join(names)
140 allchars = ''.join(names)
141 for c in self.tag_disallowed:
141 for c in self.tag_disallowed:
142 if c in allchars:
142 if c in allchars:
143 raise util.Abort(_('%r cannot be used in a tag name') % c)
143 raise util.Abort(_('%r cannot be used in a tag name') % c)
144
144
145 for name in names:
145 for name in names:
146 self.hook('pretag', throw=True, node=hex(node), tag=name,
146 self.hook('pretag', throw=True, node=hex(node), tag=name,
147 local=local)
147 local=local)
148
148
149 def writetags(fp, names, munge, prevtags):
149 def writetags(fp, names, munge, prevtags):
150 fp.seek(0, 2)
150 fp.seek(0, 2)
151 if prevtags and prevtags[-1] != '\n':
151 if prevtags and prevtags[-1] != '\n':
152 fp.write('\n')
152 fp.write('\n')
153 for name in names:
153 for name in names:
154 m = munge and munge(name) or name
154 m = munge and munge(name) or name
155 if self._tagstypecache and name in self._tagstypecache:
155 if self._tagstypecache and name in self._tagstypecache:
156 old = self.tagscache.get(name, nullid)
156 old = self.tagscache.get(name, nullid)
157 fp.write('%s %s\n' % (hex(old), m))
157 fp.write('%s %s\n' % (hex(old), m))
158 fp.write('%s %s\n' % (hex(node), m))
158 fp.write('%s %s\n' % (hex(node), m))
159 fp.close()
159 fp.close()
160
160
161 prevtags = ''
161 prevtags = ''
162 if local:
162 if local:
163 try:
163 try:
164 fp = self.opener('localtags', 'r+')
164 fp = self.opener('localtags', 'r+')
165 except IOError:
165 except IOError:
166 fp = self.opener('localtags', 'a')
166 fp = self.opener('localtags', 'a')
167 else:
167 else:
168 prevtags = fp.read()
168 prevtags = fp.read()
169
169
170 # local tags are stored in the current charset
170 # local tags are stored in the current charset
171 writetags(fp, names, None, prevtags)
171 writetags(fp, names, None, prevtags)
172 for name in names:
172 for name in names:
173 self.hook('tag', node=hex(node), tag=name, local=local)
173 self.hook('tag', node=hex(node), tag=name, local=local)
174 return
174 return
175
175
176 if use_dirstate:
176 if use_dirstate:
177 try:
177 try:
178 fp = self.wfile('.hgtags', 'rb+')
178 fp = self.wfile('.hgtags', 'rb+')
179 except IOError:
179 except IOError:
180 fp = self.wfile('.hgtags', 'ab')
180 fp = self.wfile('.hgtags', 'ab')
181 else:
181 else:
182 prevtags = fp.read()
182 prevtags = fp.read()
183 else:
183 else:
184 try:
184 try:
185 prevtags = self.filectx('.hgtags', parent).data()
185 prevtags = self.filectx('.hgtags', parent).data()
186 except error.LookupError:
186 except error.LookupError:
187 pass
187 pass
188 fp = self.wfile('.hgtags', 'wb')
188 fp = self.wfile('.hgtags', 'wb')
189 if prevtags:
189 if prevtags:
190 fp.write(prevtags)
190 fp.write(prevtags)
191
191
192 # committed tags are stored in UTF-8
192 # committed tags are stored in UTF-8
193 writetags(fp, names, encoding.fromlocal, prevtags)
193 writetags(fp, names, encoding.fromlocal, prevtags)
194
194
195 if use_dirstate and '.hgtags' not in self.dirstate:
195 if use_dirstate and '.hgtags' not in self.dirstate:
196 self.add(['.hgtags'])
196 self.add(['.hgtags'])
197
197
198 tagnode = self.commit(['.hgtags'], message, user, date, p1=parent,
198 tagnode = self.commit(['.hgtags'], message, user, date, p1=parent,
199 extra=extra)
199 extra=extra)
200
200
201 for name in names:
201 for name in names:
202 self.hook('tag', node=hex(node), tag=name, local=local)
202 self.hook('tag', node=hex(node), tag=name, local=local)
203
203
204 return tagnode
204 return tagnode
205
205
206 def tag(self, names, node, message, local, user, date):
206 def tag(self, names, node, message, local, user, date):
207 '''tag a revision with one or more symbolic names.
207 '''tag a revision with one or more symbolic names.
208
208
209 names is a list of strings or, when adding a single tag, names may be a
209 names is a list of strings or, when adding a single tag, names may be a
210 string.
210 string.
211
211
212 if local is True, the tags are stored in a per-repository file.
212 if local is True, the tags are stored in a per-repository file.
213 otherwise, they are stored in the .hgtags file, and a new
213 otherwise, they are stored in the .hgtags file, and a new
214 changeset is committed with the change.
214 changeset is committed with the change.
215
215
216 keyword arguments:
216 keyword arguments:
217
217
218 local: whether to store tags in non-version-controlled file
218 local: whether to store tags in non-version-controlled file
219 (default False)
219 (default False)
220
220
221 message: commit message to use if committing
221 message: commit message to use if committing
222
222
223 user: name of user to use if committing
223 user: name of user to use if committing
224
224
225 date: date tuple to use if committing'''
225 date: date tuple to use if committing'''
226
226
227 for x in self.status()[:5]:
227 for x in self.status()[:5]:
228 if '.hgtags' in x:
228 if '.hgtags' in x:
229 raise util.Abort(_('working copy of .hgtags is changed '
229 raise util.Abort(_('working copy of .hgtags is changed '
230 '(please commit .hgtags manually)'))
230 '(please commit .hgtags manually)'))
231
231
232 self.tags() # instantiate the cache
232 self.tags() # instantiate the cache
233 self._tag(names, node, message, local, user, date)
233 self._tag(names, node, message, local, user, date)
234
234
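writetags() above emits one '<40-hex node> <tag>' pair per line, and when an existing tag is moved it first repeats the tag with its previous node so the file keeps the tag's history. The .hgtags format itself has no comment syntax, so with made-up hashes the committed file reads:

a2b7d2fc7e0f2a6c91d54e0e4d7b9c1f3e5a6b7c v1.0
a2b7d2fc7e0f2a6c91d54e0e4d7b9c1f3e5a6b7c v1.1
d4e5f60718293a4b5c6d7e8f9012a3b4c5d6e7f8 v1.1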
235 def tags(self):
235 def tags(self):
236 '''return a mapping of tag to node'''
236 '''return a mapping of tag to node'''
237 if self.tagscache:
237 if self.tagscache:
238 return self.tagscache
238 return self.tagscache
239
239
240 globaltags = {}
240 globaltags = {}
241 tagtypes = {}
241 tagtypes = {}
242
242
243 def readtags(lines, fn, tagtype):
243 def readtags(lines, fn, tagtype):
244 filetags = {}
244 filetags = {}
245 count = 0
245 count = 0
246
246
247 def warn(msg):
247 def warn(msg):
248 self.ui.warn(_("%s, line %s: %s\n") % (fn, count, msg))
248 self.ui.warn(_("%s, line %s: %s\n") % (fn, count, msg))
249
249
250 for l in lines:
250 for l in lines:
251 count += 1
251 count += 1
252 if not l:
252 if not l:
253 continue
253 continue
254 s = l.split(" ", 1)
254 s = l.split(" ", 1)
255 if len(s) != 2:
255 if len(s) != 2:
256 warn(_("cannot parse entry"))
256 warn(_("cannot parse entry"))
257 continue
257 continue
258 node, key = s
258 node, key = s
259 key = encoding.tolocal(key.strip()) # stored in UTF-8
259 key = encoding.tolocal(key.strip()) # stored in UTF-8
260 try:
260 try:
261 bin_n = bin(node)
261 bin_n = bin(node)
262 except TypeError:
262 except TypeError:
263 warn(_("node '%s' is not well formed") % node)
263 warn(_("node '%s' is not well formed") % node)
264 continue
264 continue
265 if bin_n not in self.changelog.nodemap:
265 if bin_n not in self.changelog.nodemap:
266 warn(_("tag '%s' refers to unknown node") % key)
266 warn(_("tag '%s' refers to unknown node") % key)
267 continue
267 continue
268
268
269 h = []
269 h = []
270 if key in filetags:
270 if key in filetags:
271 n, h = filetags[key]
271 n, h = filetags[key]
272 h.append(n)
272 h.append(n)
273 filetags[key] = (bin_n, h)
273 filetags[key] = (bin_n, h)
274
274
275 for k, nh in filetags.iteritems():
275 for k, nh in filetags.iteritems():
276 if k not in globaltags:
276 if k not in globaltags:
277 globaltags[k] = nh
277 globaltags[k] = nh
278 tagtypes[k] = tagtype
278 tagtypes[k] = tagtype
279 continue
279 continue
280
280
281 # we prefer the global tag if:
281 # we prefer the global tag if:
282 # it supersedes us OR
282 # it supersedes us OR
283 # mutual supersedes and it has a higher rank
283 # mutual supersedes and it has a higher rank
284 # otherwise we win because we're tip-most
284 # otherwise we win because we're tip-most
285 an, ah = nh
285 an, ah = nh
286 bn, bh = globaltags[k]
286 bn, bh = globaltags[k]
287 if (bn != an and an in bh and
287 if (bn != an and an in bh and
288 (bn not in ah or len(bh) > len(ah))):
288 (bn not in ah or len(bh) > len(ah))):
289 an = bn
289 an = bn
290 ah.extend([n for n in bh if n not in ah])
290 ah.extend([n for n in bh if n not in ah])
291 globaltags[k] = an, ah
291 globaltags[k] = an, ah
292 tagtypes[k] = tagtype
292 tagtypes[k] = tagtype
293
293
294 # read the tags file from each head, ending with the tip
294 # read the tags file from each head, ending with the tip
295 f = None
295 f = None
296 for rev, node, fnode in self._hgtagsnodes():
296 for rev, node, fnode in self._hgtagsnodes():
297 f = (f and f.filectx(fnode) or
297 f = (f and f.filectx(fnode) or
298 self.filectx('.hgtags', fileid=fnode))
298 self.filectx('.hgtags', fileid=fnode))
299 readtags(f.data().splitlines(), f, "global")
299 readtags(f.data().splitlines(), f, "global")
300
300
301 try:
301 try:
302 data = encoding.fromlocal(self.opener("localtags").read())
302 data = encoding.fromlocal(self.opener("localtags").read())
303 # localtags are stored in the local character set
303 # localtags are stored in the local character set
304 # while the internal tag table is stored in UTF-8
304 # while the internal tag table is stored in UTF-8
305 readtags(data.splitlines(), "localtags", "local")
305 readtags(data.splitlines(), "localtags", "local")
306 except IOError:
306 except IOError:
307 pass
307 pass
308
308
309 self.tagscache = {}
309 self.tagscache = {}
310 self._tagstypecache = {}
310 self._tagstypecache = {}
311 for k, nh in globaltags.iteritems():
311 for k, nh in globaltags.iteritems():
312 n = nh[0]
312 n = nh[0]
313 if n != nullid:
313 if n != nullid:
314 self.tagscache[k] = n
314 self.tagscache[k] = n
315 self._tagstypecache[k] = tagtypes[k]
315 self._tagstypecache[k] = tagtypes[k]
316 self.tagscache['tip'] = self.changelog.tip()
316 self.tagscache['tip'] = self.changelog.tip()
317 return self.tagscache
317 return self.tagscache
318
318
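The ranking comment in readtags() compresses to a single predicate. A restatement of the same decision, where (an, ah) are the node and history just read and (bn, bh) the entry already recorded in globaltags:

def keep_existing(an, ah, bn, bh):
    # the recorded node bn wins when it supersedes an (an appears in bn's
    # history) and an does not supersede bn back with a longer history of
    # its own; otherwise the tip-most reading, an, replaces it
    return bn != an and an in bh and (bn not in ah or len(bh) > len(ah))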
319 def tagtype(self, tagname):
319 def tagtype(self, tagname):
320 '''
320 '''
321 return the type of the given tag. result can be:
321 return the type of the given tag. result can be:
322
322
323 'local' : a local tag
323 'local' : a local tag
324 'global' : a global tag
324 'global' : a global tag
325 None : tag does not exist
325 None : tag does not exist
326 '''
326 '''
327
327
328 self.tags()
328 self.tags()
329
329
330 return self._tagstypecache.get(tagname)
330 return self._tagstypecache.get(tagname)
331
331
332 def _hgtagsnodes(self):
332 def _hgtagsnodes(self):
333 last = {}
333 last = {}
334 ret = []
334 ret = []
335 for node in reversed(self.heads()):
335 for node in reversed(self.heads()):
336 c = self[node]
336 c = self[node]
337 rev = c.rev()
337 rev = c.rev()
338 try:
338 try:
339 fnode = c.filenode('.hgtags')
339 fnode = c.filenode('.hgtags')
340 except error.LookupError:
340 except error.LookupError:
341 continue
341 continue
342 ret.append((rev, node, fnode))
342 ret.append((rev, node, fnode))
343 if fnode in last:
343 if fnode in last:
344 ret[last[fnode]] = None
344 ret[last[fnode]] = None
345 last[fnode] = len(ret) - 1
345 last[fnode] = len(ret) - 1
346 return [item for item in ret if item]
346 return [item for item in ret if item]
347
347
348 def tagslist(self):
348 def tagslist(self):
349 '''return a list of tags ordered by revision'''
349 '''return a list of tags ordered by revision'''
350 l = []
350 l = []
351 for t, n in self.tags().iteritems():
351 for t, n in self.tags().iteritems():
352 try:
352 try:
353 r = self.changelog.rev(n)
353 r = self.changelog.rev(n)
354 except:
354 except:
355 r = -2 # sort to the beginning of the list if unknown
355 r = -2 # sort to the beginning of the list if unknown
356 l.append((r, t, n))
356 l.append((r, t, n))
357 return [(t, n) for r, t, n in sorted(l)]
357 return [(t, n) for r, t, n in sorted(l)]
358
358
359 def nodetags(self, node):
359 def nodetags(self, node):
360 '''return the tags associated with a node'''
360 '''return the tags associated with a node'''
361 if not self.nodetagscache:
361 if not self.nodetagscache:
362 self.nodetagscache = {}
362 self.nodetagscache = {}
363 for t, n in self.tags().iteritems():
363 for t, n in self.tags().iteritems():
364 self.nodetagscache.setdefault(n, []).append(t)
364 self.nodetagscache.setdefault(n, []).append(t)
365 return self.nodetagscache.get(node, [])
365 return self.nodetagscache.get(node, [])
366
366
367 def _branchtags(self, partial, lrev):
367 def _branchtags(self, partial, lrev):
368 # TODO: rename this function?
368 # TODO: rename this function?
369 tiprev = len(self) - 1
369 tiprev = len(self) - 1
370 if lrev != tiprev:
370 if lrev != tiprev:
371 self._updatebranchcache(partial, lrev+1, tiprev+1)
371 self._updatebranchcache(partial, lrev+1, tiprev+1)
372 self._writebranchcache(partial, self.changelog.tip(), tiprev)
372 self._writebranchcache(partial, self.changelog.tip(), tiprev)
373
373
374 return partial
374 return partial
375
375
376 def _branchheads(self):
376 def _branchheads(self):
377 tip = self.changelog.tip()
377 tip = self.changelog.tip()
378 if self.branchcache is not None and self._branchcachetip == tip:
378 if self.branchcache is not None and self._branchcachetip == tip:
379 return self.branchcache
379 return self.branchcache
380
380
381 oldtip = self._branchcachetip
381 oldtip = self._branchcachetip
382 self._branchcachetip = tip
382 self._branchcachetip = tip
383 if self.branchcache is None:
383 if self.branchcache is None:
384 self.branchcache = {} # avoid recursion in changectx
384 self.branchcache = {} # avoid recursion in changectx
385 else:
385 else:
386 self.branchcache.clear() # keep using the same dict
386 self.branchcache.clear() # keep using the same dict
387 if oldtip is None or oldtip not in self.changelog.nodemap:
387 if oldtip is None or oldtip not in self.changelog.nodemap:
388 partial, last, lrev = self._readbranchcache()
388 partial, last, lrev = self._readbranchcache()
389 else:
389 else:
390 lrev = self.changelog.rev(oldtip)
390 lrev = self.changelog.rev(oldtip)
391 partial = self._ubranchcache
391 partial = self._ubranchcache
392
392
393 self._branchtags(partial, lrev)
393 self._branchtags(partial, lrev)
394 # this private cache holds all heads (not just tips)
394 # this private cache holds all heads (not just tips)
395 self._ubranchcache = partial
395 self._ubranchcache = partial
396
396
397 # the branch cache is stored on disk as UTF-8, but in the local
397 # the branch cache is stored on disk as UTF-8, but in the local
398 # charset internally
398 # charset internally
399 for k, v in partial.iteritems():
399 for k, v in partial.iteritems():
400 self.branchcache[encoding.tolocal(k)] = v
400 self.branchcache[encoding.tolocal(k)] = v
401 return self.branchcache
401 return self.branchcache
402
402
403
403
404 def branchtags(self):
404 def branchtags(self):
405 '''return a dict where branch names map to the tipmost head of
405 '''return a dict where branch names map to the tipmost head of
406 the branch, open heads come before closed'''
406 the branch, open heads come before closed'''
407 bt = {}
407 bt = {}
408 for bn, heads in self._branchheads().iteritems():
408 for bn, heads in self._branchheads().iteritems():
409 head = None
409 head = None
410 for i in range(len(heads)-1, -1, -1):
410 for i in range(len(heads)-1, -1, -1):
411 h = heads[i]
411 h = heads[i]
412 if 'close' not in self.changelog.read(h)[5]:
412 if 'close' not in self.changelog.read(h)[5]:
413 head = h
413 head = h
414 break
414 break
415 # no open heads were found
415 # no open heads were found
416 if head is None:
416 if head is None:
417 head = heads[-1]
417 head = heads[-1]
418 bt[bn] = head
418 bt[bn] = head
419 return bt
419 return bt
420
420
421
421
422 def _readbranchcache(self):
422 def _readbranchcache(self):
423 partial = {}
423 partial = {}
424 try:
424 try:
425 f = self.opener("branchheads.cache")
425 f = self.opener("branchheads.cache")
426 lines = f.read().split('\n')
426 lines = f.read().split('\n')
427 f.close()
427 f.close()
428 except (IOError, OSError):
428 except (IOError, OSError):
429 return {}, nullid, nullrev
429 return {}, nullid, nullrev
430
430
431 try:
431 try:
432 last, lrev = lines.pop(0).split(" ", 1)
432 last, lrev = lines.pop(0).split(" ", 1)
433 last, lrev = bin(last), int(lrev)
433 last, lrev = bin(last), int(lrev)
434 if lrev >= len(self) or self[lrev].node() != last:
434 if lrev >= len(self) or self[lrev].node() != last:
435 # invalidate the cache
435 # invalidate the cache
436 raise ValueError('invalidating branch cache (tip differs)')
436 raise ValueError('invalidating branch cache (tip differs)')
437 for l in lines:
437 for l in lines:
438 if not l: continue
438 if not l: continue
439 node, label = l.split(" ", 1)
439 node, label = l.split(" ", 1)
440 partial.setdefault(label.strip(), []).append(bin(node))
440 partial.setdefault(label.strip(), []).append(bin(node))
441 except KeyboardInterrupt:
441 except KeyboardInterrupt:
442 raise
442 raise
443 except Exception, inst:
443 except Exception, inst:
444 if self.ui.debugflag:
444 if self.ui.debugflag:
445 self.ui.warn(str(inst), '\n')
445 self.ui.warn(str(inst), '\n')
446 partial, last, lrev = {}, nullid, nullrev
446 partial, last, lrev = {}, nullid, nullrev
447 return partial, last, lrev
447 return partial, last, lrev
448
448
449 def _writebranchcache(self, branches, tip, tiprev):
449 def _writebranchcache(self, branches, tip, tiprev):
450 try:
450 try:
451 f = self.opener("branchheads.cache", "w", atomictemp=True)
451 f = self.opener("branchheads.cache", "w", atomictemp=True)
452 f.write("%s %s\n" % (hex(tip), tiprev))
452 f.write("%s %s\n" % (hex(tip), tiprev))
453 for label, nodes in branches.iteritems():
453 for label, nodes in branches.iteritems():
454 for node in nodes:
454 for node in nodes:
455 f.write("%s %s\n" % (hex(node), label))
455 f.write("%s %s\n" % (hex(node), label))
456 f.rename()
456 f.rename()
457 except (IOError, OSError):
457 except (IOError, OSError):
458 pass
458 pass
459
459
460 def _updatebranchcache(self, partial, start, end):
460 def _updatebranchcache(self, partial, start, end):
461 for r in xrange(start, end):
461 for r in xrange(start, end):
462 c = self[r]
462 c = self[r]
463 b = c.branch()
463 b = c.branch()
464 bheads = partial.setdefault(b, [])
464 bheads = partial.setdefault(b, [])
465 bheads.append(c.node())
465 bheads.append(c.node())
466 for p in c.parents():
466 for p in c.parents():
467 pn = p.node()
467 pn = p.node()
468 if pn in bheads:
468 if pn in bheads:
469 bheads.remove(pn)
469 bheads.remove(pn)
470
470
471 def lookup(self, key):
471 def lookup(self, key):
472 if isinstance(key, int):
472 if isinstance(key, int):
473 return self.changelog.node(key)
473 return self.changelog.node(key)
474 elif key == '.':
474 elif key == '.':
475 return self.dirstate.parents()[0]
475 return self.dirstate.parents()[0]
476 elif key == 'null':
476 elif key == 'null':
477 return nullid
477 return nullid
478 elif key == 'tip':
478 elif key == 'tip':
479 return self.changelog.tip()
479 return self.changelog.tip()
480 n = self.changelog._match(key)
480 n = self.changelog._match(key)
481 if n:
481 if n:
482 return n
482 return n
483 if key in self.tags():
483 if key in self.tags():
484 return self.tags()[key]
484 return self.tags()[key]
485 if key in self.branchtags():
485 if key in self.branchtags():
486 return self.branchtags()[key]
486 return self.branchtags()[key]
487 n = self.changelog._partialmatch(key)
487 n = self.changelog._partialmatch(key)
488 if n:
488 if n:
489 return n
489 return n
490 try:
490 try:
491 if len(key) == 20:
491 if len(key) == 20:
492 key = hex(key)
492 key = hex(key)
493 except:
493 except:
494 pass
494 pass
495 raise error.RepoError(_("unknown revision '%s'") % key)
495 raise error.RepoError(_("unknown revision '%s'") % key)
496
496
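lookup() tries its resolutions in a fixed order, which the following calls (repo and the names are hypothetical) make explicit:

repo.lookup(0)         # integer revision -> changelog node
repo.lookup('.')       # first parent of the working directory
repo.lookup('tip')     # likewise 'null' and 'tip' are special-cased
repo.lookup('v1.0')    # exact changelog match, then tags, then branches
repo.lookup('9f3a1c')  # finally an unambiguous node-hex prefix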
497 def local(self):
497 def local(self):
498 return True
498 return True
499
499
500 def join(self, f):
500 def join(self, f):
501 return os.path.join(self.path, f)
501 return os.path.join(self.path, f)
502
502
503 def wjoin(self, f):
503 def wjoin(self, f):
504 return os.path.join(self.root, f)
504 return os.path.join(self.root, f)
505
505
506 def rjoin(self, f):
506 def rjoin(self, f):
507 return os.path.join(self.root, util.pconvert(f))
507 return os.path.join(self.root, util.pconvert(f))
508
508
509 def file(self, f):
509 def file(self, f):
510 if f[0] == '/':
510 if f[0] == '/':
511 f = f[1:]
511 f = f[1:]
512 return filelog.filelog(self.sopener, f)
512 return filelog.filelog(self.sopener, f)
513
513
514 def changectx(self, changeid):
514 def changectx(self, changeid):
515 return self[changeid]
515 return self[changeid]
516
516
517 def parents(self, changeid=None):
517 def parents(self, changeid=None):
518 '''get list of changectxs for parents of changeid'''
518 '''get list of changectxs for parents of changeid'''
519 return self[changeid].parents()
519 return self[changeid].parents()
520
520
521 def filectx(self, path, changeid=None, fileid=None):
521 def filectx(self, path, changeid=None, fileid=None):
522 """changeid can be a changeset revision, node, or tag.
522 """changeid can be a changeset revision, node, or tag.
523 fileid can be a file revision or node."""
523 fileid can be a file revision or node."""
524 return context.filectx(self, path, changeid, fileid)
524 return context.filectx(self, path, changeid, fileid)
525
525
526 def getcwd(self):
526 def getcwd(self):
527 return self.dirstate.getcwd()
527 return self.dirstate.getcwd()
528
528
529 def pathto(self, f, cwd=None):
529 def pathto(self, f, cwd=None):
530 return self.dirstate.pathto(f, cwd)
530 return self.dirstate.pathto(f, cwd)
531
531
532 def wfile(self, f, mode='r'):
532 def wfile(self, f, mode='r'):
533 return self.wopener(f, mode)
533 return self.wopener(f, mode)
534
534
535 def _link(self, f):
535 def _link(self, f):
536 return os.path.islink(self.wjoin(f))
536 return os.path.islink(self.wjoin(f))
537
537
538 def _filter(self, filter, filename, data):
538 def _filter(self, filter, filename, data):
539 if filter not in self.filterpats:
539 if filter not in self.filterpats:
540 l = []
540 l = []
541 for pat, cmd in self.ui.configitems(filter):
541 for pat, cmd in self.ui.configitems(filter):
542 if cmd == '!':
542 if cmd == '!':
543 continue
543 continue
544 mf = util.matcher(self.root, "", [pat], [], [])[1]
544 mf = util.matcher(self.root, "", [pat], [], [])[1]
545 fn = None
545 fn = None
546 params = cmd
546 params = cmd
547 for name, filterfn in self._datafilters.iteritems():
547 for name, filterfn in self._datafilters.iteritems():
548 if cmd.startswith(name):
548 if cmd.startswith(name):
549 fn = filterfn
549 fn = filterfn
550 params = cmd[len(name):].lstrip()
550 params = cmd[len(name):].lstrip()
551 break
551 break
552 if not fn:
552 if not fn:
553 fn = lambda s, c, **kwargs: util.filter(s, c)
553 fn = lambda s, c, **kwargs: util.filter(s, c)
554 # Wrap old filters not supporting keyword arguments
554 # Wrap old filters not supporting keyword arguments
555 if not inspect.getargspec(fn)[2]:
555 if not inspect.getargspec(fn)[2]:
556 oldfn = fn
556 oldfn = fn
557 fn = lambda s, c, **kwargs: oldfn(s, c)
557 fn = lambda s, c, **kwargs: oldfn(s, c)
558 l.append((mf, fn, params))
558 l.append((mf, fn, params))
559 self.filterpats[filter] = l
559 self.filterpats[filter] = l
560
560
561 for mf, fn, cmd in self.filterpats[filter]:
561 for mf, fn, cmd in self.filterpats[filter]:
562 if mf(filename):
562 if mf(filename):
563 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
563 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
564 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
564 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
565 break
565 break
566
566
567 return data
567 return data
568
568
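_filter() is driven by hgrc [encode] and [decode] sections: each entry maps a file pattern to either a shell command (piped through util.filter) or a registered data filter, and '!' masks an entry inherited from another config file. An illustrative fragment:

[encode]
# pipe matching files through a command on their way into the store
**.txt = tr -d '\r'

[decode]
# '!' disables the corresponding entry on checkout
**.txt = !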
569 def adddatafilter(self, name, filter):
569 def adddatafilter(self, name, filter):
570 self._datafilters[name] = filter
570 self._datafilters[name] = filter
571
571
572 def wread(self, filename):
572 def wread(self, filename):
573 if self._link(filename):
573 if self._link(filename):
574 data = os.readlink(self.wjoin(filename))
574 data = os.readlink(self.wjoin(filename))
575 else:
575 else:
576 data = self.wopener(filename, 'r').read()
576 data = self.wopener(filename, 'r').read()
577 return self._filter("encode", filename, data)
577 return self._filter("encode", filename, data)
578
578
579 def wwrite(self, filename, data, flags):
579 def wwrite(self, filename, data, flags):
580 data = self._filter("decode", filename, data)
580 data = self._filter("decode", filename, data)
581 try:
581 try:
582 os.unlink(self.wjoin(filename))
582 os.unlink(self.wjoin(filename))
583 except OSError:
583 except OSError:
584 pass
584 pass
585 if 'l' in flags:
585 if 'l' in flags:
586 self.wopener.symlink(data, filename)
586 self.wopener.symlink(data, filename)
587 else:
587 else:
588 self.wopener(filename, 'w').write(data)
588 self.wopener(filename, 'w').write(data)
589 if 'x' in flags:
589 if 'x' in flags:
590 util.set_flags(self.wjoin(filename), False, True)
590 util.set_flags(self.wjoin(filename), False, True)
591
591
592 def wwritedata(self, filename, data):
592 def wwritedata(self, filename, data):
593 return self._filter("decode", filename, data)
593 return self._filter("decode", filename, data)
594
594
595 def transaction(self):
595 def transaction(self):
596 tr = self._transref and self._transref() or None
596 tr = self._transref and self._transref() or None
597 if tr and tr.running():
597 if tr and tr.running():
598 return tr.nest()
598 return tr.nest()
599
599
600 # abort here if the journal already exists
600 # abort here if the journal already exists
601 if os.path.exists(self.sjoin("journal")):
601 if os.path.exists(self.sjoin("journal")):
602 raise error.RepoError(_("journal already exists - run hg recover"))
602 raise error.RepoError(_("journal already exists - run hg recover"))
603
603
604 # save dirstate for rollback
604 # save dirstate for rollback
605 try:
605 try:
606 ds = self.opener("dirstate").read()
606 ds = self.opener("dirstate").read()
607 except IOError:
607 except IOError:
608 ds = ""
608 ds = ""
609 self.opener("journal.dirstate", "w").write(ds)
609 self.opener("journal.dirstate", "w").write(ds)
610 self.opener("journal.branch", "w").write(self.dirstate.branch())
610 self.opener("journal.branch", "w").write(self.dirstate.branch())
611
611
612 renames = [(self.sjoin("journal"), self.sjoin("undo")),
612 renames = [(self.sjoin("journal"), self.sjoin("undo")),
613 (self.join("journal.dirstate"), self.join("undo.dirstate")),
613 (self.join("journal.dirstate"), self.join("undo.dirstate")),
614 (self.join("journal.branch"), self.join("undo.branch"))]
614 (self.join("journal.branch"), self.join("undo.branch"))]
615 tr = transaction.transaction(self.ui.warn, self.sopener,
615 tr = transaction.transaction(self.ui.warn, self.sopener,
616 self.sjoin("journal"),
616 self.sjoin("journal"),
617 aftertrans(renames),
617 aftertrans(renames),
618 self.store.createmode)
618 self.store.createmode)
619 self._transref = weakref.ref(tr)
619 self._transref = weakref.ref(tr)
620 return tr
620 return tr
621
621
622 def recover(self):
622 def recover(self):
623 lock = self.lock()
623 lock = self.lock()
624 try:
624 try:
625 if os.path.exists(self.sjoin("journal")):
625 if os.path.exists(self.sjoin("journal")):
626 self.ui.status(_("rolling back interrupted transaction\n"))
626 self.ui.status(_("rolling back interrupted transaction\n"))
627 transaction.rollback(self.sopener, self.sjoin("journal"), self.ui.warn)
627 transaction.rollback(self.sopener, self.sjoin("journal"), self.ui.warn)
628 self.invalidate()
628 self.invalidate()
629 return True
629 return True
630 else:
630 else:
631 self.ui.warn(_("no interrupted transaction available\n"))
631 self.ui.warn(_("no interrupted transaction available\n"))
632 return False
632 return False
633 finally:
633 finally:
634 lock.release()
634 lock.release()
635
635
636 def rollback(self):
636 def rollback(self):
637 wlock = lock = None
637 wlock = lock = None
638 try:
638 try:
639 wlock = self.wlock()
639 wlock = self.wlock()
640 lock = self.lock()
640 lock = self.lock()
641 if os.path.exists(self.sjoin("undo")):
641 if os.path.exists(self.sjoin("undo")):
642 self.ui.status(_("rolling back last transaction\n"))
642 self.ui.status(_("rolling back last transaction\n"))
643 transaction.rollback(self.sopener, self.sjoin("undo"), self.ui.warn)
643 transaction.rollback(self.sopener, self.sjoin("undo"), self.ui.warn)
644 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
644 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
645 try:
645 try:
646 branch = self.opener("undo.branch").read()
646 branch = self.opener("undo.branch").read()
647 self.dirstate.setbranch(branch)
647 self.dirstate.setbranch(branch)
648 except IOError:
648 except IOError:
649 self.ui.warn(_("Named branch could not be reset, "
649 self.ui.warn(_("Named branch could not be reset, "
650 "current branch still is: %s\n")
650 "current branch still is: %s\n")
651 % encoding.tolocal(self.dirstate.branch()))
651 % encoding.tolocal(self.dirstate.branch()))
652 self.invalidate()
652 self.invalidate()
653 self.dirstate.invalidate()
653 self.dirstate.invalidate()
654 else:
654 else:
655 self.ui.warn(_("no rollback information available\n"))
655 self.ui.warn(_("no rollback information available\n"))
656 finally:
656 finally:
657 release(lock, wlock)
657 release(lock, wlock)
658
658
659 def invalidate(self):
659 def invalidate(self):
660 for a in "changelog manifest".split():
660 for a in "changelog manifest".split():
661 if a in self.__dict__:
661 if a in self.__dict__:
662 delattr(self, a)
662 delattr(self, a)
663 self.tagscache = None
663 self.tagscache = None
664 self._tagstypecache = None
664 self._tagstypecache = None
665 self.nodetagscache = None
665 self.nodetagscache = None
666 self.branchcache = None
666 self.branchcache = None
667 self._ubranchcache = None
667 self._ubranchcache = None
668 self._branchcachetip = None
668 self._branchcachetip = None
669
669
670 def _lock(self, lockname, wait, releasefn, acquirefn, desc):
670 def _lock(self, lockname, wait, releasefn, acquirefn, desc):
671 try:
671 try:
672 l = lock.lock(lockname, 0, releasefn, desc=desc)
672 l = lock.lock(lockname, 0, releasefn, desc=desc)
673 except error.LockHeld, inst:
673 except error.LockHeld, inst:
674 if not wait:
674 if not wait:
675 raise
675 raise
676 self.ui.warn(_("waiting for lock on %s held by %r\n") %
676 self.ui.warn(_("waiting for lock on %s held by %r\n") %
677 (desc, inst.locker))
677 (desc, inst.locker))
678 # default to 600 seconds timeout
678 # default to 600 seconds timeout
679 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
679 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
680 releasefn, desc=desc)
680 releasefn, desc=desc)
681 if acquirefn:
681 if acquirefn:
682 acquirefn()
682 acquirefn()
683 return l
683 return l
684
684
685 def lock(self, wait=True):
685 def lock(self, wait=True):
686 l = self._lockref and self._lockref()
686 l = self._lockref and self._lockref()
687 if l is not None and l.held:
687 if l is not None and l.held:
688 l.lock()
688 l.lock()
689 return l
689 return l
690
690
691 l = self._lock(self.sjoin("lock"), wait, None, self.invalidate,
691 l = self._lock(self.sjoin("lock"), wait, None, self.invalidate,
692 _('repository %s') % self.origroot)
692 _('repository %s') % self.origroot)
693 self._lockref = weakref.ref(l)
693 self._lockref = weakref.ref(l)
694 return l
694 return l
695
695
696 def wlock(self, wait=True):
696 def wlock(self, wait=True):
697 l = self._wlockref and self._wlockref()
697 l = self._wlockref and self._wlockref()
698 if l is not None and l.held:
698 if l is not None and l.held:
699 l.lock()
699 l.lock()
700 return l
700 return l
701
701
702 l = self._lock(self.join("wlock"), wait, self.dirstate.write,
702 l = self._lock(self.join("wlock"), wait, self.dirstate.write,
703 self.dirstate.invalidate, _('working directory of %s') %
703 self.dirstate.invalidate, _('working directory of %s') %
704 self.origroot)
704 self.origroot)
705 self._wlockref = weakref.ref(l)
705 self._wlockref = weakref.ref(l)
706 return l
706 return l
707
707
708 def filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
708 def filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
709 """
709 """
710 commit an individual file as part of a larger transaction
710 commit an individual file as part of a larger transaction
711 """
711 """
712
712
713 fname = fctx.path()
713 fname = fctx.path()
714 text = fctx.data()
714 text = fctx.data()
715 flog = self.file(fname)
715 flog = self.file(fname)
716 fparent1 = manifest1.get(fname, nullid)
716 fparent1 = manifest1.get(fname, nullid)
717 fparent2 = manifest2.get(fname, nullid)
717 fparent2 = manifest2.get(fname, nullid)
718
718
719 meta = {}
719 meta = {}
720 copy = fctx.renamed()
720 copy = fctx.renamed()
721 if copy and copy[0] != fname:
721 if copy and copy[0] != fname:
722 # Mark the new revision of this file as a copy of another
722 # Mark the new revision of this file as a copy of another
723 # file. This copy data will effectively act as a parent
723 # file. This copy data will effectively act as a parent
724 # of this new revision. If this is a merge, the first
724 # of this new revision. If this is a merge, the first
725 # parent will be the nullid (meaning "look up the copy data")
725 # parent will be the nullid (meaning "look up the copy data")
726 # and the second one will be the other parent. For example:
726 # and the second one will be the other parent. For example:
727 #
727 #
728 # 0 --- 1 --- 3 rev1 changes file foo
728 # 0 --- 1 --- 3 rev1 changes file foo
729 # \ / rev2 renames foo to bar and changes it
729 # \ / rev2 renames foo to bar and changes it
730 # \- 2 -/ rev3 should have bar with all changes and
730 # \- 2 -/ rev3 should have bar with all changes and
731 # should record that bar descends from
731 # should record that bar descends from
732 # bar in rev2 and foo in rev1
732 # bar in rev2 and foo in rev1
733 #
733 #
734 # this allows this merge to succeed:
734 # this allows this merge to succeed:
735 #
735 #
736 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
736 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
737 # \ / merging rev3 and rev4 should use bar@rev2
737 # \ / merging rev3 and rev4 should use bar@rev2
738 # \- 2 --- 4 as the merge base
738 # \- 2 --- 4 as the merge base
739 #
739 #
740
740
741 cfname = copy[0]
741 cfname = copy[0]
742 crev = manifest1.get(cfname)
742 crev = manifest1.get(cfname)
743 newfparent = fparent2
743 newfparent = fparent2
744
744
745 if manifest2: # branch merge
745 if manifest2: # branch merge
746 if fparent2 == nullid or crev is None: # copied on remote side
746 if fparent2 == nullid or crev is None: # copied on remote side
747 if cfname in manifest2:
747 if cfname in manifest2:
748 crev = manifest2[cfname]
748 crev = manifest2[cfname]
749 newfparent = fparent1
749 newfparent = fparent1
750
750
751 # find source in nearest ancestor if we've lost track
751 # find source in nearest ancestor if we've lost track
752 if not crev:
752 if not crev:
753 self.ui.debug(_(" %s: searching for copy revision for %s\n") %
753 self.ui.debug(_(" %s: searching for copy revision for %s\n") %
754 (fname, cfname))
754 (fname, cfname))
755 for ancestor in self['.'].ancestors():
755 for ancestor in self['.'].ancestors():
756 if cfname in ancestor:
756 if cfname in ancestor:
757 crev = ancestor[cfname].filenode()
757 crev = ancestor[cfname].filenode()
758 break
758 break
759
759
760 self.ui.debug(_(" %s: copy %s:%s\n") % (fname, cfname, hex(crev)))
760 self.ui.debug(_(" %s: copy %s:%s\n") % (fname, cfname, hex(crev)))
761 meta["copy"] = cfname
761 meta["copy"] = cfname
762 meta["copyrev"] = hex(crev)
762 meta["copyrev"] = hex(crev)
763 fparent1, fparent2 = nullid, newfparent
763 fparent1, fparent2 = nullid, newfparent
764 elif fparent2 != nullid:
764 elif fparent2 != nullid:
765 # is one parent an ancestor of the other?
765 # is one parent an ancestor of the other?
766 fparentancestor = flog.ancestor(fparent1, fparent2)
766 fparentancestor = flog.ancestor(fparent1, fparent2)
767 if fparentancestor == fparent1:
767 if fparentancestor == fparent1:
768 fparent1, fparent2 = fparent2, nullid
768 fparent1, fparent2 = fparent2, nullid
769 elif fparentancestor == fparent2:
769 elif fparentancestor == fparent2:
770 fparent2 = nullid
770 fparent2 = nullid
771
771
772 # is the file unmodified from the parent? report existing entry
772 # is the file unmodified from the parent? report existing entry
773 if fparent2 == nullid and not flog.cmp(fparent1, text) and not meta:
773 if fparent2 == nullid and not flog.cmp(fparent1, text) and not meta:
774 return fparent1
774 return fparent1
775
775
776 changelist.append(fname)
776 changelist.append(fname)
777 return flog.add(text, meta, tr, linkrev, fparent1, fparent2)
777 return flog.add(text, meta, tr, linkrev, fparent1, fparent2)
778
778
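In the rename case above nothing textual marks the copy; it lives in the filelog metadata and in the parent ordering. With illustrative values, the entry filecommit() records amounts to:

meta = {
    'copy': 'foo',       # path this file was copied or renamed from
    'copyrev': 'a2b7d2fc7e0f2a6c91d54e0e4d7b9c1f3e5a6b7c',  # source filenode
}
# parents become (nullid, newfparent): the null first parent signals
# "consult the copy data", the second keeps the other merge parent linked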
779 def rawcommit(self, files, text, user, date, p1=None, p2=None, extra={}):
779 def rawcommit(self, files, text, user, date, p1=None, p2=None, extra={}):
780 if p1 is None:
780 if p1 is None:
781 p1, p2 = self.dirstate.parents()
781 p1, p2 = self.dirstate.parents()
782 return self.commit(files=files, text=text, user=user, date=date,
782 return self.commit(files=files, text=text, user=user, date=date,
783 p1=p1, p2=p2, extra=extra, empty_ok=True)
783 p1=p1, p2=p2, extra=extra, empty_ok=True)
784
784
785 def commit(self, files=None, text="", user=None, date=None,
785 def commit(self, files=None, text="", user=None, date=None,
786 match=None, force=False, force_editor=False,
786 match=None, force=False, force_editor=False,
787 p1=None, p2=None, extra={}, empty_ok=False):
787 p1=None, p2=None, extra={}, empty_ok=False):
788 wlock = lock = None
788 wlock = lock = None
789 if extra.get("close"):
789 if extra.get("close"):
790 force = True
790 force = True
791 if files:
791 if files:
792 files = list(set(files))
792 files = list(set(files))
793 try:
793 try:
794 wlock = self.wlock()
794 wlock = self.wlock()
795 lock = self.lock()
795 lock = self.lock()
796 use_dirstate = (p1 is None) # not rawcommit
796 use_dirstate = (p1 is None) # not rawcommit
797
797
798 if use_dirstate:
798 if use_dirstate:
799 p1, p2 = self.dirstate.parents()
799 p1, p2 = self.dirstate.parents()
800 update_dirstate = True
800 update_dirstate = True
801
801
802 if (not force and p2 != nullid and
802 if (not force and p2 != nullid and
803 (match and (match.files() or match.anypats()))):
803 (match and (match.files() or match.anypats()))):
804 raise util.Abort(_('cannot partially commit a merge '
804 raise util.Abort(_('cannot partially commit a merge '
805 '(do not specify files or patterns)'))
805 '(do not specify files or patterns)'))
806
806
807 if files:
807 if files:
808 modified, removed = [], []
808 modified, removed = [], []
809 for f in files:
809 for f in files:
810 s = self.dirstate[f]
810 s = self.dirstate[f]
811 if s in 'nma':
811 if s in 'nma':
812 modified.append(f)
812 modified.append(f)
813 elif s == 'r':
813 elif s == 'r':
814 removed.append(f)
814 removed.append(f)
815 else:
815 else:
816 self.ui.warn(_("%s not tracked!\n") % f)
816 self.ui.warn(_("%s not tracked!\n") % f)
817 changes = [modified, [], removed, [], []]
817 changes = [modified, [], removed, [], []]
818 else:
818 else:
819 changes = self.status(match=match)
819 changes = self.status(match=match)
820 else:
820 else:
821 p1, p2 = p1, p2 or nullid
821 p1, p2 = p1, p2 or nullid
822 update_dirstate = (self.dirstate.parents()[0] == p1)
822 update_dirstate = (self.dirstate.parents()[0] == p1)
823 changes = [files, [], [], [], []]
823 changes = [files, [], [], [], []]
824
824
825 ms = merge_.mergestate(self)
825 ms = merge_.mergestate(self)
826 for f in changes[0]:
826 for f in changes[0]:
827 if f in ms and ms[f] == 'u':
827 if f in ms and ms[f] == 'u':
828 raise util.Abort(_("unresolved merge conflicts "
828 raise util.Abort(_("unresolved merge conflicts "
829 "(see hg resolve)"))
829 "(see hg resolve)"))
830 wctx = context.workingctx(self, (p1, p2), text, user, date,
830 wctx = context.workingctx(self, (p1, p2), text, user, date,
831 extra, changes)
831 extra, changes)
832 r = self._commitctx(wctx, force, force_editor, empty_ok,
832 r = self._commitctx(wctx, force, force_editor, empty_ok,
833 use_dirstate, update_dirstate)
833 use_dirstate, update_dirstate)
834 ms.reset()
834 ms.reset()
835 return r
835 return r
836
836
837 finally:
837 finally:
838 release(lock, wlock)
838 release(lock, wlock)
839
839
    def commitctx(self, ctx):
        """Add a new revision to current repository.

        Revision information is passed in the context.memctx argument.
        commitctx() does not touch the working directory.
        """
        wlock = lock = None
        try:
            wlock = self.wlock()
            lock = self.lock()
            return self._commitctx(ctx, force=True, force_editor=False,
                                   empty_ok=True, use_dirstate=False,
                                   update_dirstate=False)
        finally:
            release(lock, wlock)

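    # _commitctx() is the common commit path: it runs the 'precommit'
    # hook, checks in each file with filecommit(), writes the new
    # manifest and changelog entry inside a transaction, fires
    # 'pretxncommit' before tr.close() and 'commit' after it, and only
    # saves the updated dirstate once 'valid' is set.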
    def _commitctx(self, wctx, force=False, force_editor=False, empty_ok=False,
                   use_dirstate=True, update_dirstate=True):
        tr = None
        valid = 0 # don't save the dirstate if this isn't set
        try:
            commit = sorted(wctx.modified() + wctx.added())
            remove = wctx.removed()
            extra = wctx.extra().copy()
            branchname = extra['branch']
            user = wctx.user()
            text = wctx.description()

            p1, p2 = [p.node() for p in wctx.parents()]
            c1 = self.changelog.read(p1)
            c2 = self.changelog.read(p2)
            m1 = self.manifest.read(c1[0]).copy()
            m2 = self.manifest.read(c2[0])

            if use_dirstate:
                oldname = c1[5].get("branch") # stored in UTF-8
                if (not commit and not remove and not force and p2 == nullid
                    and branchname == oldname):
                    self.ui.status(_("nothing changed\n"))
                    return None

            xp1 = hex(p1)
            if p2 == nullid: xp2 = ''
            else: xp2 = hex(p2)

            self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)

            tr = self.transaction()
            trp = weakref.proxy(tr)

            # check in files
            new = {}
            changed = []
            linkrev = len(self)
            for f in commit:
                self.ui.note(f + "\n")
                try:
                    fctx = wctx.filectx(f)
                    newflags = fctx.flags()
                    new[f] = self.filecommit(fctx, m1, m2, linkrev, trp, changed)
                    if ((not changed or changed[-1] != f) and
                        m2.get(f) != new[f]):
                        # mention the file in the changelog if some
                        # flag changed, even if there was no content
                        # change.
                        if m1.flags(f) != newflags:
                            changed.append(f)
                    m1.set(f, newflags)
                    if use_dirstate:
                        self.dirstate.normal(f)

                except (OSError, IOError):
                    if use_dirstate:
                        self.ui.warn(_("trouble committing %s!\n") % f)
                        raise
                    else:
                        remove.append(f)

            updated, added = [], []
            for f in sorted(changed):
                if f in m1 or f in m2:
                    updated.append(f)
                else:
                    added.append(f)

            # update manifest
            m1.update(new)
            removed = [f for f in sorted(remove) if f in m1 or f in m2]
            removed1 = []

            for f in removed:
                if f in m1:
                    del m1[f]
                    removed1.append(f)
            mn = self.manifest.add(m1, trp, linkrev, c1[0], c2[0],
                                   (new, removed1))

            # add changeset
            if (not empty_ok and not text) or force_editor:
                edittext = []
                if text:
                    edittext.append(text)
                    edittext.append("")
                edittext.append("") # Empty line between message and comments.
                edittext.append(_("HG: Enter commit message."
                                  " Lines beginning with 'HG:' are removed."))
                edittext.append("HG: --")
                edittext.append(_("HG: user: %s") % user)
                if p2 != nullid:
                    edittext.append(_("HG: branch merge"))
                if branchname:
                    edittext.append(_("HG: branch '%s'")
                                    % encoding.tolocal(branchname))
                edittext.extend([_("HG: added %s") % f for f in added])
                edittext.extend([_("HG: changed %s") % f for f in updated])
                edittext.extend([_("HG: removed %s") % f for f in removed])
                if not added and not updated and not removed:
                    edittext.append(_("HG: no files changed"))
                edittext.append("")
                # run editor in the repository root
                olddir = os.getcwd()
                os.chdir(self.root)
                text = self.ui.edit("\n".join(edittext), user)
                os.chdir(olddir)

                lines = [line.rstrip() for line in text.rstrip().splitlines()]
                while lines and not lines[0]:
                    del lines[0]
                if not lines and use_dirstate:
                    raise util.Abort(_("empty commit message"))
                text = '\n'.join(lines)

            self.changelog.delayupdate()
            n = self.changelog.add(mn, changed + removed, text, trp, p1, p2,
                                   user, wctx.date(), extra)
            p = lambda: self.changelog.writepending() and self.root or ""
            self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
                      parent2=xp2, pending=p)
            self.changelog.finalize(trp)
            tr.close()

            if self.branchcache:
                self.branchtags()

            if use_dirstate or update_dirstate:
                self.dirstate.setparents(n)
                if use_dirstate:
                    for f in removed:
                        self.dirstate.forget(f)
            valid = 1 # our dirstate updates are complete

            self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
            return n
        finally:
            if not valid: # don't save our updated dirstate
                self.dirstate.invalidate()
            del tr

    def walk(self, match, node=None):
        '''
        walk recursively through the directory tree or a given
        changeset, finding all files matched by the match
        function
        '''
        return self[node].walk(match)

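    # status() returns a 7-tuple of sorted file lists:
    # (modified, added, removed, deleted, unknown, ignored, clean);
    # the unknown, ignored and clean lists are only populated when the
    # corresponding flag is passed.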
    def status(self, node1='.', node2=None, match=None,
               ignored=False, clean=False, unknown=False):
        """return status of files between two nodes or node and working directory

        If node1 is None, use the first dirstate parent instead.
        If node2 is None, compare node1 with working directory.
        """

        def mfmatches(ctx):
            mf = ctx.manifest().copy()
            for fn in mf.keys():
                if not match(fn):
                    del mf[fn]
            return mf

        if isinstance(node1, context.changectx):
            ctx1 = node1
        else:
            ctx1 = self[node1]
        if isinstance(node2, context.changectx):
            ctx2 = node2
        else:
            ctx2 = self[node2]

        working = ctx2.rev() is None
        parentworking = working and ctx1 == self['.']
        match = match or match_.always(self.root, self.getcwd())
        listignored, listclean, listunknown = ignored, clean, unknown

        # load earliest manifest first for caching reasons
        if not working and ctx2.rev() < ctx1.rev():
            ctx2.manifest()

        if not parentworking:
            def bad(f, msg):
                if f not in ctx1:
                    self.ui.warn('%s: %s\n' % (self.dirstate.pathto(f), msg))
                return False
            match.bad = bad

        if working: # we need to scan the working dir
            s = self.dirstate.status(match, listignored, listclean, listunknown)
            cmp, modified, added, removed, deleted, unknown, ignored, clean = s

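            # 'cmp' lists files the dirstate could not classify from
            # stat data alone (e.g. the size matches but the mtime is
            # not reliable); they need a content comparison before they
            # can be reported clean.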
            # check for any possibly clean files
            if parentworking and cmp:
                fixup = []
                # do a full compare of any files that might have changed
                for f in cmp:
                    if (f not in ctx1 or ctx2.flags(f) != ctx1.flags(f)
                        or ctx1[f].cmp(ctx2[f].data())):
                        modified.append(f)
                    else:
                        fixup.append(f)

                if listclean:
                    clean += fixup

                # update dirstate for files that are actually clean
                if fixup:
                    wlock = None
                    try:
                        try:
                            # updating the dirstate is optional
                            # so we don't wait on the lock
                            wlock = self.wlock(False)
                            for f in fixup:
                                self.dirstate.normal(f)
                        except error.LockError:
                            pass
                    finally:
                        release(wlock)

        if not parentworking:
            mf1 = mfmatches(ctx1)
            if working:
                # we are comparing working dir against non-parent
                # generate a pseudo-manifest for the working dir
                mf2 = mfmatches(self['.'])
                for f in cmp + modified + added:
                    mf2[f] = None
                    mf2.set(f, ctx2.flags(f))
                for f in removed:
                    if f in mf2:
                        del mf2[f]
            else:
                # we are comparing two revisions
                deleted, unknown, ignored = [], [], []
                mf2 = mfmatches(ctx2)

            modified, added, clean = [], [], []
            for fn in mf2:
                if fn in mf1:
                    if (mf1.flags(fn) != mf2.flags(fn) or
                        (mf1[fn] != mf2[fn] and
                         (mf2[fn] or ctx1[fn].cmp(ctx2[fn].data())))):
                        modified.append(fn)
                    elif listclean:
                        clean.append(fn)
                    del mf1[fn]
                else:
                    added.append(fn)
            removed = mf1.keys()

        r = modified, added, removed, deleted, unknown, ignored, clean
        [l.sort() for l in r]
        return r

    def add(self, list):
        wlock = self.wlock()
        try:
            rejected = []
            for f in list:
                p = self.wjoin(f)
                try:
                    st = os.lstat(p)
                except:
                    self.ui.warn(_("%s does not exist!\n") % f)
                    rejected.append(f)
                    continue
                if st.st_size > 10000000:
                    self.ui.warn(_("%s: files over 10MB may cause memory and"
                                   " performance problems\n"
                                   "(use 'hg revert %s' to unadd the file)\n")
                                   % (f, f))
                if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
                    self.ui.warn(_("%s not added: only files and symlinks "
                                   "supported currently\n") % f)
                    rejected.append(p)
                elif self.dirstate[f] in 'amn':
                    self.ui.warn(_("%s already tracked!\n") % f)
                elif self.dirstate[f] == 'r':
                    self.dirstate.normallookup(f)
                else:
                    self.dirstate.add(f)
            return rejected
        finally:
            wlock.release()

    def forget(self, list):
        wlock = self.wlock()
        try:
            for f in list:
                if self.dirstate[f] != 'a':
                    self.ui.warn(_("%s not added!\n") % f)
                else:
                    self.dirstate.forget(f)
        finally:
            wlock.release()

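    # remove() with unlink=True deletes the working copies first; a file
    # that still exists afterwards is warned about and left tracked.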
    def remove(self, list, unlink=False):
        wlock = None
        try:
            if unlink:
                for f in list:
                    try:
                        util.unlink(self.wjoin(f))
                    except OSError, inst:
                        if inst.errno != errno.ENOENT:
                            raise
            wlock = self.wlock()
            for f in list:
                if unlink and os.path.exists(self.wjoin(f)):
                    self.ui.warn(_("%s still exists!\n") % f)
                elif self.dirstate[f] == 'a':
                    self.dirstate.forget(f)
                elif f not in self.dirstate:
                    self.ui.warn(_("%s not tracked!\n") % f)
                else:
                    self.dirstate.remove(f)
        finally:
            release(wlock)

    def undelete(self, list):
        manifests = [self.manifest.read(self.changelog.read(p)[0])
                     for p in self.dirstate.parents() if p != nullid]
        wlock = self.wlock()
        try:
            for f in list:
                if self.dirstate[f] != 'r':
                    self.ui.warn(_("%s not removed!\n") % f)
                else:
                    m = f in manifests[0] and manifests[0] or manifests[1]
                    t = self.file(f).read(m[f])
                    self.wwrite(f, t, m.flags(f))
                    self.dirstate.normal(f)
        finally:
            wlock.release()

    def copy(self, source, dest):
        p = self.wjoin(dest)
        if not (os.path.exists(p) or os.path.islink(p)):
            self.ui.warn(_("%s does not exist!\n") % dest)
        elif not (os.path.isfile(p) or os.path.islink(p)):
            self.ui.warn(_("copy failed: %s is not a file or a "
                           "symbolic link\n") % dest)
        else:
            wlock = self.wlock()
            try:
                if self.dirstate[dest] in '?r':
                    self.dirstate.add(dest)
                self.dirstate.copy(source, dest)
            finally:
                wlock.release()

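    # heads() and branchheads() return head nodes in descending revision
    # order; with closed=False, heads whose changeset extra contains a
    # 'close' marker (closed branch heads) are filtered out.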
    def heads(self, start=None, closed=True):
        heads = self.changelog.heads(start)
        def display(head):
            if closed:
                return True
            extras = self.changelog.read(head)[5]
            return ('close' not in extras)
        # sort the output in rev descending order
        heads = [(-self.changelog.rev(h), h) for h in heads if display(h)]
        return [n for (r, n) in sorted(heads)]

    def branchheads(self, branch=None, start=None, closed=True):
        if branch is None:
            branch = self[None].branch()
        branches = self._branchheads()
        if branch not in branches:
            return []
        bheads = branches[branch]
        # the cache returns heads ordered lowest to highest
        bheads.reverse()
        if start is not None:
            # filter out the heads that cannot be reached from startrev
            bheads = self.changelog.nodesbetween([start], bheads)[2]
        if not closed:
            bheads = [h for h in bheads if
                      ('close' not in self.changelog.read(h)[5])]
        return bheads

    def branches(self, nodes):
        if not nodes:
            nodes = [self.changelog.tip()]
        b = []
        for n in nodes:
            t = n
            while 1:
                p = self.changelog.parents(n)
                if p[1] != nullid or p[0] == nullid:
                    b.append((t, n, p[0], p[1]))
                    break
                n = p[0]
        return b

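    # between() walks first parents from "top" towards "bottom" and
    # samples the nodes seen at exponentially growing distances
    # (1, 2, 4, ...); the discovery code below uses these samples to
    # binary-search for the first changeset the remote does not have.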
    def between(self, pairs):
        r = []

        for top, bottom in pairs:
            n, l, i = top, [], 0
            f = 1

            while n != bottom and n != nullid:
                p = self.changelog.parents(n)[0]
                if i == f:
                    l.append(n)
                    f = f * 2
                n = p
                i += 1

            r.append(l)

        return r

    def findincoming(self, remote, base=None, heads=None, force=False):
        """Return list of roots of the subsets of missing nodes from remote

        If base dict is specified, assume that these nodes and their parents
        exist on the remote side and that no child of a node of base exists
        in both remote and self.
        Furthermore base will be updated to include the nodes that exist
        in self and remote but whose children do not exist in both.
        If a list of heads is specified, return only nodes which are heads
        or ancestors of these heads.

        All the ancestors of base are in self and in remote.
        All the descendants of the list returned are missing in self.
        (and so we know that the rest of the nodes are missing in remote, see
        outgoing)
        """
        return self.findcommonincoming(remote, base, heads, force)[1]

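    # The discovery protocol: seed with the remote heads, pull in linear
    # history segments via remote.branches() until every segment either
    # bottoms out at a locally known node or at nullid, then narrow the
    # partially known segments with remote.between() (a binary search
    # over the samples described above).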
    def findcommonincoming(self, remote, base=None, heads=None, force=False):
        """Return a tuple (common, missing roots, heads) used to identify
        missing nodes from remote.

        If base dict is specified, assume that these nodes and their parents
        exist on the remote side and that no child of a node of base exists
        in both remote and self.
        Furthermore base will be updated to include the nodes that exist
        in self and remote but whose children do not exist in both.
        If a list of heads is specified, return only nodes which are heads
        or ancestors of these heads.

        All the ancestors of base are in self and in remote.
        """
        m = self.changelog.nodemap
        search = []
        fetch = set()
        seen = set()
        seenbranch = set()
        if base is None:
            base = {}

        if not heads:
            heads = remote.heads()

        if self.changelog.tip() == nullid:
            base[nullid] = 1
            if heads != [nullid]:
                return [nullid], [nullid], list(heads)
            return [nullid], [], []

        # assume we're closer to the tip than the root
        # and start by examining the heads
        self.ui.status(_("searching for changes\n"))

        unknown = []
        for h in heads:
            if h not in m:
                unknown.append(h)
            else:
                base[h] = 1

        heads = unknown
        if not unknown:
            return base.keys(), [], []

        req = set(unknown)
        reqcnt = 0

        # search through remote branches
        # a 'branch' here is a linear segment of history, with four parts:
        # head, root, first parent, second parent
        # (a branch always has two parents (or none) by definition)
        unknown = remote.branches(unknown)
        while unknown:
            r = []
            while unknown:
                n = unknown.pop(0)
                if n[0] in seen:
                    continue

                self.ui.debug(_("examining %s:%s\n")
                              % (short(n[0]), short(n[1])))
                if n[0] == nullid: # found the end of the branch
                    pass
                elif n in seenbranch:
                    self.ui.debug(_("branch already found\n"))
                    continue
                elif n[1] and n[1] in m: # do we know the base?
                    self.ui.debug(_("found incomplete branch %s:%s\n")
                                  % (short(n[0]), short(n[1])))
                    search.append(n[0:2]) # schedule branch range for scanning
                    seenbranch.add(n)
                else:
                    if n[1] not in seen and n[1] not in fetch:
                        if n[2] in m and n[3] in m:
                            self.ui.debug(_("found new changeset %s\n") %
                                          short(n[1]))
                            fetch.add(n[1]) # earliest unknown
                        for p in n[2:4]:
                            if p in m:
                                base[p] = 1 # latest known

                    for p in n[2:4]:
                        if p not in req and p not in m:
                            r.append(p)
                            req.add(p)
                seen.add(n[0])

            if r:
                reqcnt += 1
                self.ui.debug(_("request %d: %s\n") %
                              (reqcnt, " ".join(map(short, r))))
                for p in xrange(0, len(r), 10):
                    for b in remote.branches(r[p:p+10]):
                        self.ui.debug(_("received %s:%s\n") %
                                      (short(b[0]), short(b[1])))
                        unknown.append(b)

        # do binary search on the branches we found
        while search:
            newsearch = []
            reqcnt += 1
            for n, l in zip(search, remote.between(search)):
                l.append(n[1])
                p = n[0]
                f = 1
                for i in l:
                    self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
                    if i in m:
                        if f <= 2:
                            self.ui.debug(_("found new branch changeset %s\n") %
                                          short(p))
                            fetch.add(p)
                            base[i] = 1
                        else:
                            self.ui.debug(_("narrowed branch search to %s:%s\n")
                                          % (short(p), short(i)))
                            newsearch.append((p, i))
                        break
                    p, f = i, f * 2
            search = newsearch

        # sanity check our fetch list
        for f in fetch:
            if f in m:
                raise error.RepoError(_("already have changeset ")
                                      + short(f[:4]))

        if base.keys() == [nullid]:
            if force:
                self.ui.warn(_("warning: repository is unrelated\n"))
            else:
                raise util.Abort(_("repository is unrelated"))

        self.ui.debug(_("found new changesets starting at ") +
                      " ".join([short(f) for f in fetch]) + "\n")

        self.ui.debug(_("%d total queries\n") % reqcnt)

        return base.keys(), list(fetch), heads

    def findoutgoing(self, remote, base=None, heads=None, force=False):
        """Return list of nodes that are roots of subsets not in remote

        If base dict is specified, assume that these nodes and their parents
        exist on the remote side.
        If a list of heads is specified, return only nodes which are heads
        or ancestors of these heads, and return a second element which
        contains all remote heads which get new children.
        """
        if base is None:
            base = {}
            self.findincoming(remote, base, heads, force=force)

        self.ui.debug(_("common changesets up to ")
                      + " ".join(map(short, base.keys())) + "\n")

        remain = set(self.changelog.nodemap)

        # prune everything remote has from the tree
        remain.remove(nullid)
        remove = base.keys()
        while remove:
            n = remove.pop(0)
            if n in remain:
                remain.remove(n)
                for p in self.changelog.parents(n):
                    remove.append(p)

        # find every node whose parents have been pruned
        subset = []
        # find every remote head that will get new children
        updated_heads = {}
        for n in remain:
            p1, p2 = self.changelog.parents(n)
            if p1 not in remain and p2 not in remain:
                subset.append(n)
            if heads:
                if p1 in heads:
                    updated_heads[p1] = True
                if p2 in heads:
                    updated_heads[p2] = True

        # this is the set of all roots we have to push
        if heads:
            return subset, updated_heads.keys()
        else:
            return subset

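    # pull: figure out what we are missing (discovery above), ask the
    # remote for a changegroup covering exactly those changesets, and
    # hand it to addchangegroup(); changegroupsubset support on the
    # remote is required for partial (rev-limited) pulls.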
    def pull(self, remote, heads=None, force=False):
        lock = self.lock()
        try:
            common, fetch, rheads = self.findcommonincoming(remote, heads=heads,
                                                            force=force)
            if fetch == [nullid]:
                self.ui.status(_("requesting all changes\n"))

            if not fetch:
                self.ui.status(_("no changes found\n"))
                return 0

            if heads is None and remote.capable('changegroupsubset'):
                heads = rheads

            if heads is None:
                cg = remote.changegroup(fetch, 'pull')
            else:
                if not remote.capable('changegroupsubset'):
                    raise util.Abort(_("Partial pull cannot be done because "
                                       "other repository doesn't support "
                                       "changegroupsubset."))
                cg = remote.changegroupsubset(fetch, heads, 'pull')
            return self.addchangegroup(cg, 'pull', remote.url())
        finally:
            lock.release()

    def push(self, remote, force=False, revs=None):
        # there are two ways to push to remote repo:
        #
        # addchangegroup assumes local user can lock remote
        # repo (local filesystem, old ssh servers).
        #
        # unbundle assumes local user cannot lock remote repo (new ssh
        # servers, http servers).

        if remote.capable('unbundle'):
            return self.push_unbundle(remote, force, revs)
        return self.push_addchangegroup(remote, force, revs)

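    # prepush() computes the outgoing changegroup and, unless force is
    # set, refuses to proceed when the push would create new heads on
    # the remote; it returns (changegroup or None, remote heads or exit
    # status) for push_addchangegroup()/push_unbundle() to act on.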
    def prepush(self, remote, force, revs):
        common = {}
        remote_heads = remote.heads()
        inc = self.findincoming(remote, common, remote_heads, force=force)

        update, updated_heads = self.findoutgoing(remote, common, remote_heads)
        if revs is not None:
            msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
        else:
            bases, heads = update, self.changelog.heads()

        if not bases:
            self.ui.status(_("no changes found\n"))
            return None, 1
        elif not force:
            # check if we're creating new remote heads
            # to be a remote head after push, node must be either
            # - unknown locally
            # - a local outgoing head descended from update
            # - a remote head that's known locally and not
            #   ancestral to an outgoing head

            warn = 0

            if remote_heads == [nullid]:
                warn = 0
            elif not revs and len(heads) > len(remote_heads):
                warn = 1
            else:
                newheads = list(heads)
                for r in remote_heads:
                    if r in self.changelog.nodemap:
                        desc = self.changelog.heads(r, heads)
                        l = [h for h in heads if h in desc]
                        if not l:
                            newheads.append(r)
                    else:
                        newheads.append(r)
                if len(newheads) > len(remote_heads):
                    warn = 1

            if warn:
                self.ui.warn(_("abort: push creates new remote heads!\n"))
                self.ui.status(_("(did you forget to merge?"
                                 " use push -f to force)\n"))
                return None, 0
        elif inc:
            self.ui.warn(_("note: unsynced remote changes!\n"))

        if revs is None:
            # use the fast path, no race possible on push
            cg = self._changegroup(common.keys(), 'push')
        else:
            cg = self.changegroupsubset(update, revs, 'push')
        return cg, remote_heads

    def push_addchangegroup(self, remote, force, revs):
        lock = remote.lock()
        try:
            ret = self.prepush(remote, force, revs)
            if ret[0] is not None:
                cg, remote_heads = ret
                return remote.addchangegroup(cg, 'push', self.url())
            return ret[1]
        finally:
            lock.release()

    def push_unbundle(self, remote, force, revs):
        # local repo finds heads on server, finds out what revs it
        # must push. once revs transferred, if server finds it has
        # different heads (someone else won commit/push race), server
        # aborts.

        ret = self.prepush(remote, force, revs)
        if ret[0] is not None:
            cg, remote_heads = ret
            if force: remote_heads = ['force']
            return remote.unbundle(cg, remote_heads, 'push')
        return ret[1]

    def changegroupinfo(self, nodes, source):
        if self.ui.verbose or source == 'bundle':
            self.ui.status(_("%d changesets found\n") % len(nodes))
        if self.ui.debugflag:
            self.ui.debug(_("list of changesets:\n"))
            for node in nodes:
                self.ui.debug("%s\n" % hex(node))

1607 def changegroupsubset(self, bases, heads, source, extranodes=None):
1607 def changegroupsubset(self, bases, heads, source, extranodes=None):
1608 """This function generates a changegroup consisting of all the nodes
1608 """This function generates a changegroup consisting of all the nodes
1609 that are descendents of any of the bases, and ancestors of any of
1609 that are descendents of any of the bases, and ancestors of any of
1610 the heads.
1610 the heads.
1611
1611
1612 It is fairly complex as determining which filenodes and which
1612 It is fairly complex as determining which filenodes and which
1613 manifest nodes need to be included for the changeset to be complete
1613 manifest nodes need to be included for the changeset to be complete
1614 is non-trivial.
1614 is non-trivial.
1615
1615
1616 Another wrinkle is doing the reverse, figuring out which changeset in
1616 Another wrinkle is doing the reverse, figuring out which changeset in
1617 the changegroup a particular filenode or manifestnode belongs to.
1617 the changegroup a particular filenode or manifestnode belongs to.
1618
1618
1619 The caller can specify some nodes that must be included in the
1619 The caller can specify some nodes that must be included in the
1620 changegroup using the extranodes argument. It should be a dict
1620 changegroup using the extranodes argument. It should be a dict
1621 where the keys are the filenames (or 1 for the manifest), and the
1621 where the keys are the filenames (or 1 for the manifest), and the
1622 values are lists of (node, linknode) tuples, where node is a wanted
1622 values are lists of (node, linknode) tuples, where node is a wanted
1623 node and linknode is the changelog node that should be transmitted as
1623 node and linknode is the changelog node that should be transmitted as
1624 the linkrev.
1624 the linkrev.
1625 """
1625 """
1626
1626
1627 if extranodes is None:
1627 if extranodes is None:
1628 # can we go through the fast path ?
1628 # can we go through the fast path ?
1629 heads.sort()
1629 heads.sort()
1630 allheads = self.heads()
1630 allheads = self.heads()
1631 allheads.sort()
1631 allheads.sort()
1632 if heads == allheads:
1632 if heads == allheads:
1633 common = []
1633 common = []
1634 # parents of bases are known from both sides
1634 # parents of bases are known from both sides
1635 for n in bases:
1635 for n in bases:
1636 for p in self.changelog.parents(n):
1636 for p in self.changelog.parents(n):
1637 if p != nullid:
1637 if p != nullid:
1638 common.append(p)
1638 common.append(p)
1639 return self._changegroup(common, source)
1639 return self._changegroup(common, source)
1640
1640
1641 self.hook('preoutgoing', throw=True, source=source)
1641 self.hook('preoutgoing', throw=True, source=source)
1642
1642
1643 # Set up some initial variables
1643 # Set up some initial variables
1644 # Make it easy to refer to self.changelog
1644 # Make it easy to refer to self.changelog
1645 cl = self.changelog
1645 cl = self.changelog
1646 # msng is short for missing - compute the list of changesets in this
1646 # msng is short for missing - compute the list of changesets in this
1647 # changegroup.
1647 # changegroup.
1648 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1648 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1649 self.changegroupinfo(msng_cl_lst, source)
1649 self.changegroupinfo(msng_cl_lst, source)
1650 # Some bases may turn out to be superfluous, and some heads may be
1650 # Some bases may turn out to be superfluous, and some heads may be
1651 # too. nodesbetween will return the minimal set of bases and heads
1651 # too. nodesbetween will return the minimal set of bases and heads
1652 # necessary to re-create the changegroup.
1652 # necessary to re-create the changegroup.
1653
1653
1654 # Known heads are the list of heads that it is assumed the recipient
1654 # Known heads are the list of heads that it is assumed the recipient
1655 # of this changegroup will know about.
1655 # of this changegroup will know about.
1656 knownheads = {}
1656 knownheads = {}
1657 # We assume that all parents of bases are known heads.
1657 # We assume that all parents of bases are known heads.
1658 for n in bases:
1658 for n in bases:
1659 for p in cl.parents(n):
1659 for p in cl.parents(n):
1660 if p != nullid:
1660 if p != nullid:
1661 knownheads[p] = 1
1661 knownheads[p] = 1
1662 knownheads = knownheads.keys()
1662 knownheads = knownheads.keys()
1663 if knownheads:
1663 if knownheads:
1664 # Now that we know what heads are known, we can compute which
1664 # Now that we know what heads are known, we can compute which
1665 # changesets are known. The recipient must know about all
1665 # changesets are known. The recipient must know about all
1666 # changesets required to reach the known heads from the null
1666 # changesets required to reach the known heads from the null
1667 # changeset.
1667 # changeset.
1668 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1668 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1669 junk = None
1669 junk = None
1670 # Transform the list into a set.
1670 # Transform the list into a set.
1671 has_cl_set = set(has_cl_set)
1671 has_cl_set = set(has_cl_set)
1672 else:
1672 else:
1673 # If there were no known heads, the recipient cannot be assumed to
1673 # If there were no known heads, the recipient cannot be assumed to
1674 # know about any changesets.
1674 # know about any changesets.
1675 has_cl_set = set()
1675 has_cl_set = set()
1676
1676
1677 # Make it easy to refer to self.manifest
1677 # Make it easy to refer to self.manifest
1678 mnfst = self.manifest
1678 mnfst = self.manifest
1679 # We don't know which manifests are missing yet
1679 # We don't know which manifests are missing yet
1680 msng_mnfst_set = {}
1680 msng_mnfst_set = {}
1681 # Nor do we know which filenodes are missing.
1681 # Nor do we know which filenodes are missing.
1682 msng_filenode_set = {}
1682 msng_filenode_set = {}
1683
1683
1684 junk = mnfst.index[len(mnfst) - 1] # Get around a bug in lazyindex
1684 junk = mnfst.index[len(mnfst) - 1] # Get around a bug in lazyindex
1685 junk = None
1685 junk = None
1686
1686
1687 # A changeset always belongs to itself, so the changenode lookup
1687 # A changeset always belongs to itself, so the changenode lookup
1688 # function for a changenode is identity.
1688 # function for a changenode is identity.
1689 def identity(x):
1689 def identity(x):
1690 return x
1690 return x
1691
1691
1692 # A function generating function. Sets up an environment for the
1692 # A function generating function. Sets up an environment for the
1693 # inner function.
1693 # inner function.
1694 def cmp_by_rev_func(revlog):
1694 def cmp_by_rev_func(revlog):
1695 # Compare two nodes by their revision number in the environment's
1695 # Compare two nodes by their revision number in the environment's
1696 # revision history. Since the revision number both represents the
1696 # revision history. Since the revision number both represents the
1697 # most efficient order to read the nodes in, and represents a
1697 # most efficient order to read the nodes in, and represents a
1698 # topological sorting of the nodes, this function is often useful.
1698 # topological sorting of the nodes, this function is often useful.
1699 def cmp_by_rev(a, b):
1699 def cmp_by_rev(a, b):
1700 return cmp(revlog.rev(a), revlog.rev(b))
1700 return cmp(revlog.rev(a), revlog.rev(b))
1701 return cmp_by_rev
1701 return cmp_by_rev
1702
1702
1703 # If we determine that a particular file or manifest node must be a
1703 # If we determine that a particular file or manifest node must be a
1704 # node that the recipient of the changegroup will already have, we can
1704 # node that the recipient of the changegroup will already have, we can
1705 # also assume the recipient will have all the parents. This function
1705 # also assume the recipient will have all the parents. This function
1706 # prunes them from the set of missing nodes.
1706 # prunes them from the set of missing nodes.
1707 def prune_parents(revlog, hasset, msngset):
1707 def prune_parents(revlog, hasset, msngset):
1708 haslst = hasset.keys()
1708 haslst = hasset.keys()
1709 haslst.sort(cmp_by_rev_func(revlog))
1709 haslst.sort(cmp_by_rev_func(revlog))
1710 for node in haslst:
1710 for node in haslst:
1711 parentlst = [p for p in revlog.parents(node) if p != nullid]
1711 parentlst = [p for p in revlog.parents(node) if p != nullid]
1712 while parentlst:
1712 while parentlst:
1713 n = parentlst.pop()
1713 n = parentlst.pop()
1714 if n not in hasset:
1714 if n not in hasset:
1715 hasset[n] = 1
1715 hasset[n] = 1
1716 p = [p for p in revlog.parents(n) if p != nullid]
1716 p = [p for p in revlog.parents(n) if p != nullid]
1717 parentlst.extend(p)
1717 parentlst.extend(p)
1718 for n in hasset:
1718 for n in hasset:
1719 msngset.pop(n, None)
1719 msngset.pop(n, None)
1720
1720
1721 # This is a function generating function used to set up an environment
1721 # This is a function generating function used to set up an environment
1722 # for the inner function to execute in.
1722 # for the inner function to execute in.
1723 def manifest_and_file_collector(changedfileset):
1723 def manifest_and_file_collector(changedfileset):
1724 # This is an information gathering function that gathers
1724 # This is an information gathering function that gathers
1725 # information from each changeset node that goes out as part of
1725 # information from each changeset node that goes out as part of
1726 # the changegroup. The information gathered is a list of which
1726 # the changegroup. The information gathered is a list of which
1727 # manifest nodes are potentially required (the recipient may
1727 # manifest nodes are potentially required (the recipient may
1728 # already have them) and total list of all files which were
1728 # already have them) and total list of all files which were
1729 # changed in any changeset in the changegroup.
1729 # changed in any changeset in the changegroup.
1730 #
1730 #
1731 # We also remember the first changenode we saw any manifest
1731 # We also remember the first changenode we saw any manifest
1732 # referenced by so we can later determine which changenode 'owns'
1732 # referenced by so we can later determine which changenode 'owns'
1733 # the manifest.
1733 # the manifest.
1734 def collect_manifests_and_files(clnode):
1734 def collect_manifests_and_files(clnode):
1735 c = cl.read(clnode)
1735 c = cl.read(clnode)
1736 for f in c[3]:
1736 for f in c[3]:
1737 # This is to make sure we only have one instance of each
1737 # This is to make sure we only have one instance of each
1738 # filename string for each filename.
1738 # filename string for each filename.
1739 changedfileset.setdefault(f, f)
1739 changedfileset.setdefault(f, f)
1740 msng_mnfst_set.setdefault(c[0], clnode)
1740 msng_mnfst_set.setdefault(c[0], clnode)
1741 return collect_manifests_and_files
1741 return collect_manifests_and_files
1742
1742
        # Figure out which manifest nodes (of the ones we think might be part
        # of the changegroup) the recipient must know about and remove them
        # from the changegroup.
        def prune_manifests():
            has_mnfst_set = {}
            for n in msng_mnfst_set:
                # If a 'missing' manifest thinks it belongs to a changenode
                # the recipient is assumed to have, obviously the recipient
                # must have that manifest.
                linknode = cl.node(mnfst.linkrev(mnfst.rev(n)))
                if linknode in has_cl_set:
                    has_mnfst_set[n] = 1
            prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)

        # Use the information collected in collect_manifests_and_files to say
        # which changenode any manifestnode belongs to.
        def lookup_manifest_link(mnfstnode):
            return msng_mnfst_set[mnfstnode]

        # A function generating function that sets up the initial environment
        # for the inner function.
        def filenode_collector(changedfiles):
            next_rev = [0]
            # This gathers information from each manifestnode included in the
            # changegroup about which filenodes the manifest node references
            # so we can include those in the changegroup too.
            #
            # It also remembers which changenode each filenode belongs to.  It
            # does this by assuming that a filenode belongs to the changenode
            # that the first manifest referencing it belongs to.
            def collect_msng_filenodes(mnfstnode):
                r = mnfst.rev(mnfstnode)
                if r == next_rev[0]:
                    # If the last rev we looked at was the one just previous,
                    # we only need to see a diff.
                    deltamf = mnfst.readdelta(mnfstnode)
                    # For each line in the delta
                    for f, fnode in deltamf.iteritems():
                        f = changedfiles.get(f, None)
                        # And if the file is in the list of files we care
                        # about.
                        if f is not None:
                            # Get the changenode this manifest belongs to
                            clnode = msng_mnfst_set[mnfstnode]
                            # Create the set of filenodes for the file if
                            # there isn't one already.
                            ndset = msng_filenode_set.setdefault(f, {})
                            # And set the filenode's changelog node to the
                            # manifest's if it hasn't been set already.
                            ndset.setdefault(fnode, clnode)
                else:
                    # Otherwise we need a full manifest.
                    m = mnfst.read(mnfstnode)
                    # For every file we care about.
                    for f in changedfiles:
                        fnode = m.get(f, None)
                        # If it's in the manifest
                        if fnode is not None:
                            # See comments above.
                            clnode = msng_mnfst_set[mnfstnode]
                            ndset = msng_filenode_set.setdefault(f, {})
                            ndset.setdefault(fnode, clnode)
                # Remember the revision we hope to see next.
                next_rev[0] = r + 1
            return collect_msng_filenodes

        # We have a list of filenodes we think we need for a file, let's remove
        # all those we know the recipient must have.
        def prune_filenodes(f, filerevlog):
            msngset = msng_filenode_set[f]
            hasset = {}
            # If a 'missing' filenode thinks it belongs to a changenode we
            # assume the recipient must have, then the recipient must have
            # that filenode.
            for n in msngset:
                clnode = cl.node(filerevlog.linkrev(filerevlog.rev(n)))
                if clnode in has_cl_set:
                    hasset[n] = 1
            prune_parents(filerevlog, hasset, msngset)

        # A function generator function that sets up a context for the
        # inner function.
        def lookup_filenode_link_func(fname):
            msngset = msng_filenode_set[fname]
            # Lookup the changenode the filenode belongs to.
            def lookup_filenode_link(fnode):
                return msngset[fnode]
            return lookup_filenode_link

        # Add the nodes that were explicitly requested.
        def add_extra_nodes(name, nodes):
            if not extranodes or name not in extranodes:
                return

            for node, linknode in extranodes[name]:
                if node not in nodes:
                    nodes[node] = linknode

        # Now that we have all these utility functions to help out and
        # logically divide up the task, generate the group.
        def gengroup():
            # The set of changed files starts empty.
            changedfiles = {}
            # Create a changenode group generator that will call our functions
            # back to lookup the owning changenode and collect information.
            group = cl.group(msng_cl_lst, identity,
                             manifest_and_file_collector(changedfiles))
            for chnk in group:
                yield chnk

            # The list of manifests has been collected by the generator
            # calling our functions back.
            prune_manifests()
            add_extra_nodes(1, msng_mnfst_set)
            msng_mnfst_lst = msng_mnfst_set.keys()
            # Sort the manifestnodes by revision number.
            msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
            # Create a generator for the manifestnodes that calls our lookup
            # and data collection functions back.
            group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
                                filenode_collector(changedfiles))
            for chnk in group:
                yield chnk

            # These are no longer needed, dereference and toss the memory for
            # them.
            msng_mnfst_lst = None
            msng_mnfst_set.clear()

            if extranodes:
                for fname in extranodes:
                    if isinstance(fname, int):
                        continue
                    msng_filenode_set.setdefault(fname, {})
                    changedfiles[fname] = 1
            # Go through all our files in order sorted by name.
            for fname in sorted(changedfiles):
                filerevlog = self.file(fname)
                if not len(filerevlog):
                    raise util.Abort(_("empty or missing revlog for %s") % fname)
                # Toss out the filenodes that the recipient isn't really
                # missing.
                if fname in msng_filenode_set:
                    prune_filenodes(fname, filerevlog)
                    add_extra_nodes(fname, msng_filenode_set[fname])
                    msng_filenode_lst = msng_filenode_set[fname].keys()
                else:
                    msng_filenode_lst = []
                # If any filenodes are left, generate the group for them,
                # otherwise don't bother.
                if len(msng_filenode_lst) > 0:
                    yield changegroup.chunkheader(len(fname))
                    yield fname
                    # Sort the filenodes by their revision #
                    msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
                    # Create a group generator and only pass in a changenode
                    # lookup function as we need to collect no information
                    # from filenodes.
                    group = filerevlog.group(msng_filenode_lst,
                                             lookup_filenode_link_func(fname))
                    for chnk in group:
                        yield chnk
                if fname in msng_filenode_set:
                    # Don't need this anymore, toss it to free memory.
                    del msng_filenode_set[fname]
            # Signal that no more groups are left.
            yield changegroup.closechunk()

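        # For reference (a summary added here, not part of the original
        # source), the byte stream produced by gengroup() above is laid
        # out as:
        #
        #   changelog group chunks
        #   manifest group chunks
        #   for each changed file:
        #       chunkheader(len(fname)), fname, filelog group chunks
        #   closechunk()   # empty chunk signalling the end of the stream
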
        if msng_cl_lst:
            self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)

        return util.chunkbuffer(gengroup())

    def changegroup(self, basenodes, source):
        # to avoid a race we use changegroupsubset() (issue1320)
        return self.changegroupsubset(basenodes, self.heads(), source)

    def _changegroup(self, common, source):
        """Generate a changegroup of all nodes that we have that a recipient
        doesn't.

        This is much easier than the previous function as we can assume that
        the recipient has any changenode we aren't sending them.

        common is the set of common nodes between remote and self"""

        self.hook('preoutgoing', throw=True, source=source)

        cl = self.changelog
        nodes = cl.findmissing(common)
        revset = set([cl.rev(n) for n in nodes])
        self.changegroupinfo(nodes, source)

        def identity(x):
            return x

        def gennodelst(log):
            for r in log:
                if log.linkrev(r) in revset:
                    yield log.node(r)

        def changed_file_collector(changedfileset):
            def collect_changed_files(clnode):
                c = cl.read(clnode)
                for fname in c[3]:
                    changedfileset[fname] = 1
            return collect_changed_files

        def lookuprevlink_func(revlog):
            def lookuprevlink(n):
                return cl.node(revlog.linkrev(revlog.rev(n)))
            return lookuprevlink

        def gengroup():
            # construct a list of all changed files
            changedfiles = {}

            for chnk in cl.group(nodes, identity,
                                 changed_file_collector(changedfiles)):
                yield chnk

            mnfst = self.manifest
            nodeiter = gennodelst(mnfst)
            for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
                yield chnk

            for fname in sorted(changedfiles):
                filerevlog = self.file(fname)
                if not len(filerevlog):
                    raise util.Abort(_("empty or missing revlog for %s") % fname)
                nodeiter = gennodelst(filerevlog)
                nodeiter = list(nodeiter)
                if nodeiter:
                    yield changegroup.chunkheader(len(fname))
                    yield fname
                    lookup = lookuprevlink_func(filerevlog)
                    for chnk in filerevlog.group(nodeiter, lookup):
                        yield chnk

            yield changegroup.closechunk()

        if nodes:
            self.hook('outgoing', node=hex(nodes[0]), source=source)

        return util.chunkbuffer(gengroup())

    def addchangegroup(self, source, srctype, url, emptyok=False):
        """add changegroup to repo.

        return values:
        - nothing changed or no source: 0
        - more heads than before: 1+added heads (2..n)
        - fewer heads than before: -1-removed heads (-2..-n)
        - number of heads stays the same: 1
        """
        def csmap(x):
            self.ui.debug(_("add changeset %s\n") % short(x))
            return len(cl)

        def revmap(x):
            return cl.rev(x)

        if not source:
            return 0

        self.hook('prechangegroup', throw=True, source=srctype, url=url)

        changesets = files = revisions = 0

        # write changelog data to temp files so concurrent readers will not
        # see an inconsistent view
        cl = self.changelog
        cl.delayupdate()
        oldheads = len(cl.heads())

        tr = self.transaction()
        try:
            trp = weakref.proxy(tr)
            # pull off the changeset group
            self.ui.status(_("adding changesets\n"))
            cor = len(cl) - 1
            chunkiter = changegroup.chunkiter(source)
            if cl.addgroup(chunkiter, csmap, trp) is None and not emptyok:
                raise util.Abort(_("received changelog group is empty"))
            cnr = len(cl) - 1
            changesets = cnr - cor

            # pull off the manifest group
            self.ui.status(_("adding manifests\n"))
            chunkiter = changegroup.chunkiter(source)
            # no need to check for empty manifest group here:
            # if the result of the merge of 1 and 2 is the same in 3 and 4,
            # no new manifest will be created and the manifest group will
            # be empty during the pull
            self.manifest.addgroup(chunkiter, revmap, trp)

            # process the files
            self.ui.status(_("adding file changes\n"))
            while 1:
                f = changegroup.getchunk(source)
                if not f:
                    break
                self.ui.debug(_("adding %s revisions\n") % f)
                fl = self.file(f)
                o = len(fl)
                chunkiter = changegroup.chunkiter(source)
                if fl.addgroup(chunkiter, revmap, trp) is None:
                    raise util.Abort(_("received file revlog group is empty"))
                revisions += len(fl) - o
                files += 1

            newheads = len(self.changelog.heads())
            heads = ""
            if oldheads and newheads != oldheads:
                heads = _(" (%+d heads)") % (newheads - oldheads)

            self.ui.status(_("added %d changesets"
                             " with %d changes to %d files%s\n")
                           % (changesets, revisions, files, heads))

            if changesets > 0:
                p = lambda: self.changelog.writepending() and self.root or ""
                self.hook('pretxnchangegroup', throw=True,
                          node=hex(self.changelog.node(cor+1)), source=srctype,
                          url=url, pending=p)

            # make changelog see real files again
            cl.finalize(trp)

            tr.close()
        finally:
            del tr

        if changesets > 0:
            # forcefully update the on-disk branch cache
            self.ui.debug(_("updating the branch cache\n"))
            self.branchtags()
            self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
                      source=srctype, url=url)

            for i in xrange(cor + 1, cnr + 1):
                self.hook("incoming", node=hex(self.changelog.node(i)),
                          source=srctype, url=url)

        # never return 0 here:
        if newheads < oldheads:
            return newheads - oldheads - 1
        else:
            return newheads - oldheads + 1

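    # Illustrative sketch (not in the original source) of how a caller
    # might interpret addchangegroup()'s return value, per the docstring
    # above; 'repo', 'cg' and 'url' are assumed to be in scope:
    #
    #   ret = repo.addchangegroup(cg, 'pull', url)
    #   if ret == 0:
    #       pass              # nothing was added
    #   elif ret > 1:
    #       pass              # ret - 1 new heads were created
    #   elif ret < 0:
    #       pass              # -ret - 1 heads were removed
    #   else:                 # ret == 1
    #       pass              # changesets added, head count unchanged
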
    def stream_in(self, remote):
        fp = remote.stream_out()
        l = fp.readline()
        try:
            resp = int(l)
        except ValueError:
            raise error.ResponseError(
                _('Unexpected response from remote server:'), l)
        if resp == 1:
            raise util.Abort(_('operation forbidden by server'))
        elif resp == 2:
            raise util.Abort(_('locking the remote repository failed'))
        elif resp != 0:
            raise util.Abort(_('the server sent an unknown error code'))
        self.ui.status(_('streaming all changes\n'))
        l = fp.readline()
        try:
            total_files, total_bytes = map(int, l.split(' ', 1))
        except (ValueError, TypeError):
            raise error.ResponseError(
                _('Unexpected response from remote server:'), l)
        self.ui.status(_('%d files to transfer, %s of data\n') %
                       (total_files, util.bytecount(total_bytes)))
        start = time.time()
        for i in xrange(total_files):
            # XXX doesn't support '\n' or '\r' in filenames
            l = fp.readline()
            try:
                name, size = l.split('\0', 1)
                size = int(size)
            except (ValueError, TypeError):
                raise error.ResponseError(
                    _('Unexpected response from remote server:'), l)
            self.ui.debug(_('adding %s (%s)\n') % (name, util.bytecount(size)))
            ofp = self.sopener(name, 'w')
            for chunk in util.filechunkiter(fp, limit=size):
                ofp.write(chunk)
            ofp.close()
        elapsed = time.time() - start
        if elapsed <= 0:
            elapsed = 0.001
        self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
                       (util.bytecount(total_bytes), elapsed,
                        util.bytecount(total_bytes / elapsed)))
        self.invalidate()
        return len(self.heads()) + 1

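    # The wire format consumed above (summarized here for reference, not
    # part of the original source) is:
    #
    #   <status>\n                     0 = ok, 1 = forbidden, 2 = lock failed
    #   <total_files> <total_bytes>\n
    #   then, for each file:
    #   <name>\0<size>\n               followed by exactly <size> bytes of data
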
    def clone(self, remote, heads=[], stream=False):
        '''clone remote repository.

        keyword arguments:
        heads: list of revs to clone (forces use of pull)
        stream: use streaming clone if possible'''

        # now, all clients that can request uncompressed clones can
        # read repo formats supported by all servers that can serve
        # them.

        # if revlog format changes, client will have to check version
        # and format flags on "stream" capability, and use
        # uncompressed only if compatible.

        if stream and not heads and remote.capable('stream'):
            return self.stream_in(remote)
        return self.pull(remote, heads)

# used to avoid circular references so destructors work
def aftertrans(files):
    renamefiles = [tuple(t) for t in files]
    def a():
        for src, dest in renamefiles:
            util.rename(src, dest)
    return a

def instance(ui, path, create):
    return localrepository(ui, util.drop_scheme('file', path), create)

def islocal(path):
    return True

--- a/lock.py
+++ b/lock.py
@@ -1,127 +1,128 @@

# lock.py - simple locking scheme for mercurial
#
# Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2, incorporated herein by reference.

-import errno, os, socket, time, util, error
+import util, error
+import errno, os, socket, time
 import warnings

class lock(object):
    # lock is a symlink on platforms that support it, a file on others.

    # a symlink is used because creating the directory entry and its
    # contents is atomic even over nfs.

    # old-style lock: symlink to pid
    # new-style lock: symlink to hostname:pid

    _host = None

    def __init__(self, file, timeout=-1, releasefn=None, desc=None):
        self.f = file
        self.held = 0
        self.timeout = timeout
        self.releasefn = releasefn
        self.desc = desc
        self.lock()

    def __del__(self):
        if self.held:
            warnings.warn("use lock.release instead of del lock",
                          category=DeprecationWarning,
                          stacklevel=2)

            # ensure the lock will be removed
            # even if recursive locking did occur
            self.held = 1

            self.release()

    def lock(self):
        timeout = self.timeout
        while 1:
            try:
                self.trylock()
                return 1
            except error.LockHeld, inst:
                if timeout != 0:
                    time.sleep(1)
                    if timeout > 0:
                        timeout -= 1
                    continue
                raise error.LockHeld(errno.ETIMEDOUT, inst.filename, self.desc,
                                     inst.locker)

    def trylock(self):
        if self.held:
            self.held += 1
            return
        if lock._host is None:
            lock._host = socket.gethostname()
        lockname = '%s:%s' % (lock._host, os.getpid())
        while not self.held:
            try:
                util.makelock(lockname, self.f)
                self.held = 1
            except (OSError, IOError), why:
                if why.errno == errno.EEXIST:
                    locker = self.testlock()
                    if locker is not None:
                        raise error.LockHeld(errno.EAGAIN, self.f, self.desc,
                                             locker)
                else:
                    raise error.LockUnavailable(why.errno, why.strerror,
                                                why.filename, self.desc)

    def testlock(self):
        """return id of locker if lock is valid, else None.

        If old-style lock, we cannot tell what machine locker is on.
        with new-style lock, if locker is on this machine, we can
        see if locker is alive.  If locker is on this machine but
        not alive, we can safely break lock.

        The lock file is only deleted when None is returned.

        """
        locker = util.readlock(self.f)
        try:
            host, pid = locker.split(":", 1)
        except ValueError:
            return locker
        if host != lock._host:
            return locker
        try:
            pid = int(pid)
        except:
            return locker
        if util.testpid(pid):
            return locker
        # if locker dead, break lock.  must do this with another lock
        # held, or can race and break valid lock.
        try:
            l = lock(self.f + '.break')
            l.trylock()
            os.unlink(self.f)
            l.release()
        except error.LockError:
            return locker

    def release(self):
        if self.held > 1:
            self.held -= 1
        elif self.held == 1:
            self.held = 0
            if self.releasefn:
                self.releasefn()
            try:
                os.unlink(self.f)
            except: pass

def release(*locks):
    for lock in locks:
        if lock is not None:
            lock.release()

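# Illustrative usage sketch (not part of the original source); the lock
# path and description are made up for the example:
#
#   l = lock('/repo/.hg/wlock', timeout=5, desc='working copy of repo')
#   try:
#       pass               # do work while holding the lock
#   finally:
#       l.release()        # or release(l, other_lock) for several at once
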
--- a/mail.py
+++ b/mail.py
@@ -1,170 +1,170 @@

# mail.py - mail sending bits for mercurial
#
# Copyright 2006 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2, incorporated herein by reference.

 from i18n import _
+import util, encoding
 import os, smtplib, socket
 import email.Header, email.MIMEText, email.Utils
-import util, encoding

def _smtp(ui):
    '''build an smtp connection and return a function to send mail'''
    local_hostname = ui.config('smtp', 'local_hostname')
    s = smtplib.SMTP(local_hostname=local_hostname)
    mailhost = ui.config('smtp', 'host')
    if not mailhost:
        raise util.Abort(_('no [smtp]host in hgrc - cannot send mail'))
    mailport = int(ui.config('smtp', 'port', 25))
    ui.note(_('sending mail: smtp host %s, port %s\n') %
            (mailhost, mailport))
    s.connect(host=mailhost, port=mailport)
    if ui.configbool('smtp', 'tls'):
        if not hasattr(socket, 'ssl'):
            raise util.Abort(_("can't use TLS: Python SSL support "
                               "not installed"))
        ui.note(_('(using tls)\n'))
        s.ehlo()
        s.starttls()
        s.ehlo()
    username = ui.config('smtp', 'username')
    password = ui.config('smtp', 'password')
    if username and not password:
        password = ui.getpass()
    if username and password:
        ui.note(_('(authenticating to mail server as %s)\n') %
                (username))
        s.login(username, password)

    def send(sender, recipients, msg):
        try:
            return s.sendmail(sender, recipients, msg)
        except smtplib.SMTPRecipientsRefused, inst:
            recipients = [r[1] for r in inst.recipients.values()]
            raise util.Abort('\n' + '\n'.join(recipients))
        except smtplib.SMTPException, inst:
            raise util.Abort(inst)

    return send

def _sendmail(ui, sender, recipients, msg):
    '''send mail using sendmail.'''
    program = ui.config('email', 'method')
    cmdline = '%s -f %s %s' % (program, util.email(sender),
                               ' '.join(map(util.email, recipients)))
    ui.note(_('sending mail: %s\n') % cmdline)
    fp = util.popen(cmdline, 'w')
    fp.write(msg)
    ret = fp.close()
    if ret:
        raise util.Abort('%s %s' % (
            os.path.basename(program.split(None, 1)[0]),
            util.explain_exit(ret)[0]))

def connect(ui):
    '''make a mail connection. return a function to send mail.
    call as sendmail(sender, list-of-recipients, msg).'''
    if ui.config('email', 'method', 'smtp') == 'smtp':
        return _smtp(ui)
    return lambda s, r, m: _sendmail(ui, s, r, m)

def sendmail(ui, sender, recipients, msg):
    send = connect(ui)
    return send(sender, recipients, msg)

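# Illustrative usage sketch (not part of the original source); 'ui' and
# the addresses are assumptions for the example:
#
#   msg = mimeencode(ui, 'hello').as_string()
#   sendmail(ui, 'me@example.com', ['you@example.com'], msg)
#
# Depending on the [email] method setting this goes through SMTP or a
# sendmail-like program; validateconfig(ui) below can be used to fail
# early if neither is usable.
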
def validateconfig(ui):
    '''determine if we have enough config data to try sending email.'''
    method = ui.config('email', 'method', 'smtp')
    if method == 'smtp':
        if not ui.config('smtp', 'host'):
            raise util.Abort(_('smtp specified as email transport, '
                               'but no smtp host configured'))
    else:
        if not util.find_exe(method):
            raise util.Abort(_('%r specified as email transport, '
                               'but not in PATH') % method)

def mimetextpatch(s, subtype='plain', display=False):
    '''If patch is in utf-8, transfer-encode it.'''
    if not display:
        for cs in ('us-ascii', 'utf-8'):
            try:
                s.decode(cs)
                return email.MIMEText.MIMEText(s, subtype, cs)
            except UnicodeDecodeError:
                pass
    return email.MIMEText.MIMEText(s, subtype)

def _charsets(ui):
    '''Obtains charsets to send mail parts not containing patches.'''
    charsets = [cs.lower() for cs in ui.configlist('email', 'charsets')]
    fallbacks = [encoding.fallbackencoding.lower(),
                 encoding.encoding.lower(), 'utf-8']
    for cs in fallbacks: # util.unique does not keep order
        if cs not in charsets:
            charsets.append(cs)
    return [cs for cs in charsets if not cs.endswith('ascii')]

def _encode(ui, s, charsets):
    '''Returns (converted) string, charset tuple.
    Finds out best charset by cycling through sendcharsets in descending
    order.  Tries both encoding and fallbackencoding for input.  Only as
    last resort send as is in fake ascii.
    Caveat: Do not use for mail parts containing patches!'''
    try:
        s.decode('ascii')
    except UnicodeDecodeError:
        sendcharsets = charsets or _charsets(ui)
        for ics in (encoding.encoding, encoding.fallbackencoding):
            try:
                u = s.decode(ics)
            except UnicodeDecodeError:
                continue
            for ocs in sendcharsets:
                try:
                    return u.encode(ocs), ocs
                except UnicodeEncodeError:
                    pass
                except LookupError:
                    ui.warn(_('ignoring invalid sendcharset: %s\n') % ocs)
    # if ascii, or all conversion attempts fail, send (broken) ascii
    return s, 'us-ascii'

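# Illustrative sketch (not part of the original source): with
# encoding.encoding == 'iso-8859-1' and no [email] charsets configured,
# _charsets(ui) yields something like ['iso-8859-1', 'utf-8'], and
# _encode() returns the first conversion that succeeds, e.g.:
#
#   _encode(ui, 'caf\xe9', None)   # -> ('caf\xe9', 'iso-8859-1')
#   _encode(ui, 'hello', None)     # -> ('hello', 'us-ascii')
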
def headencode(ui, s, charsets=None, display=False):
    '''Returns RFC-2047 compliant header from given string.'''
    if not display:
        # split into words?
        s, cs = _encode(ui, s, charsets)
        return str(email.Header.Header(s, cs))
    return s

def addressencode(ui, address, charsets=None, display=False):
    '''Turns address into RFC-2047 compliant header.'''
    if display or not address:
        return address or ''
    name, addr = email.Utils.parseaddr(address)
    name = headencode(ui, name, charsets)
    try:
        acc, dom = addr.split('@')
        acc = acc.encode('ascii')
        dom = dom.encode('idna')
        addr = '%s@%s' % (acc, dom)
    except UnicodeDecodeError:
        raise util.Abort(_('invalid email address: %s') % addr)
    except ValueError:
        try:
            # too strict?
            addr = addr.encode('ascii')
        except UnicodeDecodeError:
            raise util.Abort(_('invalid local address: %s') % addr)
    return email.Utils.formataddr((name, addr))

def mimeencode(ui, s, charsets=None, display=False):
    '''creates mime text object, encodes it if needed, and sets
    charset and transfer-encoding accordingly.'''
    cs = 'us-ascii'
    if not display:
        s, cs = _encode(ui, s, charsets)
    return email.MIMEText.MIMEText(s, 'plain', cs)

--- a/manifest.py
+++ b/manifest.py
@@ -1,200 +1,201 @@

# manifest.py - manifest revision class for mercurial
#
# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2, incorporated herein by reference.

 from i18n import _
-import array, struct, mdiff, parsers, util, error, revlog
+import mdiff, parsers, util, error, revlog
+import array, struct

class manifestdict(dict):
    def __init__(self, mapping=None, flags=None):
        if mapping is None: mapping = {}
        if flags is None: flags = {}
        dict.__init__(self, mapping)
        self._flags = flags
    def flags(self, f):
        return self._flags.get(f, "")
    def set(self, f, flags):
        self._flags[f] = flags
    def copy(self):
        return manifestdict(dict.copy(self), dict.copy(self._flags))

class manifest(revlog.revlog):
    def __init__(self, opener):
        self.mapcache = None
        self.listcache = None
        revlog.revlog.__init__(self, opener, "00manifest.i")

    def parse(self, lines):
        mfdict = manifestdict()
        parsers.parse_manifest(mfdict, mfdict._flags, lines)
        return mfdict

    def readdelta(self, node):
        r = self.rev(node)
        return self.parse(mdiff.patchtext(self.revdiff(r - 1, r)))

    def read(self, node):
        if node == revlog.nullid:
            return manifestdict() # don't upset local cache
        if self.mapcache and self.mapcache[0] == node:
            return self.mapcache[1]
        text = self.revision(node)
        self.listcache = array.array('c', text)
        mapping = self.parse(text)
        self.mapcache = (node, mapping)
        return mapping

    def _search(self, m, s, lo=0, hi=None):
        '''return a tuple (start, end) that says where to find s within m.

        If the string is found, m[start:end] is the line containing
        that string.  If start == end the string was not found and
        they indicate the proper sorted insertion point.  This was
        taken from bisect_left, and modified to find line start/end as
        it goes along.

        m should be a buffer or a string
        s is a string'''
        def advance(i, c):
            while i < lenm and m[i] != c:
                i += 1
            return i
        if not s:
            return (lo, lo)
        lenm = len(m)
        if not hi:
            hi = lenm
        while lo < hi:
            mid = (lo + hi) // 2
            start = mid
            while start > 0 and m[start-1] != '\n':
                start -= 1
            end = advance(start, '\0')
            if m[start:end] < s:
                # we know that after the null there are 40 bytes of sha1
                # this translates to the bisect lo = mid + 1
                lo = advance(end + 40, '\n') + 1
            else:
                # this translates to the bisect hi = mid
                hi = start
        end = advance(lo, '\0')
        found = m[lo:end]
        if cmp(s, found) == 0:
            # we know that after the null there are 40 bytes of sha1
            end = advance(end + 40, '\n')
            return (lo, end+1)
        else:
            return (lo, lo)

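    # For reference (a summary added here, not part of the original
    # source), each manifest line that _search() bisects over has the
    # fixed shape
    #
    #   <filename>\0<40 hex sha1 chars><optional flags>\n
    #
    # which is why the code above can skip 40 bytes past the null to
    # find the end of a line.
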
92 def find(self, node, f):
93 def find(self, node, f):
93 '''look up entry for a single file efficiently.
94 '''look up entry for a single file efficiently.
94 return (node, flags) pair if found, (None, None) if not.'''
95 return (node, flags) pair if found, (None, None) if not.'''
        if self.mapcache and node == self.mapcache[0]:
            return self.mapcache[1].get(f), self.mapcache[1].flags(f)
        text = self.revision(node)
        start, end = self._search(text, f)
        if start == end:
            return None, None
        l = text[start:end]
        f, n = l.split('\0')
        return revlog.bin(n[:40]), n[40:-1]

    def add(self, map, transaction, link, p1=None, p2=None,
            changed=None):
        # apply the changes collected during the bisect loop to our addlist
        # return a delta suitable for addrevision
        def addlistdelta(addlist, x):
            # start from the bottom up
            # so changes to the offsets don't mess things up.
            i = len(x)
            while i > 0:
                i -= 1
                start = x[i][0]
                end = x[i][1]
                if x[i][2]:
                    addlist[start:end] = array.array('c', x[i][2])
                else:
                    del addlist[start:end]
            return "".join([struct.pack(">lll", d[0], d[1], len(d[2])) + d[2]
                            for d in x ])

        def checkforbidden(l):
            for f in l:
                if '\n' in f or '\r' in f:
                    raise error.RevlogError(
                        _("'\\n' and '\\r' disallowed in filenames: %r") % f)

        # if we're using the listcache, make sure it is valid and
        # parented by the same node we're diffing against
        if not (changed and self.listcache and p1 and self.mapcache[0] == p1):
            files = sorted(map)
            checkforbidden(files)

            # if this is changed to support newlines in filenames,
            # be sure to check the templates/ dir again (especially *-raw.tmpl)
            hex, flags = revlog.hex, map.flags
            text = ["%s\000%s%s\n" % (f, hex(map[f]), flags(f))
                    for f in files]
            self.listcache = array.array('c', "".join(text))
            cachedelta = None
        else:
            addlist = self.listcache

            checkforbidden(changed[0])
            # combine the changed lists into one list for sorting
            work = [[x, 0] for x in changed[0]]
            work[len(work):] = [[x, 1] for x in changed[1]]
            work.sort()

            delta = []
            dstart = None
            dend = None
            dline = [""]
            start = 0
            # zero copy representation of addlist as a buffer
            addbuf = buffer(addlist)

            # start with a readonly loop that finds the offset of
            # each line and creates the deltas
            for w in work:
                f = w[0]
                # bs will either be the index of the item or the insert point
                start, end = self._search(addbuf, f, start)
                if w[1] == 0:
                    l = "%s\000%s%s\n" % (f, revlog.hex(map[f]), map.flags(f))
                else:
                    l = ""
                if start == end and w[1] == 1:
                    # item we want to delete was not found, error out
                    raise AssertionError(
                        _("failed to remove %s from manifest") % f)
                if dstart != None and dstart <= start and dend >= start:
                    if dend < end:
                        dend = end
                    if l:
                        dline.append(l)
                else:
                    if dstart != None:
                        delta.append([dstart, dend, "".join(dline)])
                    dstart = start
                    dend = end
                    dline = [l]

            if dstart != None:
                delta.append([dstart, dend, "".join(dline)])
            # apply the delta to the addlist, and get a delta for addrevision
            cachedelta = addlistdelta(addlist, delta)

            # the delta is only valid if we've been processing the tip revision
            if self.mapcache[0] != self.tip():
                cachedelta = None
            self.listcache = addlist

        n = self.addrevision(buffer(self.listcache), transaction, link,
                             p1, p2, cachedelta)
        self.mapcache = (n, map)

        return n
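
# Editor's illustrative sketch -- not part of the patch above. The delta
# returned by addlistdelta() uses the same binary format that mpatch
# applies: a sequence of chunks, each a big-endian (start, end, length)
# header followed by the replacement bytes. A minimal pure-Python applier
# (apply_chunks is a made-up helper name, assuming ascending,
# non-overlapping chunks as addlistdelta produces):
import struct

def apply_chunks(text, delta):
    out, pos, last = [], 0, 0
    while pos < len(delta):
        start, end, l = struct.unpack(">lll", delta[pos:pos + 12])
        pos += 12
        out.append(text[last:start])    # copy unchanged bytes
        out.append(delta[pos:pos + l])  # splice in replacement bytes
        pos += l
        last = end
    out.append(text[last:])
    return "".join(out)

d = struct.pack(">lll", 4, 9, 5) + "WORLD"
assert apply_chunks("abc hello def", d) == "abc WORLD def"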
@@ -1,269 +1,270 @@
# mdiff.py - diff and patch routines for mercurial
#
# Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2, incorporated herein by reference.

from i18n import _
-import bdiff, mpatch, re, struct, util
+import bdiff, mpatch, util
+import re, struct

def splitnewlines(text):
    '''like str.splitlines, but only split on newlines.'''
    lines = [l + '\n' for l in text.split('\n')]
    if lines:
        if lines[-1] == '\n':
            lines.pop()
        else:
            lines[-1] = lines[-1][:-1]
    return lines
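
# Editor's illustrative examples -- not part of the patch above: unlike
# str.splitlines, splitnewlines keeps the trailing '\n' on each line and
# preserves whether the text ended with a newline.
assert splitnewlines("a\nb\n") == ["a\n", "b\n"]
assert splitnewlines("a\nb") == ["a\n", "b"]
assert "a\nb\n".splitlines() == ["a", "b"]  # stdlib drops the newlines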

class diffopts(object):
    '''context is the number of context lines
    text treats all files as text
    showfunc enables diff -p output
    git enables the git extended patch format
    nodates removes dates from diff headers
    ignorews ignores all whitespace changes in the diff
    ignorewsamount ignores changes in the amount of whitespace
    ignoreblanklines ignores changes whose lines are all blank'''

    defaults = {
        'context': 3,
        'text': False,
        'showfunc': False,
        'git': False,
        'nodates': False,
        'ignorews': False,
        'ignorewsamount': False,
        'ignoreblanklines': False,
        }

    __slots__ = defaults.keys()

    def __init__(self, **opts):
        for k in self.__slots__:
            v = opts.get(k)
            if v is None:
                v = self.defaults[k]
            setattr(self, k, v)

        try:
            self.context = int(self.context)
        except ValueError:
            raise util.Abort(_('diff context lines count must be '
                               'an integer, not %r') % self.context)

defaultopts = diffopts()

def wsclean(opts, text):
    if opts.ignorews:
        text = re.sub('[ \t]+', '', text)
    elif opts.ignorewsamount:
        text = re.sub('[ \t]+', ' ', text)
        text = re.sub('[ \t]+\n', '\n', text)
    if opts.ignoreblanklines:
        text = re.sub('\n+', '', text)
    return text
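
# Editor's illustrative example -- not part of the patch above: wsclean
# normalizes text so that whitespace-only differences compare equal;
# bunidiff() below uses it to drop such hunks when the ignorews* options
# are set.
o = diffopts(ignorewsamount=True)
assert wsclean(o, "x  =\t1\n") == wsclean(o, "x = 1\n")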

def diffline(revs, a, b, opts):
    parts = ['diff']
    if opts.git:
        parts.append('--git')
    if revs and not opts.git:
        parts.append(' '.join(["-r %s" % rev for rev in revs]))
    if opts.git:
        parts.append('a/%s' % a)
        parts.append('b/%s' % b)
    else:
        parts.append(a)
    return ' '.join(parts) + '\n'

def unidiff(a, ad, b, bd, fn1, fn2, r=None, opts=defaultopts):
    def datetag(date, addtab=True):
        if not opts.git and not opts.nodates:
            return '\t%s\n' % date
        if addtab and ' ' in fn1:
            return '\t\n'
        return '\n'

    if not a and not b: return ""
    epoch = util.datestr((0, 0))

    if not opts.text and (util.binary(a) or util.binary(b)):
        if a and b and len(a) == len(b) and a == b:
            return ""
        l = ['Binary file %s has changed\n' % fn1]
    elif not a:
        b = splitnewlines(b)
        if a is None:
            l1 = '--- /dev/null%s' % datetag(epoch, False)
        else:
            l1 = "--- %s%s" % ("a/" + fn1, datetag(ad))
        l2 = "+++ %s%s" % ("b/" + fn2, datetag(bd))
        l3 = "@@ -0,0 +1,%d @@\n" % len(b)
        l = [l1, l2, l3] + ["+" + e for e in b]
    elif not b:
        a = splitnewlines(a)
        l1 = "--- %s%s" % ("a/" + fn1, datetag(ad))
        if b is None:
            l2 = '+++ /dev/null%s' % datetag(epoch, False)
        else:
            l2 = "+++ %s%s" % ("b/" + fn2, datetag(bd))
        l3 = "@@ -1,%d +0,0 @@\n" % len(a)
        l = [l1, l2, l3] + ["-" + e for e in a]
    else:
        al = splitnewlines(a)
        bl = splitnewlines(b)
        l = list(bunidiff(a, b, al, bl, "a/" + fn1, "b/" + fn2, opts=opts))
        if not l: return ""
        # difflib uses a space, rather than a tab
        l[0] = "%s%s" % (l[0][:-2], datetag(ad))
        l[1] = "%s%s" % (l[1][:-2], datetag(bd))

    for ln in xrange(len(l)):
        if l[ln][-1] != '\n':
            l[ln] += "\n\ No newline at end of file\n"

    if r:
        l.insert(0, diffline(r, fn1, fn2, opts))

    return "".join(l)

# somewhat self contained replacement for difflib.unified_diff
# t1 and t2 are the text to be diffed
# l1 and l2 are the text broken up into lines
# header1 and header2 are the filenames for the diff output
def bunidiff(t1, t2, l1, l2, header1, header2, opts=defaultopts):
    def contextend(l, len):
        ret = l + opts.context
        if ret > len:
            ret = len
        return ret

    def contextstart(l):
        ret = l - opts.context
        if ret < 0:
            return 0
        return ret

    def yieldhunk(hunk, header):
        if header:
            for x in header:
                yield x
        (astart, a2, bstart, b2, delta) = hunk
        aend = contextend(a2, len(l1))
        alen = aend - astart
        blen = b2 - bstart + aend - a2

        func = ""
        if opts.showfunc:
            # walk backwards from the start of the context
            # to find a line starting with an alphanumeric char.
            for x in xrange(astart - 1, -1, -1):
                t = l1[x].rstrip()
                if funcre.match(t):
                    func = ' ' + t[:40]
                    break

        yield "@@ -%d,%d +%d,%d @@%s\n" % (astart + 1, alen,
                                           bstart + 1, blen, func)
        for x in delta:
            yield x
        for x in xrange(a2, aend):
            yield ' ' + l1[x]

    header = [ "--- %s\t\n" % header1, "+++ %s\t\n" % header2 ]

    if opts.showfunc:
        funcre = re.compile('\w')

    # bdiff.blocks gives us the matching sequences in the files. The loop
    # below finds the spaces between those matching sequences and translates
    # them into diff output.
    #
    diff = bdiff.blocks(t1, t2)
    hunk = None
    for i in xrange(len(diff)):
        # The first match is special.
        # we've either found a match starting at line 0 or a match later
        # in the file. If it starts later, old and new below will both be
        # empty and we'll continue to the next match.
        if i > 0:
            s = diff[i-1]
        else:
            s = [0, 0, 0, 0]
        delta = []
        s1 = diff[i]
        a1 = s[1]
        a2 = s1[0]
        b1 = s[3]
        b2 = s1[2]

        old = l1[a1:a2]
        new = l2[b1:b2]

        # bdiff sometimes gives huge matches past eof, this check eats them,
        # and deals with the special first match case described above
        if not old and not new:
            continue

        if opts.ignorews or opts.ignorewsamount or opts.ignoreblanklines:
            if wsclean(opts, "".join(old)) == wsclean(opts, "".join(new)):
                continue

        astart = contextstart(a1)
        bstart = contextstart(b1)
        prev = None
        if hunk:
            # join with the previous hunk if it falls inside the context
            if astart < hunk[1] + opts.context + 1:
                prev = hunk
                astart = hunk[1]
                bstart = hunk[3]
            else:
                for x in yieldhunk(hunk, header):
                    yield x
                # we only want to yield the header if the files differ, and
                # we only want to yield it once.
                header = None
        if prev:
            # we've joined the previous hunk, record the new ending points.
            hunk[1] = a2
            hunk[3] = b2
            delta = hunk[4]
        else:
            # create a new hunk
            hunk = [ astart, a2, bstart, b2, delta ]

        delta[len(delta):] = [ ' ' + x for x in l1[astart:a1] ]
        delta[len(delta):] = [ '-' + x for x in old ]
        delta[len(delta):] = [ '+' + x for x in new ]

    if hunk:
        for x in yieldhunk(hunk, header):
            yield x

def patchtext(bin):
    pos = 0
    t = []
    while pos < len(bin):
        p1, p2, l = struct.unpack(">lll", bin[pos:pos + 12])
        pos += 12
        t.append(bin[pos:pos + l])
        pos += l
    return "".join(t)

def patch(a, bin):
    return mpatch.patches(a, [bin])

# similar to difflib.SequenceMatcher.get_matching_blocks
def get_matching_blocks(a, b):
    return [(d[0], d[2], d[1] - d[0]) for d in bdiff.blocks(a, b)]

def trivialdiffheader(length):
    return struct.pack(">lll", 0, 0, length)

patches = mpatch.patches
patchedsize = mpatch.patchedsize
textdiff = bdiff.bdiff
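
# Editor's illustrative sketch -- not part of the patch above: the
# aliases exported here round-trip, i.e. the binary delta produced by
# textdiff() rebuilds the target text when applied with patch():
old, new = "one\ntwo\nthree\n", "one\n2\nthree\n"
assert patch(old, textdiff(old, new)) == new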
@@ -1,508 +1,509 @@
# merge.py - directory-level update/merge handling for Mercurial
#
# Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2, incorporated herein by reference.

from node import nullid, nullrev, hex, bin
from i18n import _
-import errno, util, os, filemerge, copies, shutil
+import util, filemerge, copies
+import errno, os, shutil

class mergestate(object):
    '''track 3-way merge state of individual files'''
    def __init__(self, repo):
        self._repo = repo
        self._read()
    def reset(self, node=None):
        self._state = {}
        if node:
            self._local = node
        shutil.rmtree(self._repo.join("merge"), True)
    def _read(self):
        self._state = {}
        try:
            localnode = None
            f = self._repo.opener("merge/state")
            for i, l in enumerate(f):
                if i == 0:
                    localnode = l[:-1]
                else:
                    bits = l[:-1].split("\0")
                    self._state[bits[0]] = bits[1:]
            self._local = bin(localnode)
        except IOError, err:
            if err.errno != errno.ENOENT:
                raise
    def _write(self):
        f = self._repo.opener("merge/state", "w")
        f.write(hex(self._local) + "\n")
        for d, v in self._state.iteritems():
            f.write("\0".join([d] + v) + "\n")
    def add(self, fcl, fco, fca, fd, flags):
        hash = util.sha1(fcl.path()).hexdigest()
        self._repo.opener("merge/" + hash, "w").write(fcl.data())
        self._state[fd] = ['u', hash, fcl.path(), fca.path(),
                           hex(fca.filenode()), fco.path(), flags]
        self._write()
    def __contains__(self, dfile):
        return dfile in self._state
    def __getitem__(self, dfile):
        return self._state[dfile][0]
    def __iter__(self):
        l = self._state.keys()
        l.sort()
        for f in l:
            yield f
    def mark(self, dfile, state):
        self._state[dfile][0] = state
        self._write()
    def resolve(self, dfile, wctx, octx):
        if self[dfile] == 'r':
            return 0
        state, hash, lfile, afile, anode, ofile, flags = self._state[dfile]
        f = self._repo.opener("merge/" + hash)
        self._repo.wwrite(dfile, f.read(), flags)
        fcd = wctx[dfile]
        fco = octx[ofile]
        fca = self._repo.filectx(afile, fileid=anode)
        r = filemerge.filemerge(self._repo, self._local, lfile, fcd, fco, fca)
        if not r:
            self.mark(dfile, 'r')
        return r
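
# Editor's illustrative sketch -- not part of the patch above; the sample
# record below is made up. _read()/_write() use a line-based format: the
# first line is the hex of the local parent, each later line one
# NUL-separated record whose first two fields are the destination file
# and its state ('u' unresolved, 'r' resolved).
sample = ("0123456789abcdef0123456789abcdef01234567\n"
          "a.txt\0u\0deadbeef\0a.txt\0a.txt\0fedcba98\0a.txt\0x\n")
lines = sample.splitlines()
state = {}
for l in lines[1:]:
    bits = l.split("\0")
    state[bits[0]] = bits[1:]
assert state["a.txt"][0] == 'u'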

def _checkunknown(wctx, mctx):
    "check for collisions between unknown files and files in mctx"
    for f in wctx.unknown():
        if f in mctx and mctx[f].cmp(wctx[f].data()):
            raise util.Abort(_("untracked file in working directory differs"
                               " from file in requested revision: '%s'") % f)

def _checkcollision(mctx):
    "check for case folding collisions in the destination context"
    folded = {}
    for fn in mctx:
        fold = fn.lower()
        if fold in folded:
            raise util.Abort(_("case-folding collision between %s and %s")
                             % (fn, folded[fold]))
        folded[fold] = fn

def _forgetremoved(wctx, mctx, branchmerge):
    """
    Forget removed files

    If we're jumping between revisions (as opposed to merging), and if
    neither the working directory nor the target rev has the file,
    then we need to remove it from the dirstate, to prevent the
    dirstate from listing the file when it is no longer in the
    manifest.

    If we're merging, and the other revision has removed a file
    that is not present in the working directory, we need to mark it
    as removed.
    """

    action = []
    state = branchmerge and 'r' or 'f'
    for f in wctx.deleted():
        if f not in mctx:
            action.append((f, state))

    if not branchmerge:
        for f in wctx.removed():
            if f not in mctx:
                action.append((f, "f"))

    return action

def manifestmerge(repo, p1, p2, pa, overwrite, partial):
    """
    Merge p1 and p2 with ancestor ma and generate merge action list

    overwrite = whether we clobber working files
    partial = function to filter file lists
    """

    repo.ui.note(_("resolving manifests\n"))
    repo.ui.debug(_(" overwrite %s partial %s\n") % (overwrite, bool(partial)))
    repo.ui.debug(_(" ancestor %s local %s remote %s\n") % (pa, p1, p2))

    m1 = p1.manifest()
    m2 = p2.manifest()
    ma = pa.manifest()
    backwards = (pa == p2)
    action = []
    copy, copied, diverge = {}, {}, {}

    def fmerge(f, f2=None, fa=None):
        """merge flags"""
        if not f2:
            f2 = f
            fa = f
        a, m, n = ma.flags(fa), m1.flags(f), m2.flags(f2)
        if m == n: # flags agree
            return m # unchanged
        if m and n: # flags are set but don't agree
            if not a: # both differ from parent
                r = repo.ui.prompt(
                    _(" conflicting flags for %s\n"
                      "(n)one, e(x)ec or sym(l)ink?") % f,
                    (_("&None"), _("E&xec"), _("Sym&link")), _("n"))
                return r != _("n") and r or ''
            if m == a:
                return n # changed from m to n
            return m # changed from n to m
        if m and m != a: # changed from a to m
            return m
        if n and n != a: # changed from a to n
            return n
        return '' # flag was cleared

    def act(msg, m, f, *args):
        repo.ui.debug(" %s: %s -> %s\n" % (f, msg, m))
        action.append((f, m) + args)

    if pa and not (backwards or overwrite):
        if repo.ui.configbool("merge", "followcopies", True):
            dirs = repo.ui.configbool("merge", "followdirs", True)
            copy, diverge = copies.copies(repo, p1, p2, pa, dirs)
        copied = set(copy.values())
        for of, fl in diverge.iteritems():
            act("divergent renames", "dr", of, fl)

    # Compare manifests
    for f, n in m1.iteritems():
        if partial and not partial(f):
            continue
        if f in m2:
            if overwrite or backwards:
                rflags = m2.flags(f)
            else:
                rflags = fmerge(f)
            # are files different?
            if n != m2[f]:
                a = ma.get(f, nullid)
                # are we clobbering?
                if overwrite:
                    act("clobbering", "g", f, rflags)
                # or are we going back in time and clean?
                elif backwards:
                    if not n[20:] or not p2[f].cmp(p1[f].data()):
                        act("reverting", "g", f, rflags)
                # are both different from the ancestor?
                elif n != a and m2[f] != a:
                    act("versions differ", "m", f, f, f, rflags, False)
                # is remote's version newer?
                elif m2[f] != a:
                    act("remote is newer", "g", f, rflags)
                # local is newer, not overwrite, check mode bits
                elif m1.flags(f) != rflags:
                    act("update permissions", "e", f, rflags)
            # contents same, check mode bits
            elif m1.flags(f) != rflags:
                act("update permissions", "e", f, rflags)
        elif f in copied:
            continue
        elif f in copy:
            f2 = copy[f]
            if f2 not in m2: # directory rename
                act("remote renamed directory to " + f2, "d",
                    f, None, f2, m1.flags(f))
            elif f2 in m1: # case 2 A,B/B/B
                act("local copied to " + f2, "m",
                    f, f2, f, fmerge(f, f2, f2), False)
            else: # case 4,21 A/B/B
                act("local moved to " + f2, "m",
                    f, f2, f, fmerge(f, f2, f2), False)
        elif f in ma:
            if n != ma[f] and not overwrite:
                if repo.ui.prompt(
                    _(" local changed %s which remote deleted\n"
                      "use (c)hanged version or (d)elete?") % f,
                    (_("&Changed"), _("&Delete")), _("c")) == _("d"):
                    act("prompt delete", "r", f)
                act("prompt keep", "a", f)
            else:
                act("other deleted", "r", f)
        else:
            # file is created on branch or in working directory
            if (overwrite and n[20:] != "u") or (backwards and not n[20:]):
                act("remote deleted", "r", f)

    for f, n in m2.iteritems():
        if partial and not partial(f):
            continue
        if f in m1:
            continue
        if f in copied:
            continue
        if f in copy:
            f2 = copy[f]
            if f2 not in m1: # directory rename
                act("local renamed directory to " + f2, "d",
                    None, f, f2, m2.flags(f))
            elif f2 in m2: # rename case 1, A/A,B/A
                act("remote copied to " + f, "m",
                    f2, f, f, fmerge(f2, f, f2), False)
            else: # case 3,20 A/B/A
                act("remote moved to " + f, "m",
                    f2, f, f, fmerge(f2, f, f2), True)
        elif f in ma:
            if overwrite or backwards:
                act("recreating", "g", f, m2.flags(f))
            elif n != ma[f]:
                if repo.ui.prompt(
                    _("remote changed %s which local deleted\n"
                      "use (c)hanged version or leave (d)eleted?") % f,
                    (_("&Changed"), _("&Deleted")), _("c")) == _("c"):
                    act("prompt recreating", "g", f, m2.flags(f))
        else:
            act("remote created", "g", f, m2.flags(f))

    return action

def actioncmp(a1, a2):
    m1 = a1[1]
    m2 = a2[1]
    if m1 == m2:
        return cmp(a1, a2)
    if m1 == 'r':
        return -1
    if m2 == 'r':
        return 1
    return cmp(a1, a2)
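
# Editor's illustrative example -- not part of the patch above: actioncmp
# sorts every remove ('r') ahead of the other actions, so deletions hit
# the working directory before new files are written in their place.
acts = [("b", "g", ""), ("a", "r"), ("c", "m")]
acts.sort(actioncmp)
assert acts[0] == ("a", "r")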

def applyupdates(repo, action, wctx, mctx):
    "apply the merge action list to the working directory"

    updated, merged, removed, unresolved = 0, 0, 0, 0
    ms = mergestate(repo)
    ms.reset(wctx.parents()[0].node())
    moves = []
    action.sort(actioncmp)

    # prescan for merges
    for a in action:
        f, m = a[:2]
        if m == 'm': # merge
            f2, fd, flags, move = a[2:]
            repo.ui.debug(_("preserving %s for resolve of %s\n") % (f, fd))
            fcl = wctx[f]
            fco = mctx[f2]
            fca = fcl.ancestor(fco) or repo.filectx(f, fileid=nullrev)
            ms.add(fcl, fco, fca, fd, flags)
            if f != fd and move:
                moves.append(f)

    # remove renamed files after safely stored
    for f in moves:
        if util.lexists(repo.wjoin(f)):
            repo.ui.debug(_("removing %s\n") % f)
            os.unlink(repo.wjoin(f))

    audit_path = util.path_auditor(repo.root)

    for a in action:
        f, m = a[:2]
        if f and f[0] == "/":
            continue
        if m == "r": # remove
            repo.ui.note(_("removing %s\n") % f)
            audit_path(f)
            try:
                util.unlink(repo.wjoin(f))
            except OSError, inst:
                if inst.errno != errno.ENOENT:
                    repo.ui.warn(_("update failed to remove %s: %s!\n") %
                                 (f, inst.strerror))
            removed += 1
        elif m == "m": # merge
            f2, fd, flags, move = a[2:]
            r = ms.resolve(fd, wctx, mctx)
            if r > 0:
                unresolved += 1
            else:
                if r is None:
                    updated += 1
                else:
                    merged += 1
            util.set_flags(repo.wjoin(fd), 'l' in flags, 'x' in flags)
            if f != fd and move and util.lexists(repo.wjoin(f)):
                repo.ui.debug(_("removing %s\n") % f)
                os.unlink(repo.wjoin(f))
        elif m == "g": # get
            flags = a[2]
            repo.ui.note(_("getting %s\n") % f)
            t = mctx.filectx(f).data()
            repo.wwrite(f, t, flags)
            updated += 1
        elif m == "d": # directory rename
            f2, fd, flags = a[2:]
            if f:
                repo.ui.note(_("moving %s to %s\n") % (f, fd))
                t = wctx.filectx(f).data()
                repo.wwrite(fd, t, flags)
                util.unlink(repo.wjoin(f))
            if f2:
                repo.ui.note(_("getting %s to %s\n") % (f2, fd))
                t = mctx.filectx(f2).data()
                repo.wwrite(fd, t, flags)
            updated += 1
        elif m == "dr": # divergent renames
            fl = a[2]
            repo.ui.warn(_("warning: detected divergent renames of %s to:\n") % f)
            for nf in fl:
                repo.ui.warn(" %s\n" % nf)
        elif m == "e": # exec
            flags = a[2]
            util.set_flags(repo.wjoin(f), 'l' in flags, 'x' in flags)

    return updated, merged, removed, unresolved

def recordupdates(repo, action, branchmerge):
    "record merge actions to the dirstate"

    for a in action:
        f, m = a[:2]
        if m == "r": # remove
            if branchmerge:
                repo.dirstate.remove(f)
            else:
                repo.dirstate.forget(f)
        elif m == "a": # re-add
            if not branchmerge:
                repo.dirstate.add(f)
        elif m == "f": # forget
            repo.dirstate.forget(f)
        elif m == "e": # exec change
            repo.dirstate.normallookup(f)
        elif m == "g": # get
            if branchmerge:
                repo.dirstate.normaldirty(f)
            else:
                repo.dirstate.normal(f)
        elif m == "m": # merge
            f2, fd, flag, move = a[2:]
            if branchmerge:
                # We've done a branch merge, mark this file as merged
                # so that we properly record the merger later
                repo.dirstate.merge(fd)
                if f != f2: # copy/rename
                    if move:
                        repo.dirstate.remove(f)
                    if f != fd:
                        repo.dirstate.copy(f, fd)
                    else:
                        repo.dirstate.copy(f2, fd)
            else:
                # We've update-merged a locally modified file, so
                # we set the dirstate to emulate a normal checkout
                # of that file some time in the past. Thus our
                # merge will appear as a normal local file
                # modification.
                repo.dirstate.normallookup(fd)
                if move:
                    repo.dirstate.forget(f)
        elif m == "d": # directory rename
            f2, fd, flag = a[2:]
            if not f2 and f not in repo.dirstate:
                # untracked file moved
                continue
            if branchmerge:
                repo.dirstate.add(fd)
                if f:
                    repo.dirstate.remove(f)
                    repo.dirstate.copy(f, fd)
                if f2:
                    repo.dirstate.copy(f2, fd)
            else:
                repo.dirstate.normal(fd)
                if f:
                    repo.dirstate.forget(f)

def update(repo, node, branchmerge, force, partial):
    """
    Perform a merge between the working directory and the given node

    branchmerge = whether to merge between branches
    force = whether to force branch merging or file overwriting
    partial = a function to filter file lists (dirstate not updated)
    """

    wlock = repo.wlock()
    try:
        wc = repo[None]
        if node is None:
            # tip of current branch
            try:
                node = repo.branchtags()[wc.branch()]
            except KeyError:
                if wc.branch() == "default": # no default branch!
                    node = repo.lookup("tip") # update to tip
                else:
                    raise util.Abort(_("branch %s not found") % wc.branch())
        overwrite = force and not branchmerge
        pl = wc.parents()
        p1, p2 = pl[0], repo[node]
        pa = p1.ancestor(p2)
        fp1, fp2, xp1, xp2 = p1.node(), p2.node(), str(p1), str(p2)
        fastforward = False

        ### check phase
        if not overwrite and len(pl) > 1:
            raise util.Abort(_("outstanding uncommitted merges"))
        if branchmerge:
            if pa == p2:
                raise util.Abort(_("can't merge with ancestor"))
            elif pa == p1:
                if p1.branch() != p2.branch():
                    fastforward = True
                else:
                    raise util.Abort(_("nothing to merge (use 'hg update'"
                                       " or check 'hg heads')"))
            if not force and (wc.files() or wc.deleted()):
                raise util.Abort(_("outstanding uncommitted changes"))
        elif not overwrite:
            if pa == p1 or pa == p2: # linear
                pass # all good
            elif p1.branch() == p2.branch():
                if wc.files() or wc.deleted():
                    raise util.Abort(_("crosses branches (use 'hg merge' or "
                                       "'hg update -C' to discard changes)"))
                raise util.Abort(_("crosses branches (use 'hg merge' "
                                   "or 'hg update -C')"))
            elif wc.files() or wc.deleted():
                raise util.Abort(_("crosses named branches (use "
                                   "'hg update -C' to discard changes)"))
            else:
                # Allow jumping branches if there are no changes
                overwrite = True

        ### calculate phase
        action = []
        if not force:
            _checkunknown(wc, p2)
        if not util.checkcase(repo.path):
            _checkcollision(p2)
        action += _forgetremoved(wc, p2, branchmerge)
        action += manifestmerge(repo, wc, p2, pa, overwrite, partial)

        ### apply phase
        if not branchmerge: # just jump to the new rev
            fp1, fp2, xp1, xp2 = fp2, nullid, xp2, ''
        if not partial:
            repo.hook('preupdate', throw=True, parent1=xp1, parent2=xp2)

        stats = applyupdates(repo, action, wc, p2)

        if not partial:
            recordupdates(repo, action, branchmerge)
            repo.dirstate.setparents(fp1, fp2)
            if not branchmerge and not fastforward:
                repo.dirstate.setbranch(p2.branch())
            repo.hook('update', parent1=xp1, parent2=xp2, error=stats[3])

        return stats
    finally:
        wlock.release()
@@ -1,217 +1,218 @@
# posix.py - Posix utility function implementations for Mercurial
#
# Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2, incorporated herein by reference.

from i18n import _
-import os, sys, osutil, errno, stat, getpass, pwd, grp
+import osutil
+import os, sys, errno, stat, getpass, pwd, grp

posixfile = file
nulldev = '/dev/null'
normpath = os.path.normpath
samestat = os.path.samestat

umask = os.umask(0)
os.umask(umask)

def openhardlinks():
    '''return true if it is safe to hold open file handles to hardlinks'''
    return True

def rcfiles(path):
    rcs = [os.path.join(path, 'hgrc')]
    rcdir = os.path.join(path, 'hgrc.d')
    try:
        rcs.extend([os.path.join(rcdir, f)
                    for f, kind in osutil.listdir(rcdir)
                    if f.endswith(".rc")])
    except OSError:
        pass
    return rcs

def system_rcpath():
    path = []
    # old mod_python does not set sys.argv
    if len(getattr(sys, 'argv', [])) > 0:
        path.extend(rcfiles(os.path.dirname(sys.argv[0]) +
                            '/../etc/mercurial'))
    path.extend(rcfiles('/etc/mercurial'))
    return path

def user_rcpath():
    return [os.path.expanduser('~/.hgrc')]

def parse_patch_output(output_line):
    """parses the output produced by patch and returns the file name"""
    pf = output_line[14:]
    if os.sys.platform == 'OpenVMS':
        if pf[0] == '`':
            pf = pf[1:-1] # Remove the quotes
    else:
        if pf.startswith("'") and pf.endswith("'") and " " in pf:
            pf = pf[1:-1] # Remove the quotes
    return pf

def sshargs(sshcmd, host, user, port):
    '''Build argument list for ssh'''
    args = user and ("%s@%s" % (user, host)) or host
    return port and ("%s -p %s" % (args, port)) or args
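
# Editor's illustrative examples -- not part of the patch above (the
# sshcmd argument is unused in this POSIX implementation):
assert sshargs('ssh', 'example.com', 'bob', '2222') == 'bob@example.com -p 2222'
assert sshargs('ssh', 'example.com', None, None) == 'example.com'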

def is_exec(f):
    """check whether a file is executable"""
    return (os.lstat(f).st_mode & 0100 != 0)

def set_flags(f, l, x):
    s = os.lstat(f).st_mode
    if l:
        if not stat.S_ISLNK(s):
            # switch file to link
            data = file(f).read()
            os.unlink(f)
            try:
                os.symlink(data, f)
            except:
                # failed to make a link, rewrite file
                file(f, "w").write(data)
        # no chmod needed at this point
        return
    if stat.S_ISLNK(s):
        # switch link to file
        data = os.readlink(f)
        os.unlink(f)
        file(f, "w").write(data)
        s = 0666 & ~umask # avoid restatting for chmod

    sx = s & 0100
    if x and not sx:
        # Turn on +x for every +r bit when making a file executable
        # and obey umask.
        os.chmod(f, s | (s & 0444) >> 2 & ~umask)
    elif not x and sx:
        # Turn off all +x bits
        os.chmod(f, s & 0666)
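
# Editor's illustrative example -- not part of the patch above: the
# chmod expression in set_flags() copies each +r bit down to the
# matching +x bit, then strips bits covered by the umask. With mode
# 0644 and umask 022 the result is the expected 0755:
s, um = 0644, 022
assert s | (s & 0444) >> 2 & ~um == 0755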

def set_binary(fd):
    pass

def pconvert(path):
    return path

def localpath(path):
    return path

def shellquote(s):
    if os.sys.platform == 'OpenVMS':
        return '"%s"' % s
    else:
        return "'%s'" % s.replace("'", "'\\''")

def quotecommand(cmd):
    return cmd

def popen(command, mode='r'):
    return os.popen(command, mode)

def testpid(pid):
    '''return False if pid dead, True if running or not sure'''
    if os.sys.platform == 'OpenVMS':
        return True
    try:
        os.kill(pid, 0)
        return True
    except OSError, inst:
        return inst.errno != errno.ESRCH

def explain_exit(code):
    """return a 2-tuple (desc, code) describing a process's status"""
    if os.WIFEXITED(code):
        val = os.WEXITSTATUS(code)
        return _("exited with status %d") % val, val
    elif os.WIFSIGNALED(code):
        val = os.WTERMSIG(code)
        return _("killed by signal %d") % val, val
    elif os.WIFSTOPPED(code):
        val = os.WSTOPSIG(code)
        return _("stopped by signal %d") % val, val
    raise ValueError(_("invalid exit code"))

def isowner(fp, st=None):
    """Return True if the file object f belongs to the current user.

    The return value of a util.fstat(f) may be passed as the st argument.
    """
    if st is None:
        st = fstat(fp)
    return st.st_uid == os.getuid()

def find_exe(command):
    '''Find executable for command searching like which does.
    If command is a basename then PATH is searched for command.
    PATH isn't searched if command is an absolute or relative path.
    If command isn't found None is returned.'''
    if sys.platform == 'OpenVMS':
        return command

    def findexisting(executable):
        'Will return executable if existing file'
        if os.path.exists(executable):
            return executable
        return None

    if os.sep in command:
        return findexisting(command)

    for path in os.environ.get('PATH', '').split(os.pathsep):
        executable = findexisting(os.path.join(path, command))
        if executable is not None:
            return executable
    return None
171
172
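# Illustrative use (editor's sketch, not part of this changeset; the results
# below are of course system-dependent):
#
# >>> find_exe('sh')            # bare name: searched for on PATH
# '/bin/sh'
# >>> find_exe('no-such-tool')  # not found anywhere, returns None
# >>> find_exe('./script.sh')   # contains os.sep, so PATH is not searched
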
def set_signal_handler():
    pass

def statfiles(files):
    'Stat each file in files and yield stat or None if file does not exist.'
    lstat = os.lstat
    for nf in files:
        try:
            st = lstat(nf)
        except OSError, err:
            if err.errno not in (errno.ENOENT, errno.ENOTDIR):
                raise
            st = None
        yield st

def getuser():
    '''return name of current user'''
    return getpass.getuser()

def expand_glob(pats):
    '''On Windows, expand the implicit globs in a list of patterns'''
    return list(pats)

def username(uid=None):
    """Return the name of the user with the given uid.

    If uid is None, return the name of the current user."""

    if uid is None:
        uid = os.getuid()
    try:
        return pwd.getpwuid(uid)[0]
    except KeyError:
        return str(uid)

def groupname(gid=None):
    """Return the name of the group with the given gid.

    If gid is None, return the name of the current group."""

    if gid is None:
        gid = os.getgid()
    try:
        return grp.getgrgid(gid)[0]
    except KeyError:
        return str(gid)
@@ -1,149 +1,150 @@
# repair.py - functions for repository repair for mercurial
#
# Copyright 2005, 2006 Chris Mason <mason@suse.com>
# Copyright 2007 Matt Mackall
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2, incorporated herein by reference.

-import changegroup, os
+import changegroup
from node import nullrev, short
from i18n import _
+import os

def _bundle(repo, bases, heads, node, suffix, extranodes=None):
    """create a bundle with the specified revisions as a backup"""
    cg = repo.changegroupsubset(bases, heads, 'strip', extranodes)
    backupdir = repo.join("strip-backup")
    if not os.path.isdir(backupdir):
        os.mkdir(backupdir)
    name = os.path.join(backupdir, "%s-%s" % (short(node), suffix))
    repo.ui.warn(_("saving bundle to %s\n") % name)
    return changegroup.writebundle(cg, name, "HG10BZ")

def _collectfiles(repo, striprev):
    """find out the filelogs affected by the strip"""
    files = {}

    for x in xrange(striprev, len(repo)):
        for name in repo[x].files():
            if name in files:
                continue
            files[name] = 1

    files = files.keys()
    files.sort()
    return files

def _collectextranodes(repo, files, link):
    """return the nodes that have to be saved before the strip"""
    def collectone(revlog):
        extra = []
        startrev = count = len(revlog)
        # find the truncation point of the revlog
        for i in xrange(0, count):
            lrev = revlog.linkrev(i)
            if lrev >= link:
                startrev = i + 1
                break

        # see if any revision after that point has a linkrev less than link
        # (we have to manually save these guys)
        for i in xrange(startrev, count):
            node = revlog.node(i)
            lrev = revlog.linkrev(i)
            if lrev < link:
                extra.append((node, cl.node(lrev)))

        return extra

    extranodes = {}
    cl = repo.changelog
    extra = collectone(repo.manifest)
    if extra:
        extranodes[1] = extra
    for fname in files:
        f = repo.file(fname)
        extra = collectone(f)
        if extra:
            extranodes[fname] = extra

    return extranodes

def strip(ui, repo, node, backup="all"):
    cl = repo.changelog
    # TODO delete the undo files, and handle undo of merge sets
    striprev = cl.rev(node)

    # Some revisions with rev > striprev may not be descendants of striprev.
    # We have to find these revisions and put them in a bundle, so that
    # we can restore them after the truncations.
    # To create the bundle we use repo.changegroupsubset which requires
    # the list of heads and bases of the set of interesting revisions.
    # (head = revision in the set that has no descendant in the set;
    # base = revision in the set that has no ancestor in the set)
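    # A worked example (editor's note, not part of this changeset): stripping
    # rev 1 from the graph 0 <- 1 <- 2, 0 <- 3, 4 = merge(2, 3) removes
    # revisions 1, 2 and 4, but rev 3 does not descend from 1 and must be
    # restored afterwards; rev 3 is then both a base (no parent in the saved
    # set) and a head (no descendant in the saved set) of the backup bundle.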
    tostrip = {striprev: 1}
    saveheads = {}
    savebases = []
    for r in xrange(striprev + 1, len(cl)):
        parents = cl.parentrevs(r)
        if parents[0] in tostrip or parents[1] in tostrip:
            # r is a descendant of striprev
            tostrip[r] = 1
            # if this is a merge and one of the parents does not descend
            # from striprev, mark that parent as a savehead.
            if parents[1] != nullrev:
                for p in parents:
                    if p not in tostrip and p > striprev:
                        saveheads[p] = 1
        else:
            # if no parents of this revision will be stripped, mark it as
            # a savebase
            if parents[0] < striprev and parents[1] < striprev:
                savebases.append(cl.node(r))

            for p in parents:
                if p in saveheads:
                    del saveheads[p]
            saveheads[r] = 1

    saveheads = [cl.node(r) for r in saveheads]
    files = _collectfiles(repo, striprev)

    extranodes = _collectextranodes(repo, files, striprev)

    # create a changegroup for all the branches we need to keep
    if backup == "all":
        _bundle(repo, [node], cl.heads(), node, 'backup')
    if saveheads or extranodes:
        chgrpfile = _bundle(repo, savebases, saveheads, node, 'temp',
                            extranodes)

    fs = [repo.file(name) for name in files]
    mfst = repo.manifest

    tr = repo.transaction()
    offset = len(tr.entries)

    cl.strip(striprev, tr)
    mfst.strip(striprev, tr)
    for f in fs:
        f.strip(striprev, tr)

    try:
        for i in xrange(offset, len(tr.entries)):
            file, troffset, ignore = tr.entries[i]
            repo.sopener(file, 'a').truncate(troffset)
        tr.close()
    except:
        tr.abort()
        raise

    if saveheads or extranodes:
        ui.status(_("adding branch\n"))
        f = open(chgrpfile, "rb")
        gen = changegroup.readbundle(f, chgrpfile)
        repo.addchangegroup(gen, 'strip', 'bundle:' + chgrpfile, True)
        f.close()
        if backup != "strip":
            os.unlink(chgrpfile)

@@ -1,1370 +1,1370 @@
# revlog.py - storage back-end for mercurial
#
# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2, incorporated herein by reference.

"""Storage back-end for Mercurial.

This provides efficient delta storage with O(1) retrieve and append
and O(changes) merge between branches.
"""

# import stuff from node for others to import from revlog
from node import bin, hex, nullid, nullrev, short #@UnusedImport
from i18n import _
-import changegroup, errno, ancestor, mdiff, parsers
-import struct, util, zlib, error
+import changegroup, ancestor, mdiff, parsers, error, util
+import struct, zlib, errno

_pack = struct.pack
_unpack = struct.unpack
_compress = zlib.compress
_decompress = zlib.decompress
_sha = util.sha1

# revlog flags
REVLOGV0 = 0
REVLOGNG = 1
REVLOGNGINLINEDATA = (1 << 16)
REVLOG_DEFAULT_FLAGS = REVLOGNGINLINEDATA
REVLOG_DEFAULT_FORMAT = REVLOGNG
REVLOG_DEFAULT_VERSION = REVLOG_DEFAULT_FORMAT | REVLOG_DEFAULT_FLAGS

RevlogError = error.RevlogError
LookupError = error.LookupError

def getoffset(q):
    return int(q >> 16)

def gettype(q):
    return int(q & 0xFFFF)

def offset_type(offset, type):
    return long(long(offset) << 16 | type)

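# Illustrative round-trip (editor's sketch, not part of this changeset): the
# first index field packs a 48-bit data-file offset and 16 bits of type/flag
# information into a single integer.
#
# >>> q = offset_type(4096, 1)
# >>> getoffset(q), gettype(q)
# (4096, 1)
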
nullhash = _sha(nullid)

def hash(text, p1, p2):
    """generate a hash from the given text and its parent hashes

    This hash combines both the current file contents and its history
    in a manner that makes it easy to distinguish nodes with the same
    content in the revision graph.
    """
    # As of now, if one of the parent nodes is null, p2 is null
    if p2 == nullid:
        # deep copy of a hash is faster than creating one
        s = nullhash.copy()
        s.update(p1)
    else:
        # none of the parent nodes are nullid
        l = [p1, p2]
        l.sort()
        s = _sha(l[0])
        s.update(l[1])
    s.update(text)
    return s.digest()

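# Illustrative check (editor's sketch, not part of this changeset): with a
# null p2 the node hash is sha1(nullid + p1 + text); copying the pre-seeded
# nullhash just avoids rehashing the 20 nullid bytes on every call.
#
# >>> hash("data", nullid, nullid) == _sha(nullid + nullid + "data").digest()
# True
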
def compress(text):
    """ generate a possibly-compressed representation of text """
    if not text:
        return ("", text)
    l = len(text)
    bin = None
    if l < 44:
        pass
    elif l > 1000000:
        # zlib makes an internal copy, thus doubling memory usage for
        # large files, so let's do this in pieces
        z = zlib.compressobj()
        p = []
        pos = 0
        while pos < l:
            pos2 = pos + 2**20
            p.append(z.compress(text[pos:pos2]))
            pos = pos2
        p.append(z.flush())
        if sum(map(len, p)) < l:
            bin = "".join(p)
    else:
        bin = _compress(text)
    if bin is None or len(bin) > l:
        if text[0] == '\0':
            return ("", text)
        return ('u', text)
    return ("", bin)

def decompress(bin):
    """ decompress the given input """
    if not bin:
        return bin
    t = bin[0]
    if t == '\0':
        return bin
    if t == 'x':
        return _decompress(bin)
    if t == 'u':
        return bin[1:]
    raise RevlogError(_("unknown compression type %r") % t)

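# Illustrative round-trip (editor's sketch, not part of this changeset):
# compress() returns a (header, data) pair; text that is too short or does
# not shrink is stored literally, with 'u' marking uncompressed data.
#
# >>> compress("abc")                 # under 44 bytes, stored as-is
# ('u', 'abc')
# >>> decompress("".join(compress("abc")))
# 'abc'
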
class lazyparser(object):
    """
    this class avoids the need to parse the entirety of large indices
    """

    # lazyparser is not safe to use on windows if win32 extensions not
    # available. it keeps the file handle open, which makes it not possible
    # to break hardlinks on local cloned repos.

    def __init__(self, dataf, size):
        self.dataf = dataf
        self.s = struct.calcsize(indexformatng)
        self.datasize = size
        self.l = size/self.s
        self.index = [None] * self.l
        self.map = {nullid: nullrev}
        self.allmap = 0
        self.all = 0
        self.mapfind_count = 0

    def loadmap(self):
        """
        during a commit, we need to make sure the rev being added is
        not a duplicate. This requires loading the entire index,
        which is fairly slow. loadmap can load up just the node map,
        which takes much less time.
        """
        if self.allmap:
            return
        end = self.datasize
        self.allmap = 1
        cur = 0
        count = 0
        blocksize = self.s * 256
        self.dataf.seek(0)
        while cur < end:
            data = self.dataf.read(blocksize)
            off = 0
            for x in xrange(256):
                n = data[off + ngshaoffset:off + ngshaoffset + 20]
                self.map[n] = count
                count += 1
                if count >= self.l:
                    break
                off += self.s
            cur += blocksize

    def loadblock(self, blockstart, blocksize, data=None):
        if self.all:
            return
        if data is None:
            self.dataf.seek(blockstart)
            if blockstart + blocksize > self.datasize:
                # the revlog may have grown since we've started running,
                # but we don't have space in self.index for more entries.
                # limit blocksize so that we don't get too much data.
                blocksize = max(self.datasize - blockstart, 0)
            data = self.dataf.read(blocksize)
        lend = len(data) / self.s
        i = blockstart / self.s
        off = 0
        # lazyindex supports __delitem__
        if lend > len(self.index) - i:
            lend = len(self.index) - i
        for x in xrange(lend):
            if self.index[i + x] == None:
                b = data[off : off + self.s]
                self.index[i + x] = b
                n = b[ngshaoffset:ngshaoffset + 20]
                self.map[n] = i + x
            off += self.s

    def findnode(self, node):
        """search backwards through the index file for a specific node"""
        if self.allmap:
            return None

        # hg log will cause many many searches for the manifest
        # nodes. After we get called a few times, just load the whole
        # thing.
        if self.mapfind_count > 8:
            self.loadmap()
            if node in self.map:
                return node
            return None
        self.mapfind_count += 1
        last = self.l - 1
        while self.index[last] != None:
            if last == 0:
                self.all = 1
                self.allmap = 1
                return None
            last -= 1
        end = (last + 1) * self.s
        blocksize = self.s * 256
        while end >= 0:
            start = max(end - blocksize, 0)
            self.dataf.seek(start)
            data = self.dataf.read(end - start)
            findend = end - start
            while True:
                # we're searching backwards, so we have to make sure
                # we don't find a changeset where this node is a parent
                off = data.find(node, 0, findend)
                findend = off
                if off >= 0:
                    i = off / self.s
                    off = i * self.s
                    n = data[off + ngshaoffset:off + ngshaoffset + 20]
                    if n == node:
                        self.map[n] = i + start / self.s
                        return node
                else:
                    break
            end -= blocksize
        return None

    def loadindex(self, i=None, end=None):
        if self.all:
            return
        all = False
        if i == None:
            blockstart = 0
            blocksize = (65536 / self.s) * self.s
            end = self.datasize
            all = True
        else:
            if end:
                blockstart = i * self.s
                end = end * self.s
                blocksize = end - blockstart
            else:
                blockstart = (i & ~1023) * self.s
                blocksize = self.s * 1024
                end = blockstart + blocksize
        while blockstart < end:
            self.loadblock(blockstart, blocksize)
            blockstart += blocksize
        if all:
            self.all = True

class lazyindex(object):
    """a lazy version of the index array"""
    def __init__(self, parser):
        self.p = parser
    def __len__(self):
        return len(self.p.index)
    def load(self, pos):
        if pos < 0:
            pos += len(self.p.index)
        self.p.loadindex(pos)
        return self.p.index[pos]
    def __getitem__(self, pos):
        return _unpack(indexformatng, self.p.index[pos] or self.load(pos))
    def __setitem__(self, pos, item):
        self.p.index[pos] = _pack(indexformatng, *item)
    def __delitem__(self, pos):
        del self.p.index[pos]
    def insert(self, pos, e):
        self.p.index.insert(pos, _pack(indexformatng, *e))
    def append(self, e):
        self.p.index.append(_pack(indexformatng, *e))

class lazymap(object):
    """a lazy version of the node map"""
    def __init__(self, parser):
        self.p = parser
    def load(self, key):
        n = self.p.findnode(key)
        if n == None:
            raise KeyError(key)
    def __contains__(self, key):
        if key in self.p.map:
            return True
        self.p.loadmap()
        return key in self.p.map
    def __iter__(self):
        yield nullid
        for i in xrange(self.p.l):
            ret = self.p.index[i]
            if not ret:
                self.p.loadindex(i)
                ret = self.p.index[i]
            if isinstance(ret, str):
                ret = _unpack(indexformatng, ret)
            yield ret[7]
    def __getitem__(self, key):
        try:
            return self.p.map[key]
        except KeyError:
            try:
                self.load(key)
                return self.p.map[key]
            except KeyError:
                raise KeyError("node " + hex(key))
    def __setitem__(self, key, val):
        self.p.map[key] = val
    def __delitem__(self, key):
        del self.p.map[key]

indexformatv0 = ">4l20s20s20s"
v0shaoffset = 56

class revlogoldio(object):
    def __init__(self):
        self.size = struct.calcsize(indexformatv0)

    def parseindex(self, fp, inline):
        s = self.size
        index = []
        nodemap = {nullid: nullrev}
        n = off = 0
        data = fp.read()
        l = len(data)
        while off + s <= l:
            cur = data[off:off + s]
            off += s
            e = _unpack(indexformatv0, cur)
            # transform to revlogv1 format
            e2 = (offset_type(e[0], 0), e[1], -1, e[2], e[3],
                  nodemap.get(e[4], nullrev), nodemap.get(e[5], nullrev), e[6])
            index.append(e2)
            nodemap[e[6]] = n
            n += 1

        return index, nodemap, None

    def packentry(self, entry, node, version, rev):
        e2 = (getoffset(entry[0]), entry[1], entry[3], entry[4],
              node(entry[5]), node(entry[6]), entry[7])
        return _pack(indexformatv0, *e2)

# index ng:
# 6 bytes offset
# 2 bytes flags
# 4 bytes compressed length
# 4 bytes uncompressed length
# 4 bytes: base rev
# 4 bytes link rev
# 4 bytes parent 1 rev
# 4 bytes parent 2 rev
# 32 bytes: nodeid
indexformatng = ">Qiiiiii20s12x"
ngshaoffset = 32
versionformat = ">I"

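# Illustrative sanity check (editor's sketch, not part of this changeset):
# a v1 ("ng") index record is 64 bytes: 8 (offset/flags) + 6 * 4 (lengths,
# base, link and parent revs) + 20 (nodeid) + 12 bytes of padding.
#
# >>> e = (offset_type(0, 0), 11, 11, 0, 0, -1, -1, "x" * 20)
# >>> len(_pack(indexformatng, *e))
# 64
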
class revlogio(object):
    def __init__(self):
        self.size = struct.calcsize(indexformatng)

    def parseindex(self, fp, inline):
        try:
            size = util.fstat(fp).st_size
        except AttributeError:
            size = 0

        if util.openhardlinks() and not inline and size > 1000000:
            # big index, let's parse it on demand
            parser = lazyparser(fp, size)
            index = lazyindex(parser)
            nodemap = lazymap(parser)
            e = list(index[0])
            type = gettype(e[0])
            e[0] = offset_type(0, type)
            index[0] = e
            return index, nodemap, None

        data = fp.read()
        # call the C implementation to parse the index data
        index, nodemap, cache = parsers.parse_index(data, inline)
        return index, nodemap, cache

    def packentry(self, entry, node, version, rev):
        p = _pack(indexformatng, *entry)
        if rev == 0:
            p = _pack(versionformat, version) + p[4:]
        return p

class revlog(object):
    """
    the underlying revision storage object

    A revlog consists of two parts, an index and the revision data.

    The index is a file with a fixed record size containing
    information on each revision, including its nodeid (hash), the
    nodeids of its parents, the position and offset of its data within
    the data file, and the revision it's based on. Finally, each entry
    contains a linkrev entry that can serve as a pointer to external
    data.

    The revision data itself is a linear collection of data chunks.
    Each chunk represents a revision and is usually represented as a
    delta against the previous chunk. To bound lookup time, runs of
    deltas are limited to about 2 times the length of the original
    version data. This makes retrieval of a version proportional to
    its size, or O(1) relative to the number of revisions.

    Both pieces of the revlog are written to in an append-only
    fashion, which means we never need to rewrite a file to insert or
    remove data, and can use some simple techniques to avoid the need
    for locking while reading.
    """
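    # Typical read-side use (editor's sketch, not part of this changeset;
    # 'opener' is the file-opening callable described in __init__ below):
    #
    # >>> r = revlog(opener, "00changelog.i")
    # >>> tip = r.tip()            # node of the most recent revision
    # >>> text = r.revision(tip)   # reconstructed from the delta chain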
    def __init__(self, opener, indexfile):
        """
        create a revlog object

        opener is a function that abstracts the file opening operation
        and can be used to implement COW semantics or the like.
        """
        self.indexfile = indexfile
        self.datafile = indexfile[:-2] + ".d"
        self.opener = opener
        self._cache = None
        self._chunkcache = None
        self.nodemap = {nullid: nullrev}
        self.index = []

        v = REVLOG_DEFAULT_VERSION
        if hasattr(opener, "defversion"):
            v = opener.defversion
            if v & REVLOGNG:
                v |= REVLOGNGINLINEDATA

        i = ""
        try:
            f = self.opener(self.indexfile)
            i = f.read(4)
            f.seek(0)
            if len(i) > 0:
                v = struct.unpack(versionformat, i)[0]
        except IOError, inst:
            if inst.errno != errno.ENOENT:
                raise

        self.version = v
        self._inline = v & REVLOGNGINLINEDATA
        flags = v & ~0xFFFF
        fmt = v & 0xFFFF
        if fmt == REVLOGV0 and flags:
            raise RevlogError(_("index %s unknown flags %#04x for format v0")
                              % (self.indexfile, flags >> 16))
        elif fmt == REVLOGNG and flags & ~REVLOGNGINLINEDATA:
            raise RevlogError(_("index %s unknown flags %#04x for revlogng")
                              % (self.indexfile, flags >> 16))
        elif fmt > REVLOGNG:
            raise RevlogError(_("index %s unknown format %d")
                              % (self.indexfile, fmt))

        self._io = revlogio()
        if self.version == REVLOGV0:
            self._io = revlogoldio()
        if i:
            try:
                d = self._io.parseindex(f, self._inline)
            except (ValueError, IndexError), e:
                raise RevlogError(_("index %s is corrupted") % (self.indexfile))
            self.index, self.nodemap, self._chunkcache = d

        # add the magic null revision at -1 (if it hasn't been done already)
        if (self.index == [] or isinstance(self.index, lazyindex) or
            self.index[-1][7] != nullid) :
            self.index.append((0, 0, 0, -1, -1, -1, -1, nullid))

    def _loadindex(self, start, end):
        """load a block of indexes all at once from the lazy parser"""
        if isinstance(self.index, lazyindex):
            self.index.p.loadindex(start, end)

    def _loadindexmap(self):
        """loads both the map and the index from the lazy parser"""
        if isinstance(self.index, lazyindex):
            p = self.index.p
            p.loadindex()
            self.nodemap = p.map

    def _loadmap(self):
        """loads the map from the lazy parser"""
        if isinstance(self.nodemap, lazymap):
            self.nodemap.p.loadmap()
            self.nodemap = self.nodemap.p.map

    def tip(self):
        return self.node(len(self.index) - 2)
    def __len__(self):
        return len(self.index) - 1
    def __iter__(self):
        for i in xrange(len(self)):
            yield i
    def rev(self, node):
        try:
            return self.nodemap[node]
        except KeyError:
            raise LookupError(node, self.indexfile, _('no node'))
    def node(self, rev):
        return self.index[rev][7]
    def linkrev(self, rev):
        return self.index[rev][4]
    def parents(self, node):
        i = self.index
        d = i[self.rev(node)]
        return i[d[5]][7], i[d[6]][7] # map revisions to nodes inline
    def parentrevs(self, rev):
        return self.index[rev][5:7]
    def start(self, rev):
        return int(self.index[rev][0] >> 16)
    def end(self, rev):
        return self.start(rev) + self.length(rev)
    def length(self, rev):
        return self.index[rev][1]
    def base(self, rev):
        return self.index[rev][3]

    def size(self, rev):
        """return the length of the uncompressed text for a given revision"""
        l = self.index[rev][2]
        if l >= 0:
            return l

        t = self.revision(self.node(rev))
        return len(t)

        # alternate implementation: the advantage to this code is that it
        # will be faster for a single revision. But, the results are not
        # cached, so finding the size of every revision will be slower.
        """
        if self.cache and self.cache[1] == rev:
            return len(self.cache[2])

        base = self.base(rev)
        if self.cache and self.cache[1] >= base and self.cache[1] < rev:
            base = self.cache[1]
            text = self.cache[2]
        else:
            text = self.revision(self.node(base))

        l = len(text)
        for x in xrange(base + 1, rev + 1):
            l = mdiff.patchedsize(l, self.chunk(x))
        return l
        """

    def reachable(self, node, stop=None):
        """return a hash of all nodes ancestral to a given node, including
        the node itself, stopping when stop is matched"""
        reachable = {}
        visit = [node]
        reachable[node] = 1
        if stop:
            stopn = self.rev(stop)
        else:
            stopn = 0
        while visit:
            n = visit.pop(0)
            if n == stop:
                continue
            if n == nullid:
                continue
            for p in self.parents(n):
                if self.rev(p) < stopn:
                    continue
                if p not in reachable:
                    reachable[p] = 1
                    visit.append(p)
        return reachable

    def ancestors(self, *revs):
        'Generate the ancestors of revs using a breadth-first visit'
        visit = list(revs)
        seen = set([nullrev])
        while visit:
            for parent in self.parentrevs(visit.pop(0)):
                if parent not in seen:
                    visit.append(parent)
                    seen.add(parent)
                    yield parent

    def descendants(self, *revs):
        'Generate the descendants of revs in topological order'
        seen = set(revs)
        for i in xrange(min(revs) + 1, len(self)):
            for x in self.parentrevs(i):
                if x != nullrev and x in seen:
                    seen.add(i)
                    yield i
                    break

    def findmissing(self, common=None, heads=None):
        '''
        returns the topologically sorted list of nodes from the set:
        missing = (ancestors(heads) \ ancestors(common))

        where ancestors() is the set of ancestors from heads, heads included

        if heads is None, the heads of the revlog are used
        if common is None, nullid is assumed to be a common node
        '''
        if common is None:
            common = [nullid]
        if heads is None:
            heads = self.heads()

        common = [self.rev(n) for n in common]
        heads = [self.rev(n) for n in heads]

        # we want the ancestors, but inclusive
        has = set(self.ancestors(*common))
        has.add(nullrev)
        has.update(common)

        # take all ancestors from heads that aren't in has
        missing = {}
        visit = [r for r in heads if r not in has]
        while visit:
            r = visit.pop(0)
            if r in missing:
                continue
            else:
                missing[r] = None
                for p in self.parentrevs(r):
                    if p not in has:
                        visit.append(p)
        missing = missing.keys()
        missing.sort()
        return [self.node(r) for r in missing]

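    # Worked example (editor's note, not part of this changeset): in the
    # graph 0 <- 1 <- 2, findmissing(common=[node(1)], heads=[node(2)])
    # yields [node(2)]: ancestors(heads) = {0, 1, 2} and ancestors(common)
    # = {0, 1}, so the difference is {2}.
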
637 def nodesbetween(self, roots=None, heads=None):
637 def nodesbetween(self, roots=None, heads=None):
638 """Return a tuple containing three elements. Elements 1 and 2 contain
638 """Return a tuple containing three elements. Elements 1 and 2 contain
639 a final list bases and heads after all the unreachable ones have been
639 a final list bases and heads after all the unreachable ones have been
640 pruned. Element 0 contains a topologically sorted list of all
640 pruned. Element 0 contains a topologically sorted list of all
641
641
642 nodes that satisfy these constraints:
642 nodes that satisfy these constraints:
643 1. All nodes must be descended from a node in roots (the nodes on
643 1. All nodes must be descended from a node in roots (the nodes on
644 roots are considered descended from themselves).
644 roots are considered descended from themselves).
645 2. All nodes must also be ancestors of a node in heads (the nodes in
645 2. All nodes must also be ancestors of a node in heads (the nodes in
646 heads are considered to be their own ancestors).
646 heads are considered to be their own ancestors).
647
647
648 If roots is unspecified, nullid is assumed as the only root.
648 If roots is unspecified, nullid is assumed as the only root.
649 If heads is unspecified, it is taken to be the output of the
649 If heads is unspecified, it is taken to be the output of the
650 heads method (i.e. a list of all nodes in the repository that
650 heads method (i.e. a list of all nodes in the repository that
651 have no children)."""
651 have no children)."""
652 nonodes = ([], [], [])
652 nonodes = ([], [], [])
653 if roots is not None:
653 if roots is not None:
654 roots = list(roots)
654 roots = list(roots)
655 if not roots:
655 if not roots:
656 return nonodes
656 return nonodes
657 lowestrev = min([self.rev(n) for n in roots])
657 lowestrev = min([self.rev(n) for n in roots])
658 else:
658 else:
659 roots = [nullid] # Everybody's a descendent of nullid
659 roots = [nullid] # Everybody's a descendent of nullid
660 lowestrev = nullrev
660 lowestrev = nullrev
661 if (lowestrev == nullrev) and (heads is None):
661 if (lowestrev == nullrev) and (heads is None):
662 # We want _all_ the nodes!
662 # We want _all_ the nodes!
663 return ([self.node(r) for r in self], [nullid], list(self.heads()))
663 return ([self.node(r) for r in self], [nullid], list(self.heads()))
664 if heads is None:
664 if heads is None:
665 # All nodes are ancestors, so the latest ancestor is the last
665 # All nodes are ancestors, so the latest ancestor is the last
666 # node.
666 # node.
667 highestrev = len(self) - 1
667 highestrev = len(self) - 1
668 # Set ancestors to None to signal that every node is an ancestor.
668 # Set ancestors to None to signal that every node is an ancestor.
669 ancestors = None
669 ancestors = None
670 # Set heads to an empty dictionary for later discovery of heads
670 # Set heads to an empty dictionary for later discovery of heads
671 heads = {}
671 heads = {}
672 else:
672 else:
673 heads = list(heads)
673 heads = list(heads)
674 if not heads:
674 if not heads:
675 return nonodes
675 return nonodes
676 ancestors = {}
676 ancestors = {}
677 # Turn heads into a dictionary so we can remove 'fake' heads.
677 # Turn heads into a dictionary so we can remove 'fake' heads.
678 # Also, later we will be using it to filter out the heads we can't
678 # Also, later we will be using it to filter out the heads we can't
679 # find from roots.
679 # find from roots.
680 heads = dict.fromkeys(heads, 0)
680 heads = dict.fromkeys(heads, 0)
681 # Start at the top and keep marking parents until we're done.
681 # Start at the top and keep marking parents until we're done.
682 nodestotag = set(heads)
682 nodestotag = set(heads)
683 # Remember where the top was so we can use it as a limit later.
683 # Remember where the top was so we can use it as a limit later.
684 highestrev = max([self.rev(n) for n in nodestotag])
684 highestrev = max([self.rev(n) for n in nodestotag])
685 while nodestotag:
685 while nodestotag:
686 # grab a node to tag
686 # grab a node to tag
687 n = nodestotag.pop()
687 n = nodestotag.pop()
688 # Never tag nullid
688 # Never tag nullid
689 if n == nullid:
689 if n == nullid:
690 continue
690 continue
691 # A node's revision number represents its place in a
691 # A node's revision number represents its place in a
692 # topologically sorted list of nodes.
692                 # topologically sorted list of nodes.
693                 r = self.rev(n)
694                 if r >= lowestrev:
695                     if n not in ancestors:
696                         # If we are possibly a descendent of one of the roots
697                         # and we haven't already been marked as an ancestor
698                         ancestors[n] = 1 # Mark as ancestor
699                         # Add non-nullid parents to list of nodes to tag.
700                         nodestotag.update([p for p in self.parents(n) if
701                                            p != nullid])
702                     elif n in heads: # We've seen it before, is it a fake head?
703                         # So it is, real heads should not be the ancestors of
704                         # any other heads.
705                         heads.pop(n)
706             if not ancestors:
707                 return nonodes
708             # Now that we have our set of ancestors, we want to remove any
709             # roots that are not ancestors.
710
711             # If one of the roots was nullid, everything is included anyway.
712             if lowestrev > nullrev:
713                 # But, since we weren't, let's recompute the lowest rev to not
714                 # include roots that aren't ancestors.
715
716                 # Filter out roots that aren't ancestors of heads
717                 roots = [n for n in roots if n in ancestors]
718                 # Recompute the lowest revision
719                 if roots:
720                     lowestrev = min([self.rev(n) for n in roots])
721                 else:
722                     # No more roots?  Return empty list
723                     return nonodes
724             else:
725                 # We are descending from nullid, and don't need to care about
726                 # any other roots.
727                 lowestrev = nullrev
728                 roots = [nullid]
729         # Transform our roots list into a set.
730         descendents = set(roots)
731         # Also, keep the original roots so we can filter out roots that aren't
732         # 'real' roots (i.e. are descended from other roots).
733         roots = descendents.copy()
734         # Our topologically sorted list of output nodes.
735         orderedout = []
736         # Don't start at nullid since we don't want nullid in our output list,
737         # and if nullid shows up in descendents, empty parents will look like
738         # they're descendents.
739         for r in xrange(max(lowestrev, 0), highestrev + 1):
740             n = self.node(r)
741             isdescendent = False
742             if lowestrev == nullrev: # Everybody is a descendent of nullid
743                 isdescendent = True
744             elif n in descendents:
745                 # n is already a descendent
746                 isdescendent = True
747                 # This check only needs to be done here because all the roots
748                 # will start being marked as descendents before the loop.
749                 if n in roots:
750                     # If n was a root, check if it's a 'real' root.
751                     p = tuple(self.parents(n))
752                     # If any of its parents are descendents, it's not a root.
753                     if (p[0] in descendents) or (p[1] in descendents):
754                         roots.remove(n)
755             else:
756                 p = tuple(self.parents(n))
757                 # A node is a descendent if either of its parents is a
758                 # descendent.  (We seeded the descendents set with the roots
759                 # up there, remember?)
760                 if (p[0] in descendents) or (p[1] in descendents):
761                     descendents.add(n)
762                     isdescendent = True
763             if isdescendent and ((ancestors is None) or (n in ancestors)):
764                 # Only include nodes that are both descendents and ancestors.
765                 orderedout.append(n)
766                 if (ancestors is not None) and (n in heads):
767                     # We're trying to figure out which heads are reachable
768                     # from roots.
769                     # Mark this head as having been reached
770                     heads[n] = 1
771                 elif ancestors is None:
772                     # Otherwise, we're trying to discover the heads.
773                     # Assume this is a head because if it isn't, the next step
774                     # will eventually remove it.
775                     heads[n] = 1
776                     # But, obviously its parents aren't.
777                     for p in self.parents(n):
778                         heads.pop(p, None)
779         heads = [n for n in heads.iterkeys() if heads[n] != 0]
780         roots = list(roots)
781         assert orderedout
782         assert roots
783         assert heads
784         return (orderedout, roots, heads)
785
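The walk above leans on a revlog invariant: a revision's number is always greater than its parents' numbers, so one ascending pass can settle descendant-ness for every revision. A minimal standalone sketch of that idea (the parents callback and all names here are illustrative, not revlog API):

    def descendantrevs(parents, rootrevs, nrevs):
        # parents(r) -> iterable of parent revs of r (illustrative callback);
        # rootrevs: non-empty set of root revision numbers; nrevs: rev count.
        marked = set(rootrevs)
        for r in xrange(min(rootrevs) + 1, nrevs):
            # every parent of r has a smaller number, so its mark is final
            if any(p in marked for p in parents(r)):
                marked.add(r)
        return marked

For a linear graph, parents = lambda r: [r - 1] and descendantrevs(parents, set([2]), 5) yields revisions 2 through 4.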
786     def heads(self, start=None, stop=None):
787         """return the list of all nodes that have no children
788
789         if start is specified, only heads that are descendants of
790         start will be returned
791         if stop is specified, it will consider all the revs from stop
792         as if they had no children
793         """
794         if start is None and stop is None:
795             count = len(self)
796             if not count:
797                 return [nullid]
798             ishead = [1] * (count + 1)
799             index = self.index
800             for r in xrange(count):
801                 e = index[r]
802                 ishead[e[5]] = ishead[e[6]] = 0
803             return [self.node(r) for r in xrange(count) if ishead[r]]
804
805         if start is None:
806             start = nullid
807         if stop is None:
808             stop = []
809         stoprevs = set([self.rev(n) for n in stop])
810         startrev = self.rev(start)
811         reachable = {startrev: 1}
812         heads = {startrev: 1}
813
814         parentrevs = self.parentrevs
815         for r in xrange(startrev + 1, len(self)):
816             for p in parentrevs(r):
817                 if p in reachable:
818                     if r not in stoprevs:
819                         reachable[r] = 1
820                         heads[r] = 1
821                     if p in heads and p not in stoprevs:
822                         del heads[p]
823
824         return [self.node(r) for r in heads]
825
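Both branches above use the same trick: scan revisions in increasing order, mark a revision reachable when one of its parents is, and retire a parent as a head the moment a reachable child claims it. A self-contained sketch over a plain parent table (hypothetical names, stop-revs omitted):

    def headsfrom(parentrevs, startrev, nrevs):
        # parentrevs(r) -> parent revs of r; returns revs with no
        # reachable children, i.e. the heads descended from startrev
        reachable = set([startrev])
        heads = set([startrev])
        for r in xrange(startrev + 1, nrevs):
            for p in parentrevs(r):
                if p in reachable:
                    reachable.add(r)
                    heads.add(r)
                    heads.discard(p)  # p now has a reachable child
        return heads

With two branches off revision 0, parentrevs = lambda r: {1: [0], 2: [0]}[r], headsfrom(parentrevs, 0, 3) returns set([1, 2]).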
826     def children(self, node):
827         """find the children of a given node"""
828         c = []
829         p = self.rev(node)
830         for r in range(p + 1, len(self)):
831             prevs = [pr for pr in self.parentrevs(r) if pr != nullrev]
832             if prevs:
833                 for pr in prevs:
834                     if pr == p:
835                         c.append(self.node(r))
836             elif p == nullrev:
837                 c.append(self.node(r))
838         return c
839
840     def _match(self, id):
841         if isinstance(id, (long, int)):
842             # rev
843             return self.node(id)
844         if len(id) == 20:
845             # possibly a binary node
846             # odds of a binary node being all hex in ASCII are 1 in 10**25
847             try:
848                 node = id
849                 self.rev(node) # quick search the index
850                 return node
851             except LookupError:
852                 pass # may be partial hex id
853         try:
854             # str(rev)
855             rev = int(id)
856             if str(rev) != id:
857                 raise ValueError
858             if rev < 0:
859                 rev = len(self) + rev
860             if rev < 0 or rev >= len(self):
861                 raise ValueError
862             return self.node(rev)
863         except (ValueError, OverflowError):
864             pass
865         if len(id) == 40:
866             try:
867                 # a full hex nodeid?
868                 node = bin(id)
869                 self.rev(node)
870                 return node
871             except (TypeError, LookupError):
872                 pass
873
874     def _partialmatch(self, id):
875         if len(id) < 40:
876             try:
877                 # hex(node)[:...]
878                 l = len(id) / 2 # grab an even number of digits
879                 bin_id = bin(id[:l*2])
880                 nl = [n for n in self.nodemap if n[:l] == bin_id]
881                 nl = [n for n in nl if hex(n).startswith(id)]
882                 if len(nl) > 0:
883                     if len(nl) == 1:
884                         return nl[0]
885                     raise LookupError(id, self.indexfile,
886                                       _('ambiguous identifier'))
887                 return None
888             except TypeError:
889                 pass
890
891     def lookup(self, id):
892         """locate a node based on:
893             - revision number or str(revision number)
894             - nodeid or subset of hex nodeid
895         """
896         n = self._match(id)
897         if n is not None:
898             return n
899         n = self._partialmatch(id)
900         if n:
901             return n
902
903         raise LookupError(id, self.indexfile, _('no match found'))
904
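lookup() therefore resolves an identifier in a fixed order: exact rev number, binary nodeid, decimal-string rev, full 40-digit hex, then unambiguous hex prefix. A condensed illustration of that cascade over a hypothetical list of 20-byte ids (simplified: the real code's error handling and nodemap are richer):

    def resolve(nodes, ident):
        # nodes: list of 20-byte binary ids indexed by rev (illustrative)
        if isinstance(ident, (long, int)):          # 1. revision number
            return nodes[ident]
        if len(ident) == 20 and ident in nodes:     # 2. binary nodeid
            return ident
        try:                                        # 3. str(revision number)
            return nodes[int(ident)]
        except ValueError:
            pass
        if len(ident) == 40:                        # 4. full hex nodeid
            node = ident.decode('hex')
            if node in nodes:
                return node
        matches = [n for n in nodes                 # 5. unambiguous hex prefix
                   if n.encode('hex').startswith(ident)]
        if len(matches) == 1:
            return matches[0]
        raise LookupError(ident)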
905     def cmp(self, node, text):
906         """compare text with a given file revision"""
907         p1, p2 = self.parents(node)
908         return hash(text, p1, p2) != node
909
910     def chunk(self, rev, df=None):
911         def loadcache(df):
912             if not df:
913                 if self._inline:
914                     df = self.opener(self.indexfile)
915                 else:
916                     df = self.opener(self.datafile)
917             df.seek(start)
918             self._chunkcache = (start, df.read(cache_length))
919
920         start, length = self.start(rev), self.length(rev)
921         if self._inline:
922             start += (rev + 1) * self._io.size
923         end = start + length
924
925         offset = 0
926         if not self._chunkcache:
927             cache_length = max(65536, length)
928             loadcache(df)
929         else:
930             cache_start = self._chunkcache[0]
931             cache_length = len(self._chunkcache[1])
932             cache_end = cache_start + cache_length
933             if start >= cache_start and end <= cache_end:
934                 # it is cached
935                 offset = start - cache_start
936             else:
937                 cache_length = max(65536, length)
938                 loadcache(df)
939
940         # avoid copying large chunks
941         c = self._chunkcache[1]
942         if cache_length != length:
943             c = c[offset:offset + length]
944
945         return decompress(c)
946
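The cache logic above keeps one contiguous window of the data file in memory and serves a request from it only when [cache_start, cache_end] covers [start, end]. The arithmetic, reduced to its core (a sketch with illustrative names, not the revlog code path):

    def read_window(f, cache, start, length, minwindow=65536):
        # cache is (cache_start, bytes) or None; returns (data, newcache)
        end = start + length
        if cache:
            cstart, data = cache
            if cstart <= start and end <= cstart + len(data):
                off = start - cstart
                return data[off:off + length], cache   # cache hit
        f.seek(start)
        data = f.read(max(minwindow, length))          # refill the window
        return data[:length], (start, data)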
947     def revdiff(self, rev1, rev2):
948         """return or calculate a delta between two revisions"""
949         if rev1 + 1 == rev2 and self.base(rev1) == self.base(rev2):
950             return self.chunk(rev2)
951
952         return mdiff.textdiff(self.revision(self.node(rev1)),
953                               self.revision(self.node(rev2)))
954
955     def revision(self, node):
956         """return an uncompressed revision of a given node"""
957         if node == nullid:
958             return ""
959         if self._cache and self._cache[0] == node:
960             return str(self._cache[2])
961
962         # look up what we need to read
963         text = None
964         rev = self.rev(node)
965         base = self.base(rev)
966
967         # check rev flags
968         if self.index[rev][0] & 0xFFFF:
969             raise RevlogError(_('incompatible revision flag %x') %
970                               (self.index[rev][0] & 0xFFFF))
971
972         df = None
973
974         # do we have useful data cached?
975         if self._cache and self._cache[1] >= base and self._cache[1] < rev:
976             base = self._cache[1]
977             text = str(self._cache[2])
978             self._loadindex(base, rev + 1)
979             if not self._inline and rev > base + 1:
980                 df = self.opener(self.datafile)
981         else:
982             self._loadindex(base, rev + 1)
983             if not self._inline and rev > base:
984                 df = self.opener(self.datafile)
985             text = self.chunk(base, df=df)
986
987         bins = [self.chunk(r, df) for r in xrange(base + 1, rev + 1)]
988         text = mdiff.patches(text, bins)
989         p1, p2 = self.parents(node)
990         if node != hash(text, p1, p2):
991             raise RevlogError(_("integrity check failed on %s:%d")
992                               % (self.datafile, rev))
993
994         self._cache = (node, rev, text)
995         return text
996
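revision() is thus a fold over the delta chain: start from the full text stored at base(rev) and apply every stored delta up to rev. Schematically, using the method names from this class (caching, flags, and the hash check omitted):

    import mdiff  # the module's own binary patch helpers

    def reconstruct(rl, rev):
        # rl: any revlog-like object exposing base() and chunk() as above
        base = rl.base(rev)
        text = rl.chunk(base)                # full text at the chain base
        deltas = [rl.chunk(r) for r in xrange(base + 1, rev + 1)]
        return mdiff.patches(text, deltas)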
997     def checkinlinesize(self, tr, fp=None):
998         if not self._inline:
999             return
1000         if not fp:
1001             fp = self.opener(self.indexfile, 'r')
1002             fp.seek(0, 2)
1003         size = fp.tell()
1004         if size < 131072:
1005             return
1006         trinfo = tr.find(self.indexfile)
1007         if trinfo is None:
1008             raise RevlogError(_("%s not found in the transaction")
1009                               % self.indexfile)
1010
1011         trindex = trinfo[2]
1012         dataoff = self.start(trindex)
1013
1014         tr.add(self.datafile, dataoff)
1015         df = self.opener(self.datafile, 'w')
1016         try:
1017             calc = self._io.size
1018             for r in self:
1019                 start = self.start(r) + (r + 1) * calc
1020                 length = self.length(r)
1021                 fp.seek(start)
1022                 d = fp.read(length)
1023                 df.write(d)
1024         finally:
1025             df.close()
1026
1027         fp.close()
1028         fp = self.opener(self.indexfile, 'w', atomictemp=True)
1029         self.version &= ~(REVLOGNGINLINEDATA)
1030         self._inline = False
1031         for i in self:
1032             e = self._io.packentry(self.index[i], self.node, self.version, i)
1033             fp.write(e)
1034
1035         # if we don't call rename, the temp file will never replace the
1036         # real index
1037         fp.rename()
1038
1039         tr.replace(self.indexfile, trindex * calc)
1040         self._chunkcache = None
1041
1042     def addrevision(self, text, transaction, link, p1, p2, d=None):
1043         """add a revision to the log
1044
1045         text - the revision data to add
1046         transaction - the transaction object used for rollback
1047         link - the linkrev data to add
1048         p1, p2 - the parent nodeids of the revision
1049         d - an optional precomputed delta
1050         """
1051         dfh = None
1052         if not self._inline:
1053             dfh = self.opener(self.datafile, "a")
1054         ifh = self.opener(self.indexfile, "a+")
1055         try:
1056             return self._addrevision(text, transaction, link, p1, p2, d, ifh, dfh)
1057         finally:
1058             if dfh:
1059                 dfh.close()
1060             ifh.close()
1061
1062     def _addrevision(self, text, transaction, link, p1, p2, d, ifh, dfh):
1063         node = hash(text, p1, p2)
1064         if node in self.nodemap:
1065             return node
1066
1067         curr = len(self)
1068         prev = curr - 1
1069         base = self.base(prev)
1070         offset = self.end(prev)
1071
1072         if curr:
1073             if not d:
1074                 ptext = self.revision(self.node(prev))
1075                 d = mdiff.textdiff(ptext, text)
1076             data = compress(d)
1077             l = len(data[1]) + len(data[0])
1078             dist = l + offset - self.start(base)
1079
1080         # full versions are inserted when the needed deltas
1081         # become comparable to the uncompressed text
1082         if not curr or dist > len(text) * 2:
1083             data = compress(text)
1084             l = len(data[1]) + len(data[0])
1085             base = curr
1086
1087         e = (offset_type(offset, 0), l, len(text),
1088              base, link, self.rev(p1), self.rev(p2), node)
1089         self.index.insert(-1, e)
1090         self.nodemap[node] = curr
1091
1092         entry = self._io.packentry(e, self.node, self.version, curr)
1093         if not self._inline:
1094             transaction.add(self.datafile, offset)
1095             transaction.add(self.indexfile, curr * len(entry))
1096             if data[0]:
1097                 dfh.write(data[0])
1098             dfh.write(data[1])
1099             dfh.flush()
1100             ifh.write(entry)
1101         else:
1102             offset += curr * self._io.size
1103             transaction.add(self.indexfile, offset, curr)
1104             ifh.write(entry)
1105             ifh.write(data[0])
1106             ifh.write(data[1])
1107             self.checkinlinesize(transaction, ifh)
1108
1109         self._cache = (node, curr, text)
1110         return node
1111
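The key storage decision above is the dist > len(text) * 2 test: once the compressed bytes needed to replay the whole chain exceed twice the new full text, storing a fresh full version is cheaper to read back. In isolation (a sketch of that choice; compress() stands for this module's own helper, the parameter names are illustrative):

    import mdiff

    def choose_storage(prevtext, text, chainstart, offset):
        d = mdiff.textdiff(prevtext, text)   # delta against the previous rev
        data = compress(d)
        l = len(data[0]) + len(data[1])
        dist = l + offset - chainstart       # bytes needed to replay the chain
        if dist > len(text) * 2:             # chain too long: store full text
            data = compress(text)
            l = len(data[0]) + len(data[1])
        return data, l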
1112     def ancestor(self, a, b):
1113         """calculate the least common ancestor of nodes a and b"""
1114
1115         def parents(rev):
1116             return [p for p in self.parentrevs(rev) if p != nullrev]
1117
1118         c = ancestor.ancestor(self.rev(a), self.rev(b), parents)
1119         if c is None:
1120             return nullid
1121
1122         return self.node(c)
1123
1124     def group(self, nodelist, lookup, infocollect=None):
1125         """calculate a delta group
1126
1127         Given a list of changeset revs, return a set of deltas and
1128         metadata corresponding to nodes. The first delta is
1129         parent(nodes[0]) -> nodes[0]; the receiver is guaranteed to
1130         have this parent as it has all history before these
1131         changesets. The parent is parent[0].
1132         """
1133         revs = [self.rev(n) for n in nodelist]
1134
1135         # if we don't have any revisions touched by these changesets, bail
1136         if not revs:
1137             yield changegroup.closechunk()
1138             return
1139
1140         # add the parent of the first rev
1141         p = self.parents(self.node(revs[0]))[0]
1142         revs.insert(0, self.rev(p))
1143
1144         # build deltas
1145         for d in xrange(0, len(revs) - 1):
1146             a, b = revs[d], revs[d + 1]
1147             nb = self.node(b)
1148
1149             if infocollect is not None:
1150                 infocollect(nb)
1151
1152             p = self.parents(nb)
1153             meta = nb + p[0] + p[1] + lookup(nb)
1154             if a == -1:
1155                 d = self.revision(nb)
1156                 meta += mdiff.trivialdiffheader(len(d))
1157             else:
1158                 d = self.revdiff(a, b)
1159             yield changegroup.chunkheader(len(meta) + len(d))
1160             yield meta
1161             if len(d) > 2**20:
1162                 pos = 0
1163                 while pos < len(d):
1164                     pos2 = pos + 2 ** 18
1165                     yield d[pos:pos2]
1166                     pos = pos2
1167             else:
1168                 yield d
1169
1170         yield changegroup.closechunk()
1171
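Each entry emitted above is one length-prefixed chunk: an 80-byte header (node, p1, p2, linked changeset) followed by the delta, with deltas over 1 MB streamed in 256 KB windows rather than yielded whole. The windowing, pulled out on its own:

    def slices(d, window=2 ** 18):
        # stream a large delta in fixed-size windows, as group() does above
        for pos in xrange(0, len(d), window):
            yield d[pos:pos + window]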
1172     def addgroup(self, revs, linkmapper, transaction):
1173         """
1174         add a delta group
1175
1176         given a set of deltas, add them to the revision log. the
1177         first delta is against its parent, which should be in our
1178         log, the rest are against the previous delta.
1179         """
1180
1181         # track the base of the current delta log
1182         r = len(self)
1183         t = r - 1
1184         node = None
1185
1186         base = prev = nullrev
1187         start = end = textlen = 0
1188         if r:
1189             end = self.end(t)
1190
1191         ifh = self.opener(self.indexfile, "a+")
1192         isize = r * self._io.size
1193         if self._inline:
1194             transaction.add(self.indexfile, end + isize, r)
1195             dfh = None
1196         else:
1197             transaction.add(self.indexfile, isize, r)
1198             transaction.add(self.datafile, end)
1199             dfh = self.opener(self.datafile, "a")
1200
1201         try:
1202             # loop through our set of deltas
1203             chain = None
1204             for chunk in revs:
1205                 node, p1, p2, cs = struct.unpack("20s20s20s20s", chunk[:80])
1206                 link = linkmapper(cs)
1207                 if node in self.nodemap:
1208                     # this can happen if two branches make the same change
1209                     chain = node
1210                     continue
1211                 delta = buffer(chunk, 80)
1212                 del chunk
1213
1214                 for p in (p1, p2):
1215                     if p not in self.nodemap:
1216                         raise LookupError(p, self.indexfile, _('unknown parent'))
1217
1218                 if not chain:
1219                     # retrieve the parent revision of the delta chain
1220                     chain = p1
1221                     if chain not in self.nodemap:
1222                         raise LookupError(chain, self.indexfile, _('unknown base'))
1223
1224                 # full versions are inserted when the needed deltas become
1225                 # comparable to the uncompressed text or when the previous
1226                 # version is not the one we have a delta against. We use
1227                 # the size of the previous full rev as a proxy for the
1228                 # current size.
1229
1230                 if chain == prev:
1231                     cdelta = compress(delta)
1232                     cdeltalen = len(cdelta[0]) + len(cdelta[1])
1233                     textlen = mdiff.patchedsize(textlen, delta)
1234
1235                 if chain != prev or (end - start + cdeltalen) > textlen * 2:
1236                     # flush our writes here so we can read it in revision
1237                     if dfh:
1238                         dfh.flush()
1239                     ifh.flush()
1240                     text = self.revision(chain)
1241                     if len(text) == 0:
1242                         # skip over trivial delta header
1243                         text = buffer(delta, 12)
1244                     else:
1245                         text = mdiff.patches(text, [delta])
1246                     del delta
1247                     chk = self._addrevision(text, transaction, link, p1, p2, None,
1248                                             ifh, dfh)
1249                     if not dfh and not self._inline:
1250                         # addrevision switched from inline to conventional
1251                         # reopen the index
1252                         dfh = self.opener(self.datafile, "a")
1253                         ifh = self.opener(self.indexfile, "a")
1254                     if chk != node:
1255                         raise RevlogError(_("consistency error adding group"))
1256                     textlen = len(text)
1257                 else:
1258                     e = (offset_type(end, 0), cdeltalen, textlen, base,
1259                          link, self.rev(p1), self.rev(p2), node)
1260                     self.index.insert(-1, e)
1261                     self.nodemap[node] = r
1262                     entry = self._io.packentry(e, self.node, self.version, r)
1263                     if self._inline:
1264                         ifh.write(entry)
1265                         ifh.write(cdelta[0])
1266                         ifh.write(cdelta[1])
1267                         self.checkinlinesize(transaction, ifh)
1268                         if not self._inline:
1269                             dfh = self.opener(self.datafile, "a")
1270                             ifh = self.opener(self.indexfile, "a")
1271                     else:
1272                         dfh.write(cdelta[0])
1273                         dfh.write(cdelta[1])
1274                         ifh.write(entry)
1275
1276                 t, r, chain, prev = r, r + 1, node, node
1277                 base = self.base(t)
1278                 start = self.start(base)
1279                 end = self.end(t)
1280         finally:
1281             if dfh:
1282                 dfh.close()
1283             ifh.close()
1284
1285         return node
1286
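The consumer side of the chunk format produced by group() is the unpack at the top of the loop above: each chunk carries an 80-byte header naming the node, its two parents, and the changeset it is linked to, and everything after byte 80 is the delta. Isolated:

    import struct

    def parsechunk(chunk):
        # split one changegroup chunk into its header fields and delta body,
        # mirroring the unpack in addgroup() above
        node, p1, p2, cs = struct.unpack("20s20s20s20s", chunk[:80])
        return node, p1, p2, cs, buffer(chunk, 80)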
1287     def strip(self, minlink, transaction):
1288         """truncate the revlog on the first revision with a linkrev >= minlink
1289
1290         This function is called when we're stripping revision minlink and
1291         its descendants from the repository.
1292
1293         We have to remove all revisions with linkrev >= minlink, because
1294         the equivalent changelog revisions will be renumbered after the
1295         strip.
1296
1297         So we truncate the revlog on the first of these revisions, and
1298         trust that the caller has saved the revisions that shouldn't be
1299         removed and that it'll re-add them after this truncation.
1300         """
1301         if len(self) == 0:
1302             return
1303
1304         if isinstance(self.index, lazyindex):
1305             self._loadindexmap()
1306
1307         for rev in self:
1308             if self.index[rev][4] >= minlink:
1309                 break
1310         else:
1311             return
1312
1313         # first truncate the files on disk
1314         end = self.start(rev)
1315         if not self._inline:
1316             transaction.add(self.datafile, end)
1317             end = rev * self._io.size
1318         else:
1319             end += rev * self._io.size
1320
1321         transaction.add(self.indexfile, end)
1322
1323         # then reset internal state in memory to forget those revisions
1324         self._cache = None
1325         self._chunkcache = None
1326         for x in xrange(rev, len(self)):
1327             del self.nodemap[self.node(x)]
1328
1329         del self.index[rev:-1]
1330
1331     def checksize(self):
1332         expected = 0
1333         if len(self):
1334             expected = max(0, self.end(len(self) - 1))
1335
1336         try:
1337             f = self.opener(self.datafile)
1338             f.seek(0, 2)
1339             actual = f.tell()
1340             dd = actual - expected
1341         except IOError, inst:
1342             if inst.errno != errno.ENOENT:
1343                 raise
1344             dd = 0
1345
1346         try:
1347             f = self.opener(self.indexfile)
1348             f.seek(0, 2)
1349             actual = f.tell()
1350             s = self._io.size
1351             i = max(0, actual / s)
1352             di = actual - (i * s)
1353             if self._inline:
1354                 databytes = 0
1355                 for r in self:
1356                     databytes += max(0, self.length(r))
1357                 dd = 0
1358                 di = actual - len(self) * s - databytes
1359         except IOError, inst:
1360             if inst.errno != errno.ENOENT:
1361                 raise
1362             di = 0
1363
1364         return (dd, di)
1365
1366     def files(self):
1367         res = [self.indexfile]
1368         if not self._inline:
1369             res.append(self.datafile)
1370         return res
@@ -1,450 +1,451 b''
1 #!/usr/bin/env python
2 # Copyright (C) 2004, 2005 Canonical Ltd
3 #
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 2 of the License, or
7 # (at your option) any later version.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software
16 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
17
18 # mbp: "you know that thing where cvs gives you conflict markers?"
19 # s: "i hate that."
20
21 from i18n import _
22 import util, mdiff, sys, os
22 import util, mdiff
23 import sys, os
24
25 class CantReprocessAndShowBase(Exception):
26     pass
27
28 def intersect(ra, rb):
29     """Given two ranges return the range where they intersect or None.
30
31     >>> intersect((0, 10), (0, 6))
32     (0, 6)
33     >>> intersect((0, 10), (5, 15))
34     (5, 10)
35     >>> intersect((0, 10), (10, 15))
36     >>> intersect((0, 9), (10, 15))
37     >>> intersect((0, 9), (7, 15))
38     (7, 9)
39     """
40     assert ra[0] <= ra[1]
41     assert rb[0] <= rb[1]
42
43     sa = max(ra[0], rb[0])
44     sb = min(ra[1], rb[1])
45     if sa < sb:
46         return sa, sb
47     else:
48         return None
49
50 def compare_range(a, astart, aend, b, bstart, bend):
51     """Compare a[astart:aend] == b[bstart:bend], without slicing.
52     """
53     if (aend - astart) != (bend - bstart):
54         return False
55     for ia, ib in zip(xrange(astart, aend), xrange(bstart, bend)):
56         if a[ia] != b[ib]:
57             return False
58     else:
59         return True
60
61 class Merge3Text(object):
62     """3-way merge of texts.
63
64     Given strings BASE, OTHER, THIS, tries to produce a combined text
65     incorporating the changes from both BASE->OTHER and BASE->THIS."""
66     def __init__(self, basetext, atext, btext, base=None, a=None, b=None):
67         self.basetext = basetext
68         self.atext = atext
69         self.btext = btext
70         if base is None:
71             base = mdiff.splitnewlines(basetext)
72         if a is None:
73             a = mdiff.splitnewlines(atext)
74         if b is None:
75             b = mdiff.splitnewlines(btext)
76         self.base = base
77         self.a = a
78         self.b = b
79
80     def merge_lines(self,
81                     name_a=None,
82                     name_b=None,
83                     name_base=None,
84                     start_marker='<<<<<<<',
85                     mid_marker='=======',
86                     end_marker='>>>>>>>',
87                     base_marker=None,
88                     reprocess=False):
89         """Return merge in cvs-like form.
90         """
91         self.conflicts = False
92         newline = '\n'
93         if len(self.a) > 0:
94             if self.a[0].endswith('\r\n'):
95                 newline = '\r\n'
96             elif self.a[0].endswith('\r'):
97                 newline = '\r'
98         if base_marker and reprocess:
99             raise CantReprocessAndShowBase()
100         if name_a:
101             start_marker = start_marker + ' ' + name_a
102         if name_b:
103             end_marker = end_marker + ' ' + name_b
104         if name_base and base_marker:
105             base_marker = base_marker + ' ' + name_base
106         merge_regions = self.merge_regions()
107         if reprocess is True:
108             merge_regions = self.reprocess_merge_regions(merge_regions)
109         for t in merge_regions:
110             what = t[0]
111             if what == 'unchanged':
112                 for i in range(t[1], t[2]):
113                     yield self.base[i]
114             elif what == 'a' or what == 'same':
115                 for i in range(t[1], t[2]):
116                     yield self.a[i]
117             elif what == 'b':
118                 for i in range(t[1], t[2]):
119                     yield self.b[i]
120             elif what == 'conflict':
121                 self.conflicts = True
122                 yield start_marker + newline
123                 for i in range(t[3], t[4]):
124                     yield self.a[i]
125                 if base_marker is not None:
126                     yield base_marker + newline
127                     for i in range(t[1], t[2]):
128                         yield self.base[i]
129                 yield mid_marker + newline
130                 for i in range(t[5], t[6]):
131                     yield self.b[i]
132                 yield end_marker + newline
133             else:
134                 raise ValueError(what)
135
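A small usage sketch of merge_lines(), showing the marker output for a one-line conflict (the file contents are illustrative):

    m3 = Merge3Text("a\nb\nc\n",      # base
                    "a\nB\nc\n",      # local (a)
                    "a\nb2\nc\n")     # other (b)
    print ''.join(m3.merge_lines(name_a='local', name_b='other'))
    # a
    # <<<<<<< local
    # B
    # =======
    # b2
    # >>>>>>> other
    # c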
136     def merge_annotated(self):
137         """Return merge with conflicts, showing origin of lines.
138
139         Most useful for debugging merge.
140         """
141         for t in self.merge_regions():
142             what = t[0]
143             if what == 'unchanged':
144                 for i in range(t[1], t[2]):
145                     yield 'u | ' + self.base[i]
146             elif what == 'a' or what == 'same':
147                 for i in range(t[1], t[2]):
148                     yield what[0] + ' | ' + self.a[i]
149             elif what == 'b':
150                 for i in range(t[1], t[2]):
151                     yield 'b | ' + self.b[i]
152             elif what == 'conflict':
153                 yield '<<<<\n'
154                 for i in range(t[3], t[4]):
155                     yield 'A | ' + self.a[i]
156                 yield '----\n'
157                 for i in range(t[5], t[6]):
158                     yield 'B | ' + self.b[i]
159                 yield '>>>>\n'
160             else:
161                 raise ValueError(what)
162
163     def merge_groups(self):
164         """Yield sequence of line groups. Each one is a tuple:
165
166         'unchanged', lines
167              Lines unchanged from base
168
169         'a', lines
170              Lines taken from a
171
172         'same', lines
173              Lines taken from a (and equal to b)
174
175         'b', lines
176              Lines taken from b
177
178         'conflict', base_lines, a_lines, b_lines
179              Lines from base were changed to either a or b and conflict.
180         """
181         for t in self.merge_regions():
182             what = t[0]
183             if what == 'unchanged':
184                 yield what, self.base[t[1]:t[2]]
185             elif what == 'a' or what == 'same':
186                 yield what, self.a[t[1]:t[2]]
187             elif what == 'b':
188                 yield what, self.b[t[1]:t[2]]
189             elif what == 'conflict':
190                 yield (what,
191                        self.base[t[1]:t[2]],
192                        self.a[t[3]:t[4]],
193                        self.b[t[5]:t[6]])
194             else:
195                 raise ValueError(what)
196
197     def merge_regions(self):
198         """Return sequences of matching and conflicting regions.
199
200         This returns tuples, where the first value says what kind we
201         have:
202
203         'unchanged', start, end
204              Take a region of base[start:end]
205
206         'same', astart, aend
207              b and a are different from base but give the same result
208
209         'a', start, end
210              Non-clashing insertion from a[start:end]
211
212         Method is as follows:
213
214         The two sequences align only on regions which match the base
215         and both descendants. These are found by doing a two-way diff
216         of each one against the base, and then finding the
217         intersections between those regions. These "sync regions"
218         are by definition unchanged in both and easily dealt with.
219
220         The regions in between can be in any of three cases:
221         changed in only a, changed in only b, or conflicted.
222         """
223
224         # section a[0:ia] has been disposed of, etc
225         iz = ia = ib = 0
226
227         for zmatch, zend, amatch, aend, bmatch, bend in self.find_sync_regions():
228             #print 'match base [%d:%d]' % (zmatch, zend)
229
230             matchlen = zend - zmatch
231             assert matchlen >= 0
232             assert matchlen == (aend - amatch)
233             assert matchlen == (bend - bmatch)
234
235             len_a = amatch - ia
236             len_b = bmatch - ib
237             len_base = zmatch - iz
238             assert len_a >= 0
239             assert len_b >= 0
240             assert len_base >= 0
241
242             #print 'unmatched a=%d, b=%d' % (len_a, len_b)
243
244             if len_a or len_b:
245                 # try to avoid actually slicing the lists
246                 equal_a = compare_range(self.a, ia, amatch,
247                                         self.base, iz, zmatch)
248                 equal_b = compare_range(self.b, ib, bmatch,
249                                         self.base, iz, zmatch)
250                 same = compare_range(self.a, ia, amatch,
251                                      self.b, ib, bmatch)
252
253                 if same:
254                     yield 'same', ia, amatch
255                 elif equal_a and not equal_b:
256                     yield 'b', ib, bmatch
257                 elif equal_b and not equal_a:
258                     yield 'a', ia, amatch
259                 elif not equal_a and not equal_b:
260                     yield 'conflict', iz, zmatch, ia, amatch, ib, bmatch
261                 else:
262                     raise AssertionError("can't handle a=b=base but unmatched")
263
264                 ia = amatch
265                 ib = bmatch
266             iz = zmatch
267
268             # if the same part of the base was deleted on both sides
269             # that's OK, we can just skip it.
270
271
272             if matchlen > 0:
273                 assert ia == amatch
274                 assert ib == bmatch
275                 assert iz == zmatch
276
277                 yield 'unchanged', zmatch, zend
278                 iz = zend
279                 ia = aend
280                 ib = bend
281
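Between two sync regions, the classification above reduces to three pairwise range comparisons. The decision table, restated as a tiny function mirroring the if/elif chain (not new behavior):

    def classify(equal_a, equal_b, same):
        # equal_a: a matches base; equal_b: b matches base; same: a matches b
        if same:
            return 'same'        # both sides made the identical change
        if equal_a and not equal_b:
            return 'b'           # only b diverged from base: take b
        if equal_b and not equal_a:
            return 'a'           # only a diverged from base: take a
        if not equal_a and not equal_b:
            return 'conflict'    # both diverged, differently
        raise AssertionError("can't handle a=b=base but unmatched")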
282     def reprocess_merge_regions(self, merge_regions):
283         """Where there are conflict regions, remove the agreed lines.
284
285         Lines where both A and B have made the same changes are
286         eliminated.
287         """
288         for region in merge_regions:
289             if region[0] != "conflict":
290                 yield region
291                 continue
292             type, iz, zmatch, ia, amatch, ib, bmatch = region
293             a_region = self.a[ia:amatch]
294             b_region = self.b[ib:bmatch]
295             matches = mdiff.get_matching_blocks(''.join(a_region),
296                                                 ''.join(b_region))
297             next_a = ia
298             next_b = ib
299             for region_ia, region_ib, region_len in matches[:-1]:
300                 region_ia += ia
301                 region_ib += ib
302                 reg = self.mismatch_region(next_a, region_ia, next_b,
303                                            region_ib)
304                 if reg is not None:
305                     yield reg
306                 yield 'same', region_ia, region_len + region_ia
307                 next_a = region_ia + region_len
308                 next_b = region_ib + region_len
309             reg = self.mismatch_region(next_a, amatch, next_b, bmatch)
310             if reg is not None:
311                 yield reg
312
313     def mismatch_region(next_a, region_ia, next_b, region_ib):
314         if next_a < region_ia or next_b < region_ib:
315             return 'conflict', None, None, next_a, region_ia, next_b, region_ib
316     mismatch_region = staticmethod(mismatch_region)
317
    def find_sync_regions(self):
        """Return a list of sync regions, where both descendants match the base.

        Generates a list of (base1, base2, a1, a2, b1, b2). There is
        always a zero-length sync region at the end of all the files.
        """

        ia = ib = 0
        amatches = mdiff.get_matching_blocks(self.basetext, self.atext)
        bmatches = mdiff.get_matching_blocks(self.basetext, self.btext)
        len_a = len(amatches)
        len_b = len(bmatches)

        sl = []

        while ia < len_a and ib < len_b:
            abase, amatch, alen = amatches[ia]
            bbase, bmatch, blen = bmatches[ib]

            # there is an unconflicted block at i; how long does it
            # extend? until whichever one ends earlier.
            i = intersect((abase, abase+alen), (bbase, bbase+blen))
            if i:
                intbase = i[0]
                intend = i[1]
                intlen = intend - intbase

                # found a match of base[i[0], i[1]]; this may be less than
                # the region that matches in either one
                assert intlen <= alen
                assert intlen <= blen
                assert abase <= intbase
                assert bbase <= intbase

                asub = amatch + (intbase - abase)
                bsub = bmatch + (intbase - bbase)
                aend = asub + intlen
                bend = bsub + intlen

                assert self.base[intbase:intend] == self.a[asub:aend], \
                       (self.base[intbase:intend], self.a[asub:aend])

                assert self.base[intbase:intend] == self.b[bsub:bend]

                sl.append((intbase, intend,
                           asub, aend,
                           bsub, bend))

            # advance whichever one ends first in the base text
            if (abase + alen) < (bbase + blen):
                ia += 1
            else:
                ib += 1

        intbase = len(self.base)
        abase = len(self.a)
        bbase = len(self.b)
        sl.append((intbase, intbase, abase, abase, bbase, bbase))

        return sl
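To make the tuple format concrete, here is a toy run; a sketch assuming a Python 2 environment with Mercurial importable and the Merge3Text constructor from earlier in this file (which splits each text into lines):

# sync regions for a one-line edit in a and a one-line append in b;
# offsets are line indices into base, a and b respectively
from mercurial.simplemerge import Merge3Text

m3 = Merge3Text('a\nb\nc\n',       # base
                'a\nB\nc\n',       # a: middle line changed
                'a\nb\nc\nd\n')    # b: line appended
print m3.find_sync_regions()
# roughly: [(0, 1, 0, 1, 0, 1), (2, 3, 2, 3, 2, 3), (3, 3, 3, 3, 4, 4)]
# note the mandatory zero-length sync region at the end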
    def find_unconflicted(self):
        """Return a list of ranges in base that are not conflicted."""
        am = mdiff.get_matching_blocks(self.basetext, self.atext)
        bm = mdiff.get_matching_blocks(self.basetext, self.btext)

        unc = []

        while am and bm:
            # there is an unconflicted block at i; how long does it
            # extend? until whichever one ends earlier.
            a1 = am[0][0]
            a2 = a1 + am[0][2]
            b1 = bm[0][0]
            b2 = b1 + bm[0][2]
            i = intersect((a1, a2), (b1, b2))
            if i:
                unc.append(i)

            if a2 < b2:
                del am[0]
            else:
                del bm[0]

        return unc
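Continuing the sketch above, find_unconflicted reports only the base-side ranges, without the a/b coordinates:

# base lines 0 ('a\n') and 2 ('c\n') survive unchanged in both
# descendants, so (roughly) [(0, 1), (2, 3)] comes back
print m3.find_unconflicted()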
def simplemerge(ui, local, base, other, **opts):
    def readfile(filename):
        f = open(filename, "rb")
        text = f.read()
        f.close()
        if util.binary(text):
            msg = _("%s looks like a binary file.") % filename
            if not opts.get('text'):
                raise util.Abort(msg)
            elif not opts.get('quiet'):
                ui.warn(_('warning: %s\n') % msg)
        return text

    name_a = local
    name_b = other
    labels = opts.get('label', [])
    if labels:
        name_a = labels.pop(0)
    if labels:
        name_b = labels.pop(0)
    if labels:
        raise util.Abort(_("can only specify two labels."))

    localtext = readfile(local)
    basetext = readfile(base)
    othertext = readfile(other)

    local = os.path.realpath(local)
    if not opts.get('print'):
        opener = util.opener(os.path.dirname(local))
        out = opener(os.path.basename(local), "w", atomictemp=True)
    else:
        out = sys.stdout

    reprocess = not opts.get('no_minimal')

    m3 = Merge3Text(basetext, localtext, othertext)
    for line in m3.merge_lines(name_a=name_a, name_b=name_b,
                               reprocess=reprocess):
        out.write(line)

    if not opts.get('print'):
        out.rename()

    if m3.conflicts:
        if not opts.get('quiet'):
            ui.warn(_("warning: conflicts during merge.\n"))
        return 1
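A hedged usage sketch of the driver above (Python 2, Mercurial on sys.path); since 'print' is a reserved word, that option has to be passed through a keyword dict:

# merge two descendants of base.txt; print=True writes the result to
# stdout instead of rewriting local.txt in place
from mercurial import ui as uimod
from mercurial.simplemerge import simplemerge

rc = simplemerge(uimod.ui(), 'local.txt', 'base.txt', 'other.txt',
                 **{'print': True, 'label': ['local', 'other']})
# rc is 1 if conflict markers were emitted, None otherwise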