##// END OF EJS Templates
Add revlog.LookupError exception, and use it instead of RevlogError....
Brendan Cully -
r3930:01d98d68 default
parent child Browse files
Show More
@@ -1,255 +1,255 b''
1 """
1 """
2 bundlerepo.py - repository class for viewing uncompressed bundles
2 bundlerepo.py - repository class for viewing uncompressed bundles
3
3
4 This provides a read-only repository interface to bundles as if
4 This provides a read-only repository interface to bundles as if
5 they were part of the actual repository.
5 they were part of the actual repository.
6
6
7 Copyright 2006 Benoit Boissinot <benoit.boissinot@ens-lyon.org>
7 Copyright 2006 Benoit Boissinot <benoit.boissinot@ens-lyon.org>
8
8
9 This software may be used and distributed according to the terms
9 This software may be used and distributed according to the terms
10 of the GNU General Public License, incorporated herein by reference.
10 of the GNU General Public License, incorporated herein by reference.
11 """
11 """
12
12
13 from node import *
13 from node import *
14 from i18n import _
14 from i18n import _
15 import changegroup, util, os, struct, bz2, tempfile
15 import changegroup, util, os, struct, bz2, tempfile
16
16
17 import localrepo, changelog, manifest, filelog, revlog
17 import localrepo, changelog, manifest, filelog, revlog
18
18
class bundlerevlog(revlog.revlog):
    """A revlog overlaid with revisions read from a bundle file.

    Revisions already present in the on-disk revlog are served normally;
    revisions that only exist in the bundle are located via offsets into
    ``bundlefile`` and reconstructed on demand by applying the stored
    deltas.  All mutating revlog operations are unsupported (read-only).
    """

    def __init__(self, opener, indexfile, datafile, bundlefile,
                 linkmapper=None):
        # How it works:
        # to retrieve a revision, we need to know the offset of
        # the revision in the bundlefile (an opened file).
        #
        # We store this offset in the index (start), to differentiate a
        # rev in the bundle and from a rev in the revlog, we check
        # len(index[r]). If the tuple is bigger than 7, it is a bundle
        # (it is bigger since we store the node to which the delta is)
        #
        revlog.revlog.__init__(self, opener, indexfile, datafile)
        self.bundlefile = bundlefile
        # maps bundle rev number -> node its delta is based on
        self.basemap = {}

        def chunkpositer():
            # yield each changegroup chunk together with its start
            # offset inside the bundle file
            for chunk in changegroup.chunkiter(bundlefile):
                pos = bundlefile.tell()
                yield chunk, pos - len(chunk)

        n = self.count()
        prev = None
        for chunk, start in chunkpositer():
            size = len(chunk)
            if size < 80:
                raise util.Abort("invalid changegroup")
            # skip the 80-byte node header of the chunk
            start += 80
            size -= 80
            node, p1, p2, cs = struct.unpack("20s20s20s20s", chunk[:80])
            if node in self.nodemap:
                prev = node
                continue
            for p in (p1, p2):
                if p not in self.nodemap:
                    # BUG FIX: report the parent that is actually
                    # missing (p), not unconditionally p1.
                    raise revlog.LookupError(_("unknown parent %s")
                                             % short(p))
            if linkmapper is None:
                link = n
            else:
                link = linkmapper(cs)

            if not prev:
                prev = p1
            # start, size, base is not used, link, p1, p2, delta ref
            if self.version == revlog.REVLOGV0:
                e = (start, size, None, link, p1, p2, node)
            else:
                e = (self.offset_type(start, 0), size, -1, None, link,
                     self.rev(p1), self.rev(p2), node)
            self.basemap[n] = prev
            self.index.append(e)
            self.nodemap[node] = n
            prev = node
            n += 1

    def bundle(self, rev):
        """is rev from the bundle"""
        if rev < 0:
            return False
        return rev in self.basemap

    def bundlebase(self, rev):
        """return the node this bundle rev's delta is based on"""
        return self.basemap[rev]

    def chunk(self, rev, df=None, cachelen=4096):
        # Warning: in case of bundle, the diff is against bundlebase,
        # not against rev - 1
        # XXX: could use some caching
        if not self.bundle(rev):
            return revlog.revlog.chunk(self, rev, df, cachelen)
        self.bundlefile.seek(self.start(rev))
        return self.bundlefile.read(self.length(rev))

    def revdiff(self, rev1, rev2):
        """return or calculate a delta between two revisions"""
        if self.bundle(rev1) and self.bundle(rev2):
            # hot path for bundle
            revb = self.rev(self.bundlebase(rev2))
            if revb == rev1:
                return self.chunk(rev2)
        elif not self.bundle(rev1) and not self.bundle(rev2):
            # BUG FIX: delegate to the parent class's revdiff.  The
            # previous call revlog.revlog.chunk(self, rev1, rev2)
            # passed rev2 into chunk()'s df (file handle) parameter,
            # which does not compute a delta between the two revisions.
            return revlog.revlog.revdiff(self, rev1, rev2)

        return self.diff(self.revision(self.node(rev1)),
                         self.revision(self.node(rev2)))

    def revision(self, node):
        """return an uncompressed revision of a given node"""
        if node == nullid:
            return ""

        text = None
        chain = []
        iter_node = node
        rev = self.rev(iter_node)
        # reconstruct the revision if it is from a changegroup
        while self.bundle(rev):
            if self.cache and self.cache[0] == iter_node:
                text = self.cache[2]
                break
            chain.append(rev)
            iter_node = self.bundlebase(rev)
            rev = self.rev(iter_node)
        if text is None:
            text = revlog.revlog.revision(self, iter_node)

        # apply the collected deltas, innermost base first
        while chain:
            delta = self.chunk(chain.pop())
            text = self.patches(text, [delta])

        p1, p2 = self.parents(node)
        if node != revlog.hash(text, p1, p2):
            raise revlog.RevlogError(_("integrity check failed on %s:%d")
                                     % (self.datafile, self.rev(node)))

        self.cache = (node, self.rev(node), text)
        return text

    # a bundle repository is read-only: reject all mutators
    def addrevision(self, text, transaction, link, p1=None, p2=None, d=None):
        raise NotImplementedError
    def addgroup(self, revs, linkmapper, transaction, unique=0):
        raise NotImplementedError
    def strip(self, rev, minlink):
        raise NotImplementedError
    def checksize(self):
        raise NotImplementedError
139
139
class bundlechangelog(bundlerevlog, changelog.changelog):
    """Changelog served from a local repo with a bundle overlaid."""

    def __init__(self, opener, bundlefile):
        # initialize the plain changelog first so indexfile/datafile
        # are set, then layer the bundle contents on top
        changelog.changelog.__init__(self, opener)
        bundlerevlog.__init__(self, opener, self.indexfile,
                              self.datafile, bundlefile)
145
145
class bundlemanifest(bundlerevlog, manifest.manifest):
    """Manifest served from a local repo with a bundle overlaid."""

    def __init__(self, opener, bundlefile, linkmapper):
        # initialize the plain manifest first so indexfile/datafile
        # are set, then layer the bundle contents on top
        manifest.manifest.__init__(self, opener)
        bundlerevlog.__init__(self, opener, self.indexfile,
                              self.datafile, bundlefile, linkmapper)
151
151
class bundlefilelog(bundlerevlog, filelog.filelog):
    """Filelog served from a local repo with a bundle overlaid."""

    def __init__(self, opener, path, bundlefile, linkmapper):
        # initialize the plain filelog first so indexfile/datafile
        # are set, then layer the bundle contents on top
        filelog.filelog.__init__(self, opener, path)
        bundlerevlog.__init__(self, opener, self.indexfile,
                              self.datafile, bundlefile, linkmapper)
157
157
class bundlerepository(localrepo.localrepository):
    """Read-only repository view of a local repo plus a bundle file."""

    def __init__(self, ui, path, bundlename):
        localrepo.localrepository.__init__(self, ui, path)

        self._url = 'bundle:' + bundlename
        if path:
            self._url += '+' + path

        self.tempfile = None
        self.bundlefile = open(bundlename, "rb")
        header = self.bundlefile.read(6)
        if not header.startswith("HG"):
            raise util.Abort(_("%s: not a Mercurial bundle file") % bundlename)
        elif not header.startswith("HG10"):
            raise util.Abort(_("%s: unknown bundle version") % bundlename)
        elif header == "HG10BZ":
            # bzip2-compressed bundle: decompress it into a temporary
            # uncompressed copy so revision data can be seeked freely
            fd, temp = tempfile.mkstemp(prefix="hg-bundle-",
                                        suffix=".hg10un", dir=self.path)
            self.tempfile = temp
            out = os.fdopen(fd, 'wb')

            def decompressed(chunks):
                zd = bz2.BZ2Decompressor()
                # re-seed the decompressor with the "BZ" magic that the
                # HG10BZ header replaces in the stream
                zd.decompress("BZ")
                for piece in chunks:
                    yield zd.decompress(piece)

            gen = decompressed(util.filechunkiter(self.bundlefile, 4096))

            try:
                out.write("HG10UN")
                for piece in gen:
                    out.write(piece)
            finally:
                out.close()
                self.bundlefile.close()

            self.bundlefile = open(self.tempfile, "rb")
            # seek right after the header
            self.bundlefile.seek(6)
        elif header == "HG10UN":
            # already uncompressed: nothing to do
            pass
        else:
            raise util.Abort(_("%s: unknown bundle compression type")
                             % bundlename)
        self.changelog = bundlechangelog(self.sopener, self.bundlefile)
        self.manifest = bundlemanifest(self.sopener, self.bundlefile,
                                       self.changelog.rev)
        # dict with the mapping 'filename' -> position in the bundle
        self.bundlefilespos = {}
        while True:
            fname = changegroup.getchunk(self.bundlefile)
            if not fname:
                break
            self.bundlefilespos[fname] = self.bundlefile.tell()
            # drain this file's chunks to advance to the next file
            for c in changegroup.chunkiter(self.bundlefile):
                pass

    def url(self):
        return self._url

    def dev(self):
        return -1

    def file(self, f):
        if f[0] == '/':
            f = f[1:]
        if f in self.bundlefilespos:
            # file has revisions in the bundle: serve the overlay
            self.bundlefile.seek(self.bundlefilespos[f])
            return bundlefilelog(self.sopener, f, self.bundlefile,
                                 self.changelog.rev)
        else:
            return filelog.filelog(self.sopener, f)

    def close(self):
        """Close assigned bundle file immediately."""
        self.bundlefile.close()

    def __del__(self):
        # best-effort cleanup: attributes may be missing if __init__
        # aborted early, hence the getattr guards
        bundlefile = getattr(self, 'bundlefile', None)
        if bundlefile and not bundlefile.closed:
            bundlefile.close()
        tempfile = getattr(self, 'tempfile', None)
        if tempfile is not None:
            os.unlink(tempfile)
241
241
def instance(ui, path, create):
    """Open a bundle repository at *path*; creation is not supported.

    Accepts 'bundle:repo+file', 'bundle:file', or a bare bundle path.
    """
    if create:
        raise util.Abort(_('cannot create new bundle repository'))
    path = util.drop_scheme('file', path)
    if path.startswith('bundle:'):
        path = util.drop_scheme('bundle', path)
        # 'repopath+bundlename' or just 'bundlename'
        parts = path.split("+", 1)
        if len(parts) == 1:
            repopath, bundlename = "", parts[0]
        else:
            repopath, bundlename = parts
    else:
        repopath, bundlename = '', path
    return bundlerepository(ui, repopath, bundlename)
@@ -1,3284 +1,3285 b''
1 # commands.py - command processing for mercurial
1 # commands.py - command processing for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import demandimport; demandimport.enable()
8 import demandimport; demandimport.enable()
9 from node import *
9 from node import *
10 from i18n import _
10 from i18n import _
11 import bisect, os, re, sys, signal, imp, urllib, pdb, shlex, stat
11 import bisect, os, re, sys, signal, imp, urllib, pdb, shlex, stat
12 import fancyopts, ui, hg, util, lock, revlog, bundlerepo
12 import fancyopts, ui, hg, util, lock, revlog, bundlerepo
13 import difflib, patch, time, help, mdiff, tempfile
13 import difflib, patch, time, help, mdiff, tempfile
14 import traceback, errno, version, atexit
14 import traceback, errno, version, atexit
15 import archival, changegroup, cmdutil, hgweb.server, sshserver
15 import archival, changegroup, cmdutil, hgweb.server, sshserver
16
16
class UnknownCommand(Exception):
    """Raised when a command name is not found in the command table."""
class AmbiguousCommand(Exception):
    """Raised when a command shortcut matches more than one command."""
21
21
def bail_if_changed(repo):
    """Abort if the working directory contains uncommitted changes."""
    status = repo.status()
    modified, added, removed, deleted = status[:4]
    if modified or added or removed or deleted:
        raise util.Abort(_("outstanding uncommitted changes"))
26
26
def logmessage(opts):
    """Return the commit message selected by the -m/-l options.

    -m and -l are mutually exclusive; with -l, '-' reads from stdin.
    """
    message = opts['message']
    logfile = opts['logfile']

    if message and logfile:
        raise util.Abort(_('options --message and --logfile are mutually '
                           'exclusive'))
    if logfile and not message:
        try:
            if logfile == '-':
                message = sys.stdin.read()
            else:
                message = open(logfile).read()
        except IOError as err:
            raise util.Abort(_("can't read commit message '%s': %s") %
                             (logfile, err.strerror))
    return message
45
45
def setremoteconfig(ui, opts):
    """Copy remote-related command options into the ui configuration."""
    for key in ('ssh', 'remotecmd'):
        value = opts.get(key)
        if value:
            ui.setconfig("ui", key, value)
52
52
53 # Commands start here, listed alphabetically
53 # Commands start here, listed alphabetically
54
54
def add(ui, repo, *pats, **opts):
    """add the specified files on the next commit

    Schedule files to be version controlled and added to the repository.

    The files will be added to the repository at the next commit. To
    undo an add before that, see hg revert.

    If no names are given, add all files in the repository.
    """

    names = []
    for source, abspath, relpath, exact in cmdutil.walk(repo, pats, opts):
        if exact:
            # explicitly named: schedule without checking dirstate
            if ui.verbose:
                ui.status(_('adding %s\n') % relpath)
            names.append(abspath)
        elif repo.dirstate.state(abspath) == '?':
            # pattern match: only pick up unknown files
            ui.status(_('adding %s\n') % relpath)
            names.append(abspath)
    if not opts.get('dry_run'):
        repo.add(names)
77
77
def addremove(ui, repo, *pats, **opts):
    """add all new files, delete all missing files

    Add all new files and remove all missing files from the repository.

    New files are ignored if they match any of the patterns in .hgignore. As
    with add, these changes take effect at the next commit.

    Use the -s option to detect renamed files. With a parameter > 0,
    this compares every removed file with every added file and records
    those similar enough as renames. This option takes a percentage
    between 0 (disabled) and 100 (files must be identical) as its
    parameter. Detecting renamed files this way can be expensive.
    """
    similarity_pct = float(opts.get('similarity') or 0)
    # deliberate or-form range check (kept rather than a chained
    # comparison so behavior is unchanged for non-finite input)
    if similarity_pct < 0 or similarity_pct > 100:
        raise util.Abort(_('similarity must be between 0 and 100'))
    return cmdutil.addremove(repo, pats, opts,
                             similarity=similarity_pct / 100.)
96
96
def annotate(ui, repo, *pats, **opts):
    """show changeset information per file line

    List changes in files, showing the revision id responsible for each line

    This command is useful to discover who did a change or when a change took
    place.

    Without the -a option, annotate will avoid processing files it
    detects as binary. With -a, annotate will generate an annotation
    anyway, probably with undesirable results.
    """
    getdate = util.cachefunc(lambda x: util.datestr(x.date()))

    if not pats:
        raise util.Abort(_('at least one file name or pattern required'))

    # option name -> function rendering that column for one annotation
    opmap = [('user', lambda x: ui.shortuser(x.user())),
             ('number', lambda x: str(x.rev())),
             ('changeset', lambda x: short(x.node())),
             ('date', getdate),
             ('follow', lambda x: x.path())]
    # default to revision numbers when no column option was given
    if (not opts['user'] and not opts['changeset'] and not opts['date']
        and not opts['follow']):
        opts['number'] = 1

    ctx = repo.changectx(opts['rev'])

    for source, abspath, relpath, exact in cmdutil.walk(repo, pats, opts,
                                                        node=ctx.node()):
        fctx = ctx.filectx(abspath)
        if not opts['text'] and util.binary(fctx.data()):
            ui.write(_("%s: binary file\n") % ((pats and relpath) or abspath))
            continue

        annotated = fctx.annotate(follow=opts.get('follow'))
        columns = []

        for name, render in opmap:
            if opts[name]:
                values = [render(info) for info, dummy in annotated]
                if values:
                    # right-align each column to its widest entry
                    width = max(map(len, values))
                    columns.append(["%*s" % (width, v) for v in values])

        if columns:
            for cells, line in zip(zip(*columns), annotated):
                ui.write("%s: %s" % (" ".join(cells), line[1]))
144
144
def archive(ui, repo, dest, **opts):
    '''create unversioned archive of a repository revision

    By default, the revision used is the parent of the working
    directory; use "-r" to specify a different revision.

    To specify the type of archive to create, use "-t". Valid
    types are:

    "files" (default): a directory full of files
    "tar": tar archive, uncompressed
    "tbz2": tar archive, compressed using bzip2
    "tgz": tar archive, compressed using gzip
    "uzip": zip archive, uncompressed
    "zip": zip archive, compressed using deflate

    The exact name of the destination archive or directory is given
    using a format string; see "hg help export" for details.

    Each member added to an archive file has a directory prefix
    prepended. Use "-p" to specify a format string for the prefix.
    The default is the basename of the archive, with suffixes removed.
    '''

    node = repo.changectx(opts['rev']).node()
    dest = cmdutil.make_filename(repo, dest, node)
    if os.path.realpath(dest) == repo.root:
        raise util.Abort(_('repository root cannot be destination'))
    # NB: the throwaway names must not be '_', which would shadow gettext
    unused1, matchfn, unused2 = cmdutil.matchpats(repo, [], opts)
    archive_kind = opts.get('type') or 'files'
    prefix = opts['prefix']
    if dest == '-':
        # stream the archive to stdout (not possible for plain files)
        if archive_kind == 'files':
            raise util.Abort(_('cannot archive plain files to stdout'))
        dest = sys.stdout
        if not prefix:
            prefix = os.path.basename(repo.root) + '-%h'
    prefix = cmdutil.make_filename(repo, prefix, node)
    archival.archive(repo, dest, node, archive_kind,
                     not opts['no_decode'], matchfn, prefix)
184
184
185 def backout(ui, repo, rev, **opts):
185 def backout(ui, repo, rev, **opts):
186 '''reverse effect of earlier changeset
186 '''reverse effect of earlier changeset
187
187
188 Commit the backed out changes as a new changeset. The new
188 Commit the backed out changes as a new changeset. The new
189 changeset is a child of the backed out changeset.
189 changeset is a child of the backed out changeset.
190
190
191 If you back out a changeset other than the tip, a new head is
191 If you back out a changeset other than the tip, a new head is
192 created. This head is the parent of the working directory. If
192 created. This head is the parent of the working directory. If
193 you back out an old changeset, your working directory will appear
193 you back out an old changeset, your working directory will appear
194 old after the backout. You should merge the backout changeset
194 old after the backout. You should merge the backout changeset
195 with another head.
195 with another head.
196
196
197 The --merge option remembers the parent of the working directory
197 The --merge option remembers the parent of the working directory
198 before starting the backout, then merges the new head with that
198 before starting the backout, then merges the new head with that
199 changeset afterwards. This saves you from doing the merge by
199 changeset afterwards. This saves you from doing the merge by
200 hand. The result of this merge is not committed, as for a normal
200 hand. The result of this merge is not committed, as for a normal
201 merge.'''
201 merge.'''
202
202
203 bail_if_changed(repo)
203 bail_if_changed(repo)
204 op1, op2 = repo.dirstate.parents()
204 op1, op2 = repo.dirstate.parents()
205 if op2 != nullid:
205 if op2 != nullid:
206 raise util.Abort(_('outstanding uncommitted merge'))
206 raise util.Abort(_('outstanding uncommitted merge'))
207 node = repo.lookup(rev)
207 node = repo.lookup(rev)
208 p1, p2 = repo.changelog.parents(node)
208 p1, p2 = repo.changelog.parents(node)
209 if p1 == nullid:
209 if p1 == nullid:
210 raise util.Abort(_('cannot back out a change with no parents'))
210 raise util.Abort(_('cannot back out a change with no parents'))
211 if p2 != nullid:
211 if p2 != nullid:
212 if not opts['parent']:
212 if not opts['parent']:
213 raise util.Abort(_('cannot back out a merge changeset without '
213 raise util.Abort(_('cannot back out a merge changeset without '
214 '--parent'))
214 '--parent'))
215 p = repo.lookup(opts['parent'])
215 p = repo.lookup(opts['parent'])
216 if p not in (p1, p2):
216 if p not in (p1, p2):
217 raise util.Abort(_('%s is not a parent of %s') %
217 raise util.Abort(_('%s is not a parent of %s') %
218 (short(p), short(node)))
218 (short(p), short(node)))
219 parent = p
219 parent = p
220 else:
220 else:
221 if opts['parent']:
221 if opts['parent']:
222 raise util.Abort(_('cannot use --parent on non-merge changeset'))
222 raise util.Abort(_('cannot use --parent on non-merge changeset'))
223 parent = p1
223 parent = p1
224 hg.clean(repo, node, show_stats=False)
224 hg.clean(repo, node, show_stats=False)
225 revert_opts = opts.copy()
225 revert_opts = opts.copy()
226 revert_opts['date'] = None
226 revert_opts['date'] = None
227 revert_opts['all'] = True
227 revert_opts['all'] = True
228 revert_opts['rev'] = hex(parent)
228 revert_opts['rev'] = hex(parent)
229 revert(ui, repo, **revert_opts)
229 revert(ui, repo, **revert_opts)
230 commit_opts = opts.copy()
230 commit_opts = opts.copy()
231 commit_opts['addremove'] = False
231 commit_opts['addremove'] = False
232 if not commit_opts['message'] and not commit_opts['logfile']:
232 if not commit_opts['message'] and not commit_opts['logfile']:
233 commit_opts['message'] = _("Backed out changeset %s") % (hex(node))
233 commit_opts['message'] = _("Backed out changeset %s") % (hex(node))
234 commit_opts['force_editor'] = True
234 commit_opts['force_editor'] = True
235 commit(ui, repo, **commit_opts)
235 commit(ui, repo, **commit_opts)
236 def nice(node):
236 def nice(node):
237 return '%d:%s' % (repo.changelog.rev(node), short(node))
237 return '%d:%s' % (repo.changelog.rev(node), short(node))
238 ui.status(_('changeset %s backs out changeset %s\n') %
238 ui.status(_('changeset %s backs out changeset %s\n') %
239 (nice(repo.changelog.tip()), nice(node)))
239 (nice(repo.changelog.tip()), nice(node)))
240 if op1 != node:
240 if op1 != node:
241 if opts['merge']:
241 if opts['merge']:
242 ui.status(_('merging with changeset %s\n') % nice(op1))
242 ui.status(_('merging with changeset %s\n') % nice(op1))
243 hg.merge(repo, hex(op1))
243 hg.merge(repo, hex(op1))
244 else:
244 else:
245 ui.status(_('the backout changeset is a new head - '
245 ui.status(_('the backout changeset is a new head - '
246 'do not forget to merge\n'))
246 'do not forget to merge\n'))
247 ui.status(_('(use "backout --merge" '
247 ui.status(_('(use "backout --merge" '
248 'if you want to auto-merge)\n'))
248 'if you want to auto-merge)\n'))
249
249
def branch(ui, repo, label=None):
    """set or show the current branch name

    With <name>, set the current branch name. Otherwise, show the
    current branch name.
    """

    if label is None:
        # No name given: report the working directory's branch, if any.
        current = util.tolocal(repo.workingctx().branch())
        if current:
            ui.write("%s\n" % current)
    else:
        # Record the new branch name for the next commit.
        repo.opener("branch", "w").write(util.fromlocal(label) + '\n')
263
263
def branches(ui, repo):
    """list repository named branches

    List the repository's named branches.
    """
    # hex in debug mode, abbreviated hash otherwise
    hexfunc = ui.debugflag and hex or short
    # Negate the revision number so a plain ascending sort lists newest first.
    entries = [(-repo.changelog.rev(node), node, tag)
               for tag, node in repo.branchtags().items()]
    entries.sort()
    for negrev, node, tag in entries:
        if ui.quiet:
            ui.write("%s\n" % tag)
        else:
            # Pad the branch name to a 30-column field, counting display
            # width in the local encoding.
            field = util.localsub(tag, 30)
            field += " " * (30 - util.locallen(field))
            ui.write("%s %s:%s\n" % (field, -negrev, hexfunc(node)))
280
280
def bundle(ui, repo, fname, dest=None, **opts):
    """create a changegroup file

    Generate a compressed changegroup file collecting changesets not
    found in the other repository.

    If no destination repository is specified the destination is assumed
    to have all the nodes specified by one or more --base parameters.

    The bundle file can then be transferred using conventional means and
    applied to another repository with the unbundle or pull command.
    This is useful when direct push and pull are not available or when
    exporting an entire repository is undesirable.

    Applying bundles preserves all changeset contents including
    permissions, copy/rename information, and revision history.
    """
    revs = opts.get('rev') or None
    if revs:
        revs = [repo.lookup(rev) for rev in revs]
    base = opts.get('base')
    if base:
        if dest:
            # FIX: message previously read "specifiying"
            raise util.Abort(_("--base is incompatible with specifying "
                               "a destination"))
        base = [repo.lookup(rev) for rev in base]
        # create the right base
        # XXX: nodesbetween / changegroup* should be "fixed" instead
        # Walk ancestors of the requested heads, stopping at anything
        # reachable from a --base node; whatever roots the walk finds
        # become the outgoing set 'o'.
        o = []
        has = {nullid: None}
        for n in base:
            has.update(repo.changelog.reachable(n))
        if revs:
            visit = list(revs)
        else:
            visit = repo.changelog.heads()
        seen = {}
        while visit:
            n = visit.pop(0)
            parents = [p for p in repo.changelog.parents(n) if p not in has]
            if len(parents) == 0:
                # Both parents already known to the destination: n is a root
                # of the changegroup.
                o.insert(0, n)
            else:
                for p in parents:
                    if p not in seen:
                        seen[p] = 1
                        visit.append(p)
    else:
        # No --base: ask the destination repository what it is missing.
        setremoteconfig(ui, opts)
        dest = ui.expandpath(dest or 'default-push', dest or 'default')
        other = hg.repository(ui, dest)
        o = repo.findoutgoing(other, force=opts['force'])

    if revs:
        cg = repo.changegroupsubset(o, revs, 'bundle')
    else:
        cg = repo.changegroup(o, 'bundle')
    # HG10BZ: bundle format v1, bzip2-compressed.
    changegroup.writebundle(cg, fname, "HG10BZ")
339
339
def cat(ui, repo, file1, *pats, **opts):
    """output the current or given revision of files

    Print the specified files as they were at the given revision.
    If no revision is given, the parent of the working directory is used,
    or tip if no revision is checked out.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are the same as
    for the export command, with the following additions:

    %s   basename of file being printed
    %d   dirname of file being printed, or '.' if in repo root
    %p   root-relative path name of file being printed
    """
    ctx = repo.changectx(opts['rev'])
    node = ctx.node()
    # Walk every file matched by the patterns at that revision and dump
    # its contents to the (possibly format-string-named) output file.
    matches = cmdutil.walk(repo, (file1,) + pats, opts, node)
    for src, abs, rel, exact in matches:
        fp = cmdutil.make_file(repo, opts['output'], node, pathname=abs)
        fp.write(ctx.filectx(abs).data())
360
360
def clone(ui, source, dest=None, **opts):
    """make a copy of an existing repository

    Create a copy of an existing repository in a new directory.

    If no destination directory name is specified, it defaults to the
    basename of the source.

    The location of the source is added to the new repository's
    .hg/hgrc file, as the default to be used for future pulls.

    For efficiency, hardlinks are used for cloning whenever the source
    and destination are on the same filesystem (note this applies only
    to the repository data, not to the checked out files). Some
    filesystems, such as AFS, implement hardlinking incorrectly, but
    do not report errors. In these cases, use the --pull option to
    avoid hardlinking.

    You can safely clone repositories and checked out files using full
    hardlinks with

    $ cp -al REPO REPOCLONE

    which is the fastest way to clone. However, the operation is not
    atomic (making sure REPO is not modified during the operation is
    up to you) and you have to make sure your editor breaks hardlinks
    (Emacs and most Linux Kernel tools do so).

    If you use the -r option to clone up to a specific revision, no
    subsequent revisions will be present in the cloned repository.
    This option implies --pull, even on local repositories.

    See pull for valid source format details.

    It is possible to specify an ssh:// URL as the destination, but no
    .hg/hgrc and working directory will be created on the remote side.
    Look at the help text for the pull command for important details
    about ssh:// URLs.
    """
    # Apply --ssh/--remotecmd overrides before touching any repository.
    setremoteconfig(ui, opts)
    src = ui.expandpath(source)
    hg.clone(ui, src, dest,
             pull=opts['pull'],
             rev=opts['rev'],
             stream=opts['uncompressed'],
             update=not opts['noupdate'])
406
406
def commit(ui, repo, *pats, **opts):
    """commit the specified files or all outstanding changes

    Commit changes to the given files into the repository.

    If a list of files is omitted, all changes reported by "hg status"
    will be committed.

    If no commit message is specified, the editor configured in your hgrc
    or in the EDITOR environment variable is started to enter a message.
    """
    # Resolve -m/-l into the final commit message (may be empty, in which
    # case repo.commit starts an editor).
    message = logmessage(opts)

    if opts['addremove']:
        cmdutil.addremove(repo, pats, opts)
    fns, match, anypats = cmdutil.matchpats(repo, pats, opts)
    if pats:
        # Explicit file list: commit only changed files, but validate every
        # name the user gave so typos abort instead of silently no-op'ing.
        status = repo.status(files=fns, match=match)
        modified, added, removed, deleted, unknown = status[:5]
        files = modified + added + removed
        slist = None  # lazily-built sorted copy of 'files' for prefix search
        for f in fns:
            if f not in files:
                # Named file has no recorded change; figure out why.
                rf = repo.wjoin(f)
                if f in unknown:
                    raise util.Abort(_("file %s not tracked!") % rf)
                try:
                    mode = os.lstat(rf)[stat.ST_MODE]
                except OSError:
                    raise util.Abort(_("file %s not found!") % rf)
                if stat.S_ISDIR(mode):
                    # A directory is acceptable only if at least one changed
                    # file lives under it; bisect into the sorted file list
                    # to test for any entry with the "dir/" prefix.
                    name = f + '/'
                    if slist is None:
                        slist = list(files)
                        slist.sort()
                    i = bisect.bisect(slist, name)
                    if i >= len(slist) or not slist[i].startswith(name):
                        raise util.Abort(_("no match under directory %s!")
                                         % rf)
                elif not stat.S_ISREG(mode):
                    # Sockets, fifos, devices etc. cannot be committed.
                    raise util.Abort(_("can't commit %s: "
                                       "unsupported file type!") % rf)
    else:
        # Empty list means "commit everything matched" for repo.commit.
        files = []
    try:
        repo.commit(files, message, opts['user'], opts['date'], match,
                    force_editor=opts.get('force_editor'))
    except ValueError, inst:
        # e.g. an unparsable --date; surface it as a clean abort.
        raise util.Abort(str(inst))
456
456
457 def docopy(ui, repo, pats, opts, wlock):
457 def docopy(ui, repo, pats, opts, wlock):
458 # called with the repo lock held
458 # called with the repo lock held
459 #
459 #
460 # hgsep => pathname that uses "/" to separate directories
460 # hgsep => pathname that uses "/" to separate directories
461 # ossep => pathname that uses os.sep to separate directories
461 # ossep => pathname that uses os.sep to separate directories
462 cwd = repo.getcwd()
462 cwd = repo.getcwd()
463 errors = 0
463 errors = 0
464 copied = []
464 copied = []
465 targets = {}
465 targets = {}
466
466
467 # abs: hgsep
467 # abs: hgsep
468 # rel: ossep
468 # rel: ossep
469 # return: hgsep
469 # return: hgsep
470 def okaytocopy(abs, rel, exact):
470 def okaytocopy(abs, rel, exact):
471 reasons = {'?': _('is not managed'),
471 reasons = {'?': _('is not managed'),
472 'a': _('has been marked for add'),
472 'a': _('has been marked for add'),
473 'r': _('has been marked for remove')}
473 'r': _('has been marked for remove')}
474 state = repo.dirstate.state(abs)
474 state = repo.dirstate.state(abs)
475 reason = reasons.get(state)
475 reason = reasons.get(state)
476 if reason:
476 if reason:
477 if state == 'a':
477 if state == 'a':
478 origsrc = repo.dirstate.copied(abs)
478 origsrc = repo.dirstate.copied(abs)
479 if origsrc is not None:
479 if origsrc is not None:
480 return origsrc
480 return origsrc
481 if exact:
481 if exact:
482 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
482 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
483 else:
483 else:
484 return abs
484 return abs
485
485
486 # origsrc: hgsep
486 # origsrc: hgsep
487 # abssrc: hgsep
487 # abssrc: hgsep
488 # relsrc: ossep
488 # relsrc: ossep
489 # target: ossep
489 # target: ossep
490 def copy(origsrc, abssrc, relsrc, target, exact):
490 def copy(origsrc, abssrc, relsrc, target, exact):
491 abstarget = util.canonpath(repo.root, cwd, target)
491 abstarget = util.canonpath(repo.root, cwd, target)
492 reltarget = util.pathto(cwd, abstarget)
492 reltarget = util.pathto(cwd, abstarget)
493 prevsrc = targets.get(abstarget)
493 prevsrc = targets.get(abstarget)
494 if prevsrc is not None:
494 if prevsrc is not None:
495 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
495 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
496 (reltarget, util.localpath(abssrc),
496 (reltarget, util.localpath(abssrc),
497 util.localpath(prevsrc)))
497 util.localpath(prevsrc)))
498 return
498 return
499 if (not opts['after'] and os.path.exists(reltarget) or
499 if (not opts['after'] and os.path.exists(reltarget) or
500 opts['after'] and repo.dirstate.state(abstarget) not in '?r'):
500 opts['after'] and repo.dirstate.state(abstarget) not in '?r'):
501 if not opts['force']:
501 if not opts['force']:
502 ui.warn(_('%s: not overwriting - file exists\n') %
502 ui.warn(_('%s: not overwriting - file exists\n') %
503 reltarget)
503 reltarget)
504 return
504 return
505 if not opts['after'] and not opts.get('dry_run'):
505 if not opts['after'] and not opts.get('dry_run'):
506 os.unlink(reltarget)
506 os.unlink(reltarget)
507 if opts['after']:
507 if opts['after']:
508 if not os.path.exists(reltarget):
508 if not os.path.exists(reltarget):
509 return
509 return
510 else:
510 else:
511 targetdir = os.path.dirname(reltarget) or '.'
511 targetdir = os.path.dirname(reltarget) or '.'
512 if not os.path.isdir(targetdir) and not opts.get('dry_run'):
512 if not os.path.isdir(targetdir) and not opts.get('dry_run'):
513 os.makedirs(targetdir)
513 os.makedirs(targetdir)
514 try:
514 try:
515 restore = repo.dirstate.state(abstarget) == 'r'
515 restore = repo.dirstate.state(abstarget) == 'r'
516 if restore and not opts.get('dry_run'):
516 if restore and not opts.get('dry_run'):
517 repo.undelete([abstarget], wlock)
517 repo.undelete([abstarget], wlock)
518 try:
518 try:
519 if not opts.get('dry_run'):
519 if not opts.get('dry_run'):
520 util.copyfile(relsrc, reltarget)
520 util.copyfile(relsrc, reltarget)
521 restore = False
521 restore = False
522 finally:
522 finally:
523 if restore:
523 if restore:
524 repo.remove([abstarget], wlock)
524 repo.remove([abstarget], wlock)
525 except IOError, inst:
525 except IOError, inst:
526 if inst.errno == errno.ENOENT:
526 if inst.errno == errno.ENOENT:
527 ui.warn(_('%s: deleted in working copy\n') % relsrc)
527 ui.warn(_('%s: deleted in working copy\n') % relsrc)
528 else:
528 else:
529 ui.warn(_('%s: cannot copy - %s\n') %
529 ui.warn(_('%s: cannot copy - %s\n') %
530 (relsrc, inst.strerror))
530 (relsrc, inst.strerror))
531 errors += 1
531 errors += 1
532 return
532 return
533 if ui.verbose or not exact:
533 if ui.verbose or not exact:
534 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
534 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
535 targets[abstarget] = abssrc
535 targets[abstarget] = abssrc
536 if abstarget != origsrc and not opts.get('dry_run'):
536 if abstarget != origsrc and not opts.get('dry_run'):
537 repo.copy(origsrc, abstarget, wlock)
537 repo.copy(origsrc, abstarget, wlock)
538 copied.append((abssrc, relsrc, exact))
538 copied.append((abssrc, relsrc, exact))
539
539
540 # pat: ossep
540 # pat: ossep
541 # dest ossep
541 # dest ossep
542 # srcs: list of (hgsep, hgsep, ossep, bool)
542 # srcs: list of (hgsep, hgsep, ossep, bool)
543 # return: function that takes hgsep and returns ossep
543 # return: function that takes hgsep and returns ossep
544 def targetpathfn(pat, dest, srcs):
544 def targetpathfn(pat, dest, srcs):
545 if os.path.isdir(pat):
545 if os.path.isdir(pat):
546 abspfx = util.canonpath(repo.root, cwd, pat)
546 abspfx = util.canonpath(repo.root, cwd, pat)
547 abspfx = util.localpath(abspfx)
547 abspfx = util.localpath(abspfx)
548 if destdirexists:
548 if destdirexists:
549 striplen = len(os.path.split(abspfx)[0])
549 striplen = len(os.path.split(abspfx)[0])
550 else:
550 else:
551 striplen = len(abspfx)
551 striplen = len(abspfx)
552 if striplen:
552 if striplen:
553 striplen += len(os.sep)
553 striplen += len(os.sep)
554 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
554 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
555 elif destdirexists:
555 elif destdirexists:
556 res = lambda p: os.path.join(dest,
556 res = lambda p: os.path.join(dest,
557 os.path.basename(util.localpath(p)))
557 os.path.basename(util.localpath(p)))
558 else:
558 else:
559 res = lambda p: dest
559 res = lambda p: dest
560 return res
560 return res
561
561
562 # pat: ossep
562 # pat: ossep
563 # dest ossep
563 # dest ossep
564 # srcs: list of (hgsep, hgsep, ossep, bool)
564 # srcs: list of (hgsep, hgsep, ossep, bool)
565 # return: function that takes hgsep and returns ossep
565 # return: function that takes hgsep and returns ossep
566 def targetpathafterfn(pat, dest, srcs):
566 def targetpathafterfn(pat, dest, srcs):
567 if util.patkind(pat, None)[0]:
567 if util.patkind(pat, None)[0]:
568 # a mercurial pattern
568 # a mercurial pattern
569 res = lambda p: os.path.join(dest,
569 res = lambda p: os.path.join(dest,
570 os.path.basename(util.localpath(p)))
570 os.path.basename(util.localpath(p)))
571 else:
571 else:
572 abspfx = util.canonpath(repo.root, cwd, pat)
572 abspfx = util.canonpath(repo.root, cwd, pat)
573 if len(abspfx) < len(srcs[0][0]):
573 if len(abspfx) < len(srcs[0][0]):
574 # A directory. Either the target path contains the last
574 # A directory. Either the target path contains the last
575 # component of the source path or it does not.
575 # component of the source path or it does not.
576 def evalpath(striplen):
576 def evalpath(striplen):
577 score = 0
577 score = 0
578 for s in srcs:
578 for s in srcs:
579 t = os.path.join(dest, util.localpath(s[0])[striplen:])
579 t = os.path.join(dest, util.localpath(s[0])[striplen:])
580 if os.path.exists(t):
580 if os.path.exists(t):
581 score += 1
581 score += 1
582 return score
582 return score
583
583
584 abspfx = util.localpath(abspfx)
584 abspfx = util.localpath(abspfx)
585 striplen = len(abspfx)
585 striplen = len(abspfx)
586 if striplen:
586 if striplen:
587 striplen += len(os.sep)
587 striplen += len(os.sep)
588 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
588 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
589 score = evalpath(striplen)
589 score = evalpath(striplen)
590 striplen1 = len(os.path.split(abspfx)[0])
590 striplen1 = len(os.path.split(abspfx)[0])
591 if striplen1:
591 if striplen1:
592 striplen1 += len(os.sep)
592 striplen1 += len(os.sep)
593 if evalpath(striplen1) > score:
593 if evalpath(striplen1) > score:
594 striplen = striplen1
594 striplen = striplen1
595 res = lambda p: os.path.join(dest,
595 res = lambda p: os.path.join(dest,
596 util.localpath(p)[striplen:])
596 util.localpath(p)[striplen:])
597 else:
597 else:
598 # a file
598 # a file
599 if destdirexists:
599 if destdirexists:
600 res = lambda p: os.path.join(dest,
600 res = lambda p: os.path.join(dest,
601 os.path.basename(util.localpath(p)))
601 os.path.basename(util.localpath(p)))
602 else:
602 else:
603 res = lambda p: dest
603 res = lambda p: dest
604 return res
604 return res
605
605
606
606
607 pats = list(pats)
607 pats = list(pats)
608 if not pats:
608 if not pats:
609 raise util.Abort(_('no source or destination specified'))
609 raise util.Abort(_('no source or destination specified'))
610 if len(pats) == 1:
610 if len(pats) == 1:
611 raise util.Abort(_('no destination specified'))
611 raise util.Abort(_('no destination specified'))
612 dest = pats.pop()
612 dest = pats.pop()
613 destdirexists = os.path.isdir(dest)
613 destdirexists = os.path.isdir(dest)
614 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
614 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
615 raise util.Abort(_('with multiple sources, destination must be an '
615 raise util.Abort(_('with multiple sources, destination must be an '
616 'existing directory'))
616 'existing directory'))
617 if opts['after']:
617 if opts['after']:
618 tfn = targetpathafterfn
618 tfn = targetpathafterfn
619 else:
619 else:
620 tfn = targetpathfn
620 tfn = targetpathfn
621 copylist = []
621 copylist = []
622 for pat in pats:
622 for pat in pats:
623 srcs = []
623 srcs = []
624 for tag, abssrc, relsrc, exact in cmdutil.walk(repo, [pat], opts):
624 for tag, abssrc, relsrc, exact in cmdutil.walk(repo, [pat], opts):
625 origsrc = okaytocopy(abssrc, relsrc, exact)
625 origsrc = okaytocopy(abssrc, relsrc, exact)
626 if origsrc:
626 if origsrc:
627 srcs.append((origsrc, abssrc, relsrc, exact))
627 srcs.append((origsrc, abssrc, relsrc, exact))
628 if not srcs:
628 if not srcs:
629 continue
629 continue
630 copylist.append((tfn(pat, dest, srcs), srcs))
630 copylist.append((tfn(pat, dest, srcs), srcs))
631 if not copylist:
631 if not copylist:
632 raise util.Abort(_('no files to copy'))
632 raise util.Abort(_('no files to copy'))
633
633
634 for targetpath, srcs in copylist:
634 for targetpath, srcs in copylist:
635 for origsrc, abssrc, relsrc, exact in srcs:
635 for origsrc, abssrc, relsrc, exact in srcs:
636 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
636 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
637
637
638 if errors:
638 if errors:
639 ui.warn(_('(consider using --after)\n'))
639 ui.warn(_('(consider using --after)\n'))
640 return errors, copied
640 return errors, copied
641
641
def copy(ui, repo, *pats, **opts):
    """mark files as copied for the next commit

    Mark dest as having copies of source files. If dest is a
    directory, copies are put in that directory. If dest is a file,
    there can only be one source.

    By default, this command copies the contents of files as they
    stand in the working directory. If invoked with --after, the
    operation is recorded, but no copying is performed.

    This command takes effect in the next commit. To undo a copy
    before that, see hg revert.
    """
    # Hold the working-directory lock for the whole operation and
    # delegate to docopy; only the error count matters to the caller.
    errs, copied = docopy(ui, repo, pats, opts, repo.wlock(0))
    return errs
659
659
def debugancestor(ui, index, rev1, rev2):
    """find the ancestor revision of two revisions in a given index"""
    # Open the index file directly (no repository needed), unaudited.
    opener = util.opener(os.getcwd(), audit=False)
    log = revlog.revlog(opener, index, "", 0)
    anc = log.ancestor(log.lookup(rev1), log.lookup(rev2))
    ui.write("%d:%s\n" % (log.rev(anc), hex(anc)))
665
665
def debugcomplete(ui, cmd='', **opts):
    """returns the completion list associated with the given command"""

    if not opts['options']:
        # Complete command names matching the given prefix.
        names = findpossible(ui, cmd).keys()
        names.sort()
        ui.write("%s\n" % "\n".join(names))
        return

    # Complete option flags: global options plus, when a command is
    # given, that command's own option table.
    tables = [globalopts]
    if cmd:
        aliases, entry = findcmd(ui, cmd)
        tables.append(entry[1])
    flags = []
    for table in tables:
        for opt in table:
            if opt[0]:
                flags.append('-%s' % opt[0])
            flags.append('--%s' % opt[1])
    ui.write("%s\n" % "\n".join(flags))
686
686
def debugrebuildstate(ui, repo, rev=None):
    """rebuild the dirstate as it would look like for the given revision"""
    # Resolve the target changeset; default to tip when no rev is given.
    if rev:
        node = repo.lookup(rev)
    else:
        node = repo.changelog.tip()
    # First entry of a changelog record is the manifest node.
    manifest_node = repo.changelog.read(node)[0]
    files = repo.manifest.read(manifest_node)
    wlock = repo.wlock()
    repo.dirstate.rebuild(node, files)
698
698
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    repo.dirstate.read()
    dmap = repo.dirstate.map
    names = dmap.keys()
    names.sort()
    m1n = repo.changelog.read(parent1)[0]
    m2n = repo.changelog.read(parent2)[0]
    m1 = repo.manifest.read(m1n)
    m2 = repo.manifest.read(m2n)
    errors = 0
    for f in dmap:
        state = repo.dirstate.state(f)
        # normal/removed entries must exist in the first parent manifest
        if state in "nr" and f not in m1:
            ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        # added entries must not already exist in the first parent manifest
        if state in "a" and f in m1:
            ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        # merged entries must come from at least one of the parents
        if state in "m" and f not in m1 and f not in m2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (f, state))
            errors += 1
    # every file in the first parent manifest must be tracked by the dirstate
    for f in m1:
        state = repo.dirstate.state(f)
        if state not in "nrm":
            ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        error = _(".hg/dirstate inconsistent with current parent's manifest")
        raise util.Abort(error)
731
731
def showconfig(ui, repo, *values, **opts):
    """show combined config settings from all hgrc files

    With no args, print names and values of all config items.

    With one arg of the form section.name, print just the value of
    that config item.

    With multiple args, print names and values of all config items
    with matching section names."""

    untrusted = bool(opts.get('untrusted'))
    # at most one dotted section.name argument is allowed
    if values and len([v for v in values if '.' in v]) > 1:
        raise util.Abort(_('only one config item permitted'))
    for section, name, value in ui.walkconfig(untrusted=untrusted):
        sectname = section + '.' + name
        if not values:
            ui.write('%s=%s\n' % (sectname, value))
            continue
        for v in values:
            if v == section:
                # section match: print name and value
                ui.write('%s=%s\n' % (sectname, value))
            elif v == sectname:
                # exact item match: print only the value
                ui.write(value, '\n')
757
757
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care.
    """
    # the second parent defaults to the null revision
    if not rev2:
        rev2 = hex(nullid)
    parent1 = repo.lookup(rev1)
    parent2 = repo.lookup(rev2)
    repo.dirstate.setparents(parent1, parent2)
769
769
770 def debugstate(ui, repo):
770 def debugstate(ui, repo):
771 """show the contents of the current dirstate"""
771 """show the contents of the current dirstate"""
772 repo.dirstate.read()
772 repo.dirstate.read()
773 dc = repo.dirstate.map
773 dc = repo.dirstate.map
774 keys = dc.keys()
774 keys = dc.keys()
775 keys.sort()
775 keys.sort()
776 for file_ in keys:
776 for file_ in keys:
777 ui.write("%c %3o %10d %s %s\n"
777 ui.write("%c %3o %10d %s %s\n"
778 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
778 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
779 time.strftime("%x %X",
779 time.strftime("%x %X",
780 time.localtime(dc[file_][3])), file_))
780 time.localtime(dc[file_][3])), file_))
781 for f in repo.dirstate.copies():
781 for f in repo.dirstate.copies():
782 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
782 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
783
783
def debugdata(ui, file_, rev):
    """dump the contents of a data file revision"""
    # open the revlog directly, without going through a repository object
    r = revlog.revlog(util.opener(os.getcwd(), audit=False),
                      file_[:-2] + ".i", file_, 0)
    try:
        ui.write(r.revision(r.lookup(rev)))
    # lookup() raises revlog.LookupError for unknown revisions (it no
    # longer raises KeyError), so catch that to abort cleanly instead of
    # letting a traceback escape to the user
    except revlog.LookupError:
        raise util.Abort(_('invalid revision identifier %s') % rev)
792
792
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    # --extended enables the additional, more permissive date formats
    if opts["extended"]:
        parsed = util.parsedate(date, util.extendeddateformats)
    else:
        parsed = util.parsedate(date)
    ui.write("internal: %s %s\n" % parsed)
    ui.write("standard: %s\n" % util.datestr(parsed))
    if range:
        matcher = util.matchdate(range)
        ui.write("match: %s\n" % matcher(parsed[0]))
804
804
def debugindex(ui, file_):
    """dump the contents of an index file"""
    r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
    ui.write(" rev offset length base linkrev" +
             " nodeid p1 p2\n")
    for rev in xrange(r.count()):
        node = r.node(rev)
        parents = r.parents(node)
        # one row per revision: layout data, then node and parent hashes
        ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
            rev, r.start(rev), r.length(rev), r.base(rev), r.linkrev(node),
            short(node), short(parents[0]), short(parents[1])))
816
816
def debugindexdot(ui, file_):
    """dump an index DAG as a .dot file"""
    r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
    ui.write("digraph G {\n")
    for rev in xrange(r.count()):
        parents = r.parents(r.node(rev))
        # always emit an edge for the first parent; the second only if real
        ui.write("\t%d -> %d\n" % (r.rev(parents[0]), rev))
        if parents[1] != nullid:
            ui.write("\t%d -> %d\n" % (r.rev(parents[1]), rev))
    ui.write("}\n")
828
828
829 def debuginstall(ui):
829 def debuginstall(ui):
830 '''test Mercurial installation'''
830 '''test Mercurial installation'''
831
831
832 def writetemp(contents):
832 def writetemp(contents):
833 (fd, name) = tempfile.mkstemp()
833 (fd, name) = tempfile.mkstemp()
834 f = os.fdopen(fd, "wb")
834 f = os.fdopen(fd, "wb")
835 f.write(contents)
835 f.write(contents)
836 f.close()
836 f.close()
837 return name
837 return name
838
838
839 problems = 0
839 problems = 0
840
840
841 # encoding
841 # encoding
842 ui.status(_("Checking encoding (%s)...\n") % util._encoding)
842 ui.status(_("Checking encoding (%s)...\n") % util._encoding)
843 try:
843 try:
844 util.fromlocal("test")
844 util.fromlocal("test")
845 except util.Abort, inst:
845 except util.Abort, inst:
846 ui.write(" %s\n" % inst)
846 ui.write(" %s\n" % inst)
847 ui.write(_(" (check that your locale is properly set)\n"))
847 ui.write(_(" (check that your locale is properly set)\n"))
848 problems += 1
848 problems += 1
849
849
850 # compiled modules
850 # compiled modules
851 ui.status(_("Checking extensions...\n"))
851 ui.status(_("Checking extensions...\n"))
852 try:
852 try:
853 import bdiff, mpatch, base85
853 import bdiff, mpatch, base85
854 except Exception, inst:
854 except Exception, inst:
855 ui.write(" %s\n" % inst)
855 ui.write(" %s\n" % inst)
856 ui.write(_(" One or more extensions could not be found"))
856 ui.write(_(" One or more extensions could not be found"))
857 ui.write(_(" (check that you compiled the extensions)\n"))
857 ui.write(_(" (check that you compiled the extensions)\n"))
858 problems += 1
858 problems += 1
859
859
860 # templates
860 # templates
861 ui.status(_("Checking templates...\n"))
861 ui.status(_("Checking templates...\n"))
862 try:
862 try:
863 import templater
863 import templater
864 t = templater.templater(templater.templatepath("map-cmdline.default"))
864 t = templater.templater(templater.templatepath("map-cmdline.default"))
865 except Exception, inst:
865 except Exception, inst:
866 ui.write(" %s\n" % inst)
866 ui.write(" %s\n" % inst)
867 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
867 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
868 problems += 1
868 problems += 1
869
869
870 # patch
870 # patch
871 ui.status(_("Checking patch...\n"))
871 ui.status(_("Checking patch...\n"))
872 path = os.environ.get('PATH', '')
872 path = os.environ.get('PATH', '')
873 patcher = util.find_in_path('gpatch', path,
873 patcher = util.find_in_path('gpatch', path,
874 util.find_in_path('patch', path, None))
874 util.find_in_path('patch', path, None))
875 if not patcher:
875 if not patcher:
876 ui.write(_(" Can't find patch or gpatch in PATH\n"))
876 ui.write(_(" Can't find patch or gpatch in PATH\n"))
877 ui.write(_(" (specify a patch utility in your .hgrc file)\n"))
877 ui.write(_(" (specify a patch utility in your .hgrc file)\n"))
878 problems += 1
878 problems += 1
879 else:
879 else:
880 # actually attempt a patch here
880 # actually attempt a patch here
881 a = "1\n2\n3\n4\n"
881 a = "1\n2\n3\n4\n"
882 b = "1\n2\n3\ninsert\n4\n"
882 b = "1\n2\n3\ninsert\n4\n"
883 d = mdiff.unidiff(a, None, b, None, "a")
883 d = mdiff.unidiff(a, None, b, None, "a")
884 fa = writetemp(a)
884 fa = writetemp(a)
885 fd = writetemp(d)
885 fd = writetemp(d)
886 fp = os.popen('%s %s %s' % (patcher, fa, fd))
886 fp = os.popen('%s %s %s' % (patcher, fa, fd))
887 files = []
887 files = []
888 output = ""
888 output = ""
889 for line in fp:
889 for line in fp:
890 output += line
890 output += line
891 if line.startswith('patching file '):
891 if line.startswith('patching file '):
892 pf = util.parse_patch_output(line.rstrip())
892 pf = util.parse_patch_output(line.rstrip())
893 files.append(pf)
893 files.append(pf)
894 if files != [fa]:
894 if files != [fa]:
895 ui.write(_(" unexpected patch output!"))
895 ui.write(_(" unexpected patch output!"))
896 ui.write(_(" (you may have an incompatible version of patch)\n"))
896 ui.write(_(" (you may have an incompatible version of patch)\n"))
897 ui.write(output)
897 ui.write(output)
898 problems += 1
898 problems += 1
899 a = file(fa).read()
899 a = file(fa).read()
900 if a != b:
900 if a != b:
901 ui.write(_(" patch test failed!"))
901 ui.write(_(" patch test failed!"))
902 ui.write(_(" (you may have an incompatible version of patch)\n"))
902 ui.write(_(" (you may have an incompatible version of patch)\n"))
903 problems += 1
903 problems += 1
904 os.unlink(fa)
904 os.unlink(fa)
905 os.unlink(fd)
905 os.unlink(fd)
906
906
907 # merge helper
907 # merge helper
908 ui.status(_("Checking merge helper...\n"))
908 ui.status(_("Checking merge helper...\n"))
909 cmd = (os.environ.get("HGMERGE") or ui.config("ui", "merge")
909 cmd = (os.environ.get("HGMERGE") or ui.config("ui", "merge")
910 or "hgmerge")
910 or "hgmerge")
911 cmdpath = util.find_in_path(cmd, path)
911 cmdpath = util.find_in_path(cmd, path)
912 if not cmdpath:
912 if not cmdpath:
913 cmdpath = util.find_in_path(cmd.split()[0], path)
913 cmdpath = util.find_in_path(cmd.split()[0], path)
914 if not cmdpath:
914 if not cmdpath:
915 if cmd == 'hgmerge':
915 if cmd == 'hgmerge':
916 ui.write(_(" No merge helper set and can't find default"
916 ui.write(_(" No merge helper set and can't find default"
917 " hgmerge script in PATH\n"))
917 " hgmerge script in PATH\n"))
918 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
918 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
919 else:
919 else:
920 ui.write(_(" Can't find merge helper '%s' in PATH\n") % cmd)
920 ui.write(_(" Can't find merge helper '%s' in PATH\n") % cmd)
921 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
921 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
922 problems += 1
922 problems += 1
923 else:
923 else:
924 # actually attempt a patch here
924 # actually attempt a patch here
925 fa = writetemp("1\n2\n3\n4\n")
925 fa = writetemp("1\n2\n3\n4\n")
926 fl = writetemp("1\n2\n3\ninsert\n4\n")
926 fl = writetemp("1\n2\n3\ninsert\n4\n")
927 fr = writetemp("begin\n1\n2\n3\n4\n")
927 fr = writetemp("begin\n1\n2\n3\n4\n")
928 r = os.system('%s %s %s %s' % (cmd, fl, fa, fr))
928 r = os.system('%s %s %s %s' % (cmd, fl, fa, fr))
929 if r:
929 if r:
930 ui.write(_(" got unexpected merge error %d!") % r)
930 ui.write(_(" got unexpected merge error %d!") % r)
931 problems += 1
931 problems += 1
932 m = file(fl).read()
932 m = file(fl).read()
933 if m != "begin\n1\n2\n3\ninsert\n4\n":
933 if m != "begin\n1\n2\n3\ninsert\n4\n":
934 ui.write(_(" got unexpected merge results!") % r)
934 ui.write(_(" got unexpected merge results!") % r)
935 ui.write(_(" (your merge helper may have the"
935 ui.write(_(" (your merge helper may have the"
936 " wrong argument order)\n"))
936 " wrong argument order)\n"))
937 ui.write(m)
937 ui.write(m)
938 os.unlink(fa)
938 os.unlink(fa)
939 os.unlink(fl)
939 os.unlink(fl)
940 os.unlink(fr)
940 os.unlink(fr)
941
941
942 # editor
942 # editor
943 ui.status(_("Checking commit editor...\n"))
943 ui.status(_("Checking commit editor...\n"))
944 editor = (os.environ.get("HGEDITOR") or
944 editor = (os.environ.get("HGEDITOR") or
945 ui.config("ui", "editor") or
945 ui.config("ui", "editor") or
946 os.environ.get("EDITOR", "vi"))
946 os.environ.get("EDITOR", "vi"))
947 cmdpath = util.find_in_path(editor, path)
947 cmdpath = util.find_in_path(editor, path)
948 if not cmdpath:
948 if not cmdpath:
949 cmdpath = util.find_in_path(editor.split()[0], path)
949 cmdpath = util.find_in_path(editor.split()[0], path)
950 if not cmdpath:
950 if not cmdpath:
951 if editor == 'vi':
951 if editor == 'vi':
952 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
952 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
953 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
953 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
954 else:
954 else:
955 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
955 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
956 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
956 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
957 problems += 1
957 problems += 1
958
958
959 # check username
959 # check username
960 ui.status(_("Checking username...\n"))
960 ui.status(_("Checking username...\n"))
961 user = os.environ.get("HGUSER")
961 user = os.environ.get("HGUSER")
962 if user is None:
962 if user is None:
963 user = ui.config("ui", "username")
963 user = ui.config("ui", "username")
964 if user is None:
964 if user is None:
965 user = os.environ.get("EMAIL")
965 user = os.environ.get("EMAIL")
966 if not user:
966 if not user:
967 ui.warn(" ")
967 ui.warn(" ")
968 ui.username()
968 ui.username()
969 ui.write(_(" (specify a username in your .hgrc file)\n"))
969 ui.write(_(" (specify a username in your .hgrc file)\n"))
970
970
971 if not problems:
971 if not problems:
972 ui.status(_("No problems detected\n"))
972 ui.status(_("No problems detected\n"))
973 else:
973 else:
974 ui.write(_("%s problems detected,"
974 ui.write(_("%s problems detected,"
975 " please check your install!\n") % problems)
975 " please check your install!\n") % problems)
976
976
977 return problems
977 return problems
978
978
def debugrename(ui, repo, file1, *pats, **opts):
    """dump rename information"""

    ctx = repo.changectx(opts.get('rev', 'tip'))
    walkargs = (file1,) + pats
    for src, abs, rel, exact in cmdutil.walk(repo, walkargs, opts,
                                             ctx.node()):
        renamed = ctx.filectx(abs).renamed()
        if not renamed:
            ui.write(_("%s not renamed\n") % rel)
            continue
        ui.write(_("%s renamed from %s:%s\n")
                 % (rel, renamed[0], hex(renamed[1])))
990
990
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    items = list(cmdutil.walk(repo, pats, opts))
    if not items:
        return
    # size the two path columns to the longest absolute / relative paths
    abswidth = max([len(abs) for (src, abs, rel, exact) in items])
    relwidth = max([len(rel) for (src, abs, rel, exact) in items])
    fmt = '%%s %%-%ds %%-%ds %%s' % (abswidth, relwidth)
    for src, abs, rel, exact in items:
        row = fmt % (src, abs, rel, exact and 'exact' or '')
        ui.write("%s\n" % row.rstrip())
1002
1002
def diff(ui, repo, *pats, **opts):
    """diff repository (or selected files)

    Show differences between revisions for the specified files.

    Differences between files are shown using the unified diff format.

    NOTE: diff may generate unexpected results for merges, as it will
    default to comparing against the working directory's first parent
    changeset if no revisions are specified.

    When two revision arguments are given, then changes are shown
    between those revisions. If only one revision is specified then
    that revision is compared to the working directory, and, when no
    revisions are specified, the working directory files are compared
    to its parent.

    Without the -a option, diff will avoid generating diffs of files
    it detects as binary. With -a, diff will generate a diff anyway,
    probably with undesirable results.
    """
    # resolve the two endpoints of the diff from the -r arguments
    rev1, rev2 = cmdutil.revpair(repo, opts['rev'])

    files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)

    diffopts = patch.diffopts(ui, opts)
    patch.diff(repo, rev1, rev2, files, match=matchfn, opts=diffopts)
1030
1030
def export(ui, repo, *changesets, **opts):
    """dump the header and diffs for one or more changesets

    Print the changeset header and diffs for one or more revisions.

    The information shown in the changeset header is: author,
    changeset hash, parent(s) and commit comment.

    NOTE: export may generate unexpected diff output for merge changesets,
    as it will compare the merge changeset against its first parent only.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are as follows:

    %%   literal "%" character
    %H   changeset hash (40 bytes of hexadecimal)
    %N   number of patches being generated
    %R   changeset revision number
    %b   basename of the exporting repository
    %h   short-form changeset hash (12 bytes of hexadecimal)
    %n   zero-padded sequence number, starting at 1
    %r   zero-padded changeset revision number

    Without the -a option, export will avoid generating diffs of files
    it detects as binary. With -a, export will generate a diff anyway,
    probably with undesirable results.

    With the --switch-parent option, the diff will be against the second
    parent. It can be useful to review a merge.
    """
    if not changesets:
        raise util.Abort(_("export requires at least one changeset"))
    revs = cmdutil.revrange(repo, changesets)
    # singular/plural status message depending on how many patches we emit
    if len(revs) > 1:
        ui.note(_('exporting patches:\n'))
    else:
        ui.note(_('exporting patch:\n'))
    patch.export(repo, revs,
                 template=opts['output'],
                 switch_parent=opts['switch_parent'],
                 opts=patch.diffopts(ui, opts))
1071
1071
1072 def grep(ui, repo, pattern, *pats, **opts):
1072 def grep(ui, repo, pattern, *pats, **opts):
1073 """search for a pattern in specified files and revisions
1073 """search for a pattern in specified files and revisions
1074
1074
1075 Search revisions of files for a regular expression.
1075 Search revisions of files for a regular expression.
1076
1076
1077 This command behaves differently than Unix grep. It only accepts
1077 This command behaves differently than Unix grep. It only accepts
1078 Python/Perl regexps. It searches repository history, not the
1078 Python/Perl regexps. It searches repository history, not the
1079 working directory. It always prints the revision number in which
1079 working directory. It always prints the revision number in which
1080 a match appears.
1080 a match appears.
1081
1081
1082 By default, grep only prints output for the first revision of a
1082 By default, grep only prints output for the first revision of a
1083 file in which it finds a match. To get it to print every revision
1083 file in which it finds a match. To get it to print every revision
1084 that contains a change in match status ("-" for a match that
1084 that contains a change in match status ("-" for a match that
1085 becomes a non-match, or "+" for a non-match that becomes a match),
1085 becomes a non-match, or "+" for a non-match that becomes a match),
1086 use the --all flag.
1086 use the --all flag.
1087 """
1087 """
1088 reflags = 0
1088 reflags = 0
1089 if opts['ignore_case']:
1089 if opts['ignore_case']:
1090 reflags |= re.I
1090 reflags |= re.I
1091 regexp = re.compile(pattern, reflags)
1091 regexp = re.compile(pattern, reflags)
1092 sep, eol = ':', '\n'
1092 sep, eol = ':', '\n'
1093 if opts['print0']:
1093 if opts['print0']:
1094 sep = eol = '\0'
1094 sep = eol = '\0'
1095
1095
1096 fcache = {}
1096 fcache = {}
1097 def getfile(fn):
1097 def getfile(fn):
1098 if fn not in fcache:
1098 if fn not in fcache:
1099 fcache[fn] = repo.file(fn)
1099 fcache[fn] = repo.file(fn)
1100 return fcache[fn]
1100 return fcache[fn]
1101
1101
1102 def matchlines(body):
1102 def matchlines(body):
1103 begin = 0
1103 begin = 0
1104 linenum = 0
1104 linenum = 0
1105 while True:
1105 while True:
1106 match = regexp.search(body, begin)
1106 match = regexp.search(body, begin)
1107 if not match:
1107 if not match:
1108 break
1108 break
1109 mstart, mend = match.span()
1109 mstart, mend = match.span()
1110 linenum += body.count('\n', begin, mstart) + 1
1110 linenum += body.count('\n', begin, mstart) + 1
1111 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1111 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1112 lend = body.find('\n', mend)
1112 lend = body.find('\n', mend)
1113 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1113 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1114 begin = lend + 1
1114 begin = lend + 1
1115
1115
1116 class linestate(object):
1116 class linestate(object):
1117 def __init__(self, line, linenum, colstart, colend):
1117 def __init__(self, line, linenum, colstart, colend):
1118 self.line = line
1118 self.line = line
1119 self.linenum = linenum
1119 self.linenum = linenum
1120 self.colstart = colstart
1120 self.colstart = colstart
1121 self.colend = colend
1121 self.colend = colend
1122
1122
1123 def __eq__(self, other):
1123 def __eq__(self, other):
1124 return self.line == other.line
1124 return self.line == other.line
1125
1125
1126 matches = {}
1126 matches = {}
1127 copies = {}
1127 copies = {}
1128 def grepbody(fn, rev, body):
1128 def grepbody(fn, rev, body):
1129 matches[rev].setdefault(fn, [])
1129 matches[rev].setdefault(fn, [])
1130 m = matches[rev][fn]
1130 m = matches[rev][fn]
1131 for lnum, cstart, cend, line in matchlines(body):
1131 for lnum, cstart, cend, line in matchlines(body):
1132 s = linestate(line, lnum, cstart, cend)
1132 s = linestate(line, lnum, cstart, cend)
1133 m.append(s)
1133 m.append(s)
1134
1134
1135 def difflinestates(a, b):
1135 def difflinestates(a, b):
1136 sm = difflib.SequenceMatcher(None, a, b)
1136 sm = difflib.SequenceMatcher(None, a, b)
1137 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1137 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1138 if tag == 'insert':
1138 if tag == 'insert':
1139 for i in xrange(blo, bhi):
1139 for i in xrange(blo, bhi):
1140 yield ('+', b[i])
1140 yield ('+', b[i])
1141 elif tag == 'delete':
1141 elif tag == 'delete':
1142 for i in xrange(alo, ahi):
1142 for i in xrange(alo, ahi):
1143 yield ('-', a[i])
1143 yield ('-', a[i])
1144 elif tag == 'replace':
1144 elif tag == 'replace':
1145 for i in xrange(alo, ahi):
1145 for i in xrange(alo, ahi):
1146 yield ('-', a[i])
1146 yield ('-', a[i])
1147 for i in xrange(blo, bhi):
1147 for i in xrange(blo, bhi):
1148 yield ('+', b[i])
1148 yield ('+', b[i])
1149
1149
1150 prev = {}
1150 prev = {}
1151 def display(fn, rev, states, prevstates):
1151 def display(fn, rev, states, prevstates):
1152 counts = {'-': 0, '+': 0}
1152 counts = {'-': 0, '+': 0}
1153 filerevmatches = {}
1153 filerevmatches = {}
1154 if incrementing or not opts['all']:
1154 if incrementing or not opts['all']:
1155 a, b, r = prevstates, states, rev
1155 a, b, r = prevstates, states, rev
1156 else:
1156 else:
1157 a, b, r = states, prevstates, prev.get(fn, -1)
1157 a, b, r = states, prevstates, prev.get(fn, -1)
1158 for change, l in difflinestates(a, b):
1158 for change, l in difflinestates(a, b):
1159 cols = [fn, str(r)]
1159 cols = [fn, str(r)]
1160 if opts['line_number']:
1160 if opts['line_number']:
1161 cols.append(str(l.linenum))
1161 cols.append(str(l.linenum))
1162 if opts['all']:
1162 if opts['all']:
1163 cols.append(change)
1163 cols.append(change)
1164 if opts['user']:
1164 if opts['user']:
1165 cols.append(ui.shortuser(get(r)[1]))
1165 cols.append(ui.shortuser(get(r)[1]))
1166 if opts['files_with_matches']:
1166 if opts['files_with_matches']:
1167 c = (fn, r)
1167 c = (fn, r)
1168 if c in filerevmatches:
1168 if c in filerevmatches:
1169 continue
1169 continue
1170 filerevmatches[c] = 1
1170 filerevmatches[c] = 1
1171 else:
1171 else:
1172 cols.append(l.line)
1172 cols.append(l.line)
1173 ui.write(sep.join(cols), eol)
1173 ui.write(sep.join(cols), eol)
1174 counts[change] += 1
1174 counts[change] += 1
1175 return counts['+'], counts['-']
1175 return counts['+'], counts['-']
1176
1176
1177 fstate = {}
1177 fstate = {}
1178 skip = {}
1178 skip = {}
1179 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1179 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1180 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1180 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1181 count = 0
1181 count = 0
1182 incrementing = False
1182 incrementing = False
1183 follow = opts.get('follow')
1183 follow = opts.get('follow')
1184 for st, rev, fns in changeiter:
1184 for st, rev, fns in changeiter:
1185 if st == 'window':
1185 if st == 'window':
1186 incrementing = rev
1186 incrementing = rev
1187 matches.clear()
1187 matches.clear()
1188 elif st == 'add':
1188 elif st == 'add':
1189 mf = repo.changectx(rev).manifest()
1189 mf = repo.changectx(rev).manifest()
1190 matches[rev] = {}
1190 matches[rev] = {}
1191 for fn in fns:
1191 for fn in fns:
1192 if fn in skip:
1192 if fn in skip:
1193 continue
1193 continue
1194 fstate.setdefault(fn, {})
1194 fstate.setdefault(fn, {})
1195 try:
1195 try:
1196 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1196 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1197 if follow:
1197 if follow:
1198 copied = getfile(fn).renamed(mf[fn])
1198 copied = getfile(fn).renamed(mf[fn])
1199 if copied:
1199 if copied:
1200 copies.setdefault(rev, {})[fn] = copied[0]
1200 copies.setdefault(rev, {})[fn] = copied[0]
1201 except KeyError:
1201 except KeyError:
1202 pass
1202 pass
1203 elif st == 'iter':
1203 elif st == 'iter':
1204 states = matches[rev].items()
1204 states = matches[rev].items()
1205 states.sort()
1205 states.sort()
1206 for fn, m in states:
1206 for fn, m in states:
1207 copy = copies.get(rev, {}).get(fn)
1207 copy = copies.get(rev, {}).get(fn)
1208 if fn in skip:
1208 if fn in skip:
1209 if copy:
1209 if copy:
1210 skip[copy] = True
1210 skip[copy] = True
1211 continue
1211 continue
1212 if incrementing or not opts['all'] or fstate[fn]:
1212 if incrementing or not opts['all'] or fstate[fn]:
1213 pos, neg = display(fn, rev, m, fstate[fn])
1213 pos, neg = display(fn, rev, m, fstate[fn])
1214 count += pos + neg
1214 count += pos + neg
1215 if pos and not opts['all']:
1215 if pos and not opts['all']:
1216 skip[fn] = True
1216 skip[fn] = True
1217 if copy:
1217 if copy:
1218 skip[copy] = True
1218 skip[copy] = True
1219 fstate[fn] = m
1219 fstate[fn] = m
1220 if copy:
1220 if copy:
1221 fstate[copy] = m
1221 fstate[copy] = m
1222 prev[fn] = rev
1222 prev[fn] = rev
1223
1223
1224 if not incrementing:
1224 if not incrementing:
1225 fstate = fstate.items()
1225 fstate = fstate.items()
1226 fstate.sort()
1226 fstate.sort()
1227 for fn, state in fstate:
1227 for fn, state in fstate:
1228 if fn in skip:
1228 if fn in skip:
1229 continue
1229 continue
1230 if fn not in copies.get(prev[fn], {}):
1230 if fn not in copies.get(prev[fn], {}):
1231 display(fn, rev, {}, state)
1231 display(fn, rev, {}, state)
1232 return (count == 0 and 1) or 0
1232 return (count == 0 and 1) or 0
1233
1233
def heads(ui, repo, **opts):
    """show current repository heads

    Show all repository head changesets.

    Repository "heads" are changesets that don't have children
    changesets. They are where development generally takes place and
    are the usual targets for update and merge operations.
    """
    # With --rev, limit output to heads reachable from that revision;
    # otherwise show every head in the repository.
    rev = opts['rev']
    if rev:
        headnodes = repo.heads(repo.lookup(rev))
    else:
        headnodes = repo.heads()
    shower = cmdutil.show_changeset(ui, repo, opts)
    for node in headnodes:
        shower.show(changenode=node)
1250
1250
def help_(ui, name=None, with_version=False):
    """show help for a command, extension, or list of commands

    With no arguments, print a list of commands and short help.

    Given a command name, print help for that command.

    Given an extension name, print help for that extension, and the
    commands it provides."""
    # Option tables collected while rendering; printed at the very end
    # so command options, then global options, appear after the text.
    option_lists = []

    def helpcmd(name):
        # Show help for a single command; raises UnknownCommand (via
        # findcmd) if 'name' does not resolve to a command.
        if with_version:
            version_(ui)
            ui.write('\n')
        aliases, i = findcmd(ui, name)
        # synopsis
        ui.write("%s\n\n" % i[2])

        # description
        doc = i[0].__doc__
        if not doc:
            doc = _("(No help text available)")
        if ui.quiet:
            # quiet mode: first line of the docstring only
            doc = doc.splitlines(0)[0]
        ui.write("%s\n" % doc.rstrip())

        if not ui.quiet:
            # aliases
            if len(aliases) > 1:
                ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))

            # options
            if i[1]:
                option_lists.append(("options", i[1]))

    def helplist(select=None):
        # List commands with their one-line summaries. 'select' is an
        # optional predicate on the primary command name.
        h = {}      # primary name -> first docstring line
        cmds = {}   # primary name -> full "name|alias" key
        for c, e in table.items():
            f = c.split("|", 1)[0]
            if select and not select(f):
                continue
            if name == "shortlist" and not f.startswith("^"):
                # shortlist shows only commands flagged with a leading '^'
                continue
            f = f.lstrip("^")
            if not ui.debugflag and f.startswith("debug"):
                # debug commands are hidden unless --debug is in effect
                continue
            doc = e[0].__doc__
            if not doc:
                doc = _("(No help text available)")
            h[f] = doc.splitlines(0)[0].rstrip()
            cmds[f] = c.lstrip("^")

        fns = h.keys()
        fns.sort()
        m = max(map(len, fns))
        for f in fns:
            if ui.verbose:
                commands = cmds[f].replace("|",", ")
                ui.write(" %s:\n %s\n"%(commands, h[f]))
            else:
                # pad names to the longest so summaries line up
                ui.write(' %-*s %s\n' % (m, f, h[f]))

    def helptopic(name):
        # Show help for a named topic from help.helptable; keys are
        # '|'-separated alias lists whose last element is the header.
        v = None
        for i in help.helptable:
            l = i.split('|')
            if name in l:
                v = i
                header = l[-1]
        if not v:
            raise UnknownCommand(name)

        # description
        doc = help.helptable[v]
        if not doc:
            doc = _("(No help text available)")
        if callable(doc):
            # topics may be lazily generated
            doc = doc()

        ui.write("%s\n" % header)
        ui.write("%s\n" % doc.rstrip())

    def helpext(name):
        # Show help for an extension plus the commands it provides.
        try:
            mod = findext(name)
        except KeyError:
            raise UnknownCommand(name)

        doc = (mod.__doc__ or _('No help text available')).splitlines(0)
        ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
        for d in doc[1:]:
            ui.write(d, '\n')

        ui.status('\n')
        if ui.verbose:
            ui.status(_('list of commands:\n\n'))
        else:
            ui.status(_('list of commands (use "hg help -v %s" '
                        'to show aliases and global options):\n\n') % name)

        modcmds = dict.fromkeys([c.split('|', 1)[0] for c in mod.cmdtable])
        helplist(modcmds.has_key)

    if name and name != 'shortlist':
        # Try each interpretation of 'name' in turn: command, topic,
        # extension. Re-raise the last UnknownCommand if all fail.
        i = None
        for f in (helpcmd, helptopic, helpext):
            try:
                f(name)
                i = None
                break
            except UnknownCommand, inst:
                i = inst
        if i:
            raise i

    else:
        # program name
        if ui.verbose or with_version:
            version_(ui)
        else:
            ui.status(_("Mercurial Distributed SCM\n"))
        ui.status('\n')

        # list of commands
        if name == "shortlist":
            ui.status(_('basic commands (use "hg help" '
                        'for the full list or option "-v" for details):\n\n'))
        elif ui.verbose:
            ui.status(_('list of commands:\n\n'))
        else:
            ui.status(_('list of commands (use "hg help -v" '
                        'to show aliases and global options):\n\n'))

        helplist()

    # global options
    if ui.verbose:
        option_lists.append(("global options", globalopts))

    # list all option lists
    opt_output = []
    for title, options in option_lists:
        opt_output.append(("\n%s:\n" % title, None))
        for shortopt, longopt, default, desc in options:
            if "DEPRECATED" in desc and not ui.verbose: continue
            opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
                                          longopt and " --%s" % longopt),
                              "%s%s" % (desc,
                                        default
                                        and _(" (default: %s)") % default
                                        or "")))

    if opt_output:
        # Width of the longest option column among entries that have a
        # description; header entries (second item None) are excluded.
        opts_len = max([len(line[0]) for line in opt_output if line[1]])
        for first, second in opt_output:
            if second:
                ui.write(" %-*s %s\n" % (opts_len, first, second))
            else:
                ui.write("%s\n" % first)
1412
1412
def identify(ui, repo):
    """print information about the working copy

    Print a short summary of the current state of the repo.

    This summary identifies the repository state using one or two parent
    hash identifiers, followed by a "+" if there are uncommitted changes
    in the working directory, followed by a list of tags for this revision.
    """
    # Working-directory parents, ignoring the null revision.
    parents = []
    for p in repo.dirstate.parents():
        if p != nullid:
            parents.append(p)
    if not parents:
        ui.write(_("unknown\n"))
        return

    # Full hashes under --debug, short hashes otherwise.
    if ui.debugflag:
        hexfunc = hex
    else:
        hexfunc = short
    modified, added, removed, deleted = repo.status()[:4]
    # A trailing "+" flags uncommitted working-directory changes.
    dirty = ""
    if modified or added or removed or deleted:
        dirty = "+"
    pieces = ["%s%s" % ('+'.join([hexfunc(p) for p in parents]), dirty)]

    if not ui.quiet:

        branch = util.tolocal(repo.workingctx().branch())
        if branch:
            pieces.append("(%s)" % branch)

        # multiple tags for a single parent separated by '/'
        parenttags = []
        for tags in map(repo.nodetags, parents):
            if tags:
                parenttags.append('/'.join(tags))
        # tags for multiple parents separated by ' + '
        if parenttags:
            pieces.append(' + '.join(parenttags))

    ui.write("%s\n" % ' '.join(pieces))
1447
1447
def import_(ui, repo, patch1, *patches, **opts):
    """import an ordered set of patches

    Import a list of patches and commit them individually.

    If there are outstanding changes in the working directory, import
    will abort unless given the -f flag.

    You can import a patch straight from a mail message. Even patches
    as attachments work (body part must be type text/plain or
    text/x-patch to be used). From and Subject headers of email
    message are used as default committer and commit message. All
    text/plain body parts before first diff are added to commit
    message.

    If imported patch was generated by hg export, user and description
    from patch override values from message headers and body. Values
    given on command line with -m and -u override these.

    To read a patch from standard input, use patch name "-".
    """
    patches = (patch1,) + patches

    if not opts['force']:
        # abort on outstanding working-directory changes unless -f
        bail_if_changed(repo)

    d = opts["base"]        # directory to resolve patch names against
    strip = opts["strip"]   # number of leading path components to strip

    # Hold both locks across the whole import so the series of commits
    # is applied without interference.
    # NOTE(review): the locks are not explicitly released here; they
    # appear to be released when the lock objects go out of scope.
    wlock = repo.wlock()
    lock = repo.lock()

    for p in patches:
        pf = os.path.join(d, p)

        if pf == '-':
            ui.status(_("applying patch from stdin\n"))
            tmpname, message, user, date = patch.extract(ui, sys.stdin)
        else:
            ui.status(_("applying %s\n") % p)
            tmpname, message, user, date = patch.extract(ui, file(pf))

        if tmpname is None:
            raise util.Abort(_('no diffs found'))

        try:
            # Commit message precedence: command line (-m/-l), then the
            # message extracted from the patch, then the editor (None).
            cmdline_message = logmessage(opts)
            if cmdline_message:
                # pickup the cmdline msg
                message = cmdline_message
            elif message:
                # pickup the patch msg
                message = message.strip()
            else:
                # launch the editor
                message = None
            ui.debug(_('message:\n%s\n') % message)

            files = {}
            try:
                fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
                                   files=files)
            finally:
                # update the dirstate even if the patch only partially
                # applied, so the user can see what happened
                files = patch.updatedir(ui, repo, files, wlock=wlock)
            repo.commit(files, message, user, date, wlock=wlock, lock=lock)
        finally:
            # always remove the temporary file created by patch.extract
            os.unlink(tmpname)
1515
1515
def incoming(ui, repo, source="default", **opts):
    """show new changesets found in source

    Show new changesets found in the specified path/URL or the default
    pull location. These are the changesets that would be pulled if a pull
    was requested.

    For remote repository, using --bundle avoids downloading the changesets
    twice if the incoming is followed by a pull.

    See pull for valid source format details.
    """
    source = ui.expandpath(source)
    setremoteconfig(ui, opts)

    other = hg.repository(ui, source)
    # nodes present in 'other' but not in the local repo
    incoming = repo.findincoming(other, force=opts["force"])
    if not incoming:
        ui.status(_("no changes found\n"))
        return

    # 'cleanup' holds the path of a temporary bundle file to delete on
    # exit; it stays None when no bundle was written or --bundle asked
    # to keep it.
    cleanup = None
    try:
        fname = opts["bundle"]
        if fname or not other.local():
            # create a bundle (uncompressed if other repo is not local)
            cg = other.changegroup(incoming, "incoming")
            bundletype = other.local() and "HG10BZ" or "HG10UN"
            fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
            # keep written bundle?
            if opts["bundle"]:
                cleanup = None
            if not other.local():
                # use the created uncompressed bundlerepo
                other = bundlerepo.bundlerepository(ui, repo.root, fname)

        revs = None
        if opts['rev']:
            revs = [other.lookup(rev) for rev in opts['rev']]
        # changesets between the common base and the requested revs
        o = other.changelog.nodesbetween(incoming, revs)[0]
        if opts['newest_first']:
            o.reverse()
        displayer = cmdutil.show_changeset(ui, other, opts)
        for n in o:
            parents = [p for p in other.changelog.parents(n) if p != nullid]
            if opts['no_merges'] and len(parents) == 2:
                continue
            displayer.show(changenode=n)
    finally:
        # bundlerepository instances need an explicit close; plain
        # repositories may not have one
        if hasattr(other, 'close'):
            other.close()
        if cleanup:
            os.unlink(cleanup)
1569
1569
def init(ui, dest=".", **opts):
    """create a new repository in the given directory

    Initialize a new repository in the given directory. If the given
    directory does not exist, it is created.

    If no directory is given, the current directory is used.

    It is possible to specify an ssh:// URL as the destination.
    Look at the help text for the pull command for important details
    about ssh:// URLs.
    """
    # Apply --ssh/--remotecmd settings before contacting a (possibly
    # remote) destination.
    setremoteconfig(ui, opts)
    # create=1 asks the repository factory to initialize a new
    # repository rather than open an existing one.
    hg.repository(ui, dest, create=1)
1584
1584
def locate(ui, repo, *pats, **opts):
    """locate files matching specific patterns

    Print all files under Mercurial control whose names match the
    given patterns.

    This command searches the current directory and its
    subdirectories. To search an entire repository, move to the root
    of the repository.

    If no patterns are given to match, this command prints all file
    names.

    If you want to feed the output of this command into the "xargs"
    command, use the "-0" option to both this command and "xargs".
    This will avoid the problem of "xargs" treating single filenames
    that contain white space as multiple filenames.
    """
    # -0/--print0 switches the record separator from newline to NUL
    # so output is safe for "xargs -0".
    if opts['print0']:
        end = '\0'
    else:
        end = '\n'
    rev = opts['rev']
    node = None
    if rev:
        node = repo.lookup(rev)

    walker = cmdutil.walk(repo, pats, opts, node=node, head='(?:.*/|)')
    for src, abs, rel, exact in walker:
        # Without an explicit revision, skip files the dirstate does
        # not know about ('?' state).
        if not node and repo.dirstate.state(abs) == '?':
            continue
        if opts['fullpath']:
            ui.write(os.path.join(repo.root, abs), end)
        elif pats and rel:
            # relative path when patterns were given and one exists
            ui.write(rel, end)
        else:
            ui.write(abs, end)
1618
1618
def log(ui, repo, *pats, **opts):
    """show revision history of entire repository or files

    Print the revision history of the specified files or the entire
    project.

    File history is shown without following rename or copy history of
    files. Use -f/--follow with a file name to follow history across
    renames and copies. --follow without a file name will only show
    ancestors or descendants of the starting revision. --follow-first
    only follows the first parent of merge revisions.

    If no revision range is specified, the default is tip:0 unless
    --follow is set, in which case the working directory parent is
    used as the starting revision.

    By default this command outputs: changeset id and hash, tags,
    non-trivial parents, user, date and time, and a summary for each
    commit. When the -v/--verbose switch is used, the list of changed
    files and full commit message is shown.

    NOTE: log -p may generate unexpected diff output for merge
    changesets, as it will compare the merge changeset against its
    first parent only. Also, the files: list will only reflect files
    that are different from BOTH parents.

    """

    # Memoized access to changeset data keyed by revision number.
    get = util.cachefunc(lambda r: repo.changectx(r).changeset())
    changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)

    if opts['limit']:
        try:
            limit = int(opts['limit'])
        except ValueError:
            raise util.Abort(_('limit must be a positive integer'))
        if limit <= 0: raise util.Abort(_('limit must be positive'))
    else:
        # no --limit: effectively unbounded
        limit = sys.maxint
    count = 0

    # Highest revision worth scanning for rename information.
    if opts['copies'] and opts['rev']:
        endrev = max(cmdutil.revrange(repo, opts['rev'])) + 1
    else:
        endrev = repo.changelog.count()
    rcache = {}   # filename -> {changerev: rename info}
    ncache = {}   # filename -> {filenode: rename info}
    dcache = []   # [manifest node, manifest delta] pair, most recent only
    def getrenamed(fn, rev, man):
        '''looks up all renames for a file (up to endrev) the first
        time the file is given. It indexes on the changerev and only
        parses the manifest if linkrev != changerev.
        Returns rename info for fn at changerev rev.'''
        if fn not in rcache:
            # first query for this file: prefill both caches from its
            # filelog, stopping once endrev is reached
            rcache[fn] = {}
            ncache[fn] = {}
            fl = repo.file(fn)
            for i in xrange(fl.count()):
                node = fl.node(i)
                lr = fl.linkrev(node)
                renamed = fl.renamed(node)
                rcache[fn][lr] = renamed
                if renamed:
                    ncache[fn][node] = renamed
                if lr >= endrev:
                    break
        if rev in rcache[fn]:
            # fast path: linkrev matched the changerev
            return rcache[fn][rev]
        # slow path: consult the manifest to find the file node at rev
        mr = repo.manifest.rev(man)
        if repo.manifest.parentrevs(mr) != (mr - 1, nullrev):
            # non-linear manifest history: full lookup
            return ncache[fn].get(repo.manifest.find(man, fn)[0])
        if not dcache or dcache[0] != man:
            # linear history: a manifest delta suffices; cache the most
            # recent delta since revisions are visited in order
            dcache[:] = [man, repo.manifest.readdelta(man)]
        if fn in dcache[1]:
            return ncache[fn].get(dcache[1][fn])
        return None

    # --date filter: False means no filtering, otherwise a predicate
    df = False
    if opts["date"]:
        df = util.matchdate(opts["date"])

    displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
    for st, rev, fns in changeiter:
        if st == 'add':
            changenode = repo.changelog.node(rev)
            parents = [p for p in repo.changelog.parentrevs(rev)
                       if p != nullrev]
            if opts['no_merges'] and len(parents) == 2:
                continue
            if opts['only_merges'] and len(parents) != 2:
                continue

            if df:
                changes = get(rev)
                if not df(changes[2][0]):
                    continue

            if opts['keyword']:
                # -k: every keyword must appear in the user, description
                # or (first 20) changed file names, case-insensitively
                changes = get(rev)
                miss = 0
                for k in [kw.lower() for kw in opts['keyword']]:
                    if not (k in changes[1].lower() or
                            k in changes[4].lower() or
                            k in " ".join(changes[3][:20]).lower()):
                        miss = 1
                        break
                if miss:
                    continue

            copies = []
            if opts.get('copies') and rev:
                mf = get(rev)[0]
                for fn in get(rev)[3]:
                    rename = getrenamed(fn, rev, mf)
                    if rename:
                        copies.append((fn, rename[0]))
            displayer.show(rev, changenode, copies=copies)
        elif st == 'iter':
            # end of a window: count what was actually displayed and
            # stop once --limit is reached
            if count == limit: break
            if displayer.flush(rev):
                count += 1
1740
1740
def manifest(ui, repo, rev=None):
    """output the current or given revision of the project manifest

    Print a list of version controlled files for the given revision.
    If no revision is given, the parent of the working directory is used,
    or tip if no revision is checked out.

    The manifest is the list of files being version controlled. If no revision
    is given then the first parent of the working directory is used.

    With -v flag, print file permissions. With --debug flag, print
    file revision hashes.
    """

    # Manifest of the requested revision (or the working dir's parent).
    m = repo.changectx(rev).manifest()
    names = m.keys()
    names.sort()

    for name in names:
        if ui.debugflag:
            # 40-character hex nodeid of this file revision
            ui.write("%40s " % hex(m[name]))
        if ui.verbose:
            # unix-style mode: 755 when executable, 644 otherwise
            ui.write("%3s " % (m.execf(name) and "755" or "644"))
        ui.write("%s\n" % name)
1765
1765
def merge(ui, repo, node=None, force=None):
    """Merge working directory with another revision

    Merge the contents of the current working directory and the
    requested revision. Files that changed between either parent are
    marked as changed for the next commit and a commit must be
    performed before any further updates are allowed.

    If no revision is specified, the working directory's parent is a
    head revision, and the repository contains exactly one other head,
    the other head is merged with by default. Otherwise, an explicit
    revision to merge with must be provided.
    """

    if not node:
        # No explicit revision given: only proceed when the choice of
        # "other head" is unambiguous.
        heads = repo.heads()
        count = len(heads)
        if count > 2:
            raise util.Abort(_('repo has %d heads - '
                               'please merge with an explicit rev') %
                             count)
        if count == 1:
            raise util.Abort(_('there is nothing to merge - '
                               'use "hg update" instead'))
        parent = repo.dirstate.parents()[0]
        if parent not in heads:
            raise util.Abort(_('working dir not at a head rev - '
                               'use "hg update" or merge with an explicit rev'))
        # Merge with whichever head is not the working dir's parent.
        if parent == heads[0]:
            node = heads[-1]
        else:
            node = heads[0]
    return hg.merge(repo, node, force=force)
1795
1795
def outgoing(ui, repo, dest=None, **opts):
    """show changesets not found in destination

    Show changesets not found in the specified destination repository or
    the default push location. These are the changesets that would be pushed
    if a push was requested.

    See pull for valid destination format details.
    """
    dest = ui.expandpath(dest or 'default-push', dest or 'default')
    setremoteconfig(ui, opts)
    revs = None
    if opts['rev']:
        revs = [repo.lookup(r) for r in opts['rev']]

    other = hg.repository(ui, dest)
    # Changesets present locally that the remote does not have.
    nodes = repo.findoutgoing(other, force=opts['force'])
    if not nodes:
        ui.status(_("no changes found\n"))
        return
    nodes = repo.changelog.nodesbetween(nodes, revs)[0]
    if opts['newest_first']:
        nodes.reverse()
    displayer = cmdutil.show_changeset(ui, repo, opts)
    for node in nodes:
        ps = [p for p in repo.changelog.parents(node) if p != nullid]
        # A merge has two non-null parents.
        if opts['no_merges'] and len(ps) == 2:
            continue
        displayer.show(changenode=node)
1825
1825
def parents(ui, repo, file_=None, **opts):
    """show the parents of the working dir or revision

    Print the working directory's parent revisions.
    """
    rev = opts.get('rev')
    if not rev:
        # No revision requested: show the dirstate's parents.
        nodes = repo.dirstate.parents()
    else:
        if file_:
            ctx = repo.filectx(file_, changeid=rev)
        else:
            ctx = repo.changectx(rev)
        nodes = [cp.node() for cp in ctx.parents()]

    displayer = cmdutil.show_changeset(ui, repo, opts)
    for node in nodes:
        # dirstate.parents() pads with nullid; skip the placeholder.
        if node != nullid:
            displayer.show(changenode=node)
1845
1845
def paths(ui, repo, search=None):
    """show definition of symbolic path names

    Show definition of symbolic path name NAME. If no name is given, show
    definition of available names.

    Path names are defined in the [paths] section of /etc/mercurial/hgrc
    and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
    """
    if not search:
        # No name given: list every configured path.
        for name, path in ui.configitems("paths"):
            ui.write("%s = %s\n" % (name, path))
        return
    # Look up one specific symbolic name.
    for name, path in ui.configitems("paths"):
        if name == search:
            ui.write("%s\n" % path)
            return
    ui.warn(_("not found!\n"))
    return 1
1865
1865
def postincoming(ui, repo, modheads, optupdate):
    # Shared tail for commands that bring in changesets: optionally update
    # the working dir and print a hint, based on how many heads the
    # incoming changesets added or changed.
    if modheads == 0:
        return
    if optupdate:
        if modheads != 1:
            ui.status(_("not updating, since new heads added\n"))
        else:
            return hg.update(repo, repo.changelog.tip()) # update
    if modheads > 1:
        ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
    else:
        ui.status(_("(run 'hg update' to get a working copy)\n"))
1878
1878
def pull(ui, repo, source="default", **opts):
    """pull changes from the specified source

    Pull changes from a remote repository to a local one.

    This finds all changes from the repository at the specified path
    or URL and adds them to the local repository. By default, this
    does not update the copy of the project in the working directory.

    Valid URLs are of the form:

      local/filesystem/path (or file://local/filesystem/path)
      http://[user@]host[:port]/[path]
      https://[user@]host[:port]/[path]
      ssh://[user@]host[:port]/[path]
      static-http://host[:port]/[path]

    Paths in the local filesystem can either point to Mercurial
    repositories or to bundle files (as created by 'hg bundle' or
    'hg incoming --bundle'). The static-http:// protocol, albeit slow,
    allows access to a Mercurial repository where you simply use a web
    server to publish the .hg directory as static content.

    Some notes about using SSH with Mercurial:
    - SSH requires an accessible shell account on the destination machine
      and a copy of hg in the remote path or specified with as remotecmd.
    - path is relative to the remote user's home directory by default.
      Use an extra slash at the start of a path to specify an absolute path:
        ssh://example.com//tmp/repository
    - Mercurial doesn't use its own compression via SSH; the right thing
      to do is to configure it in your ~/.ssh/config, e.g.:
        Host *.mylocalnetwork.example.com
          Compression no
        Host *
          Compression yes
      Alternatively specify "ssh -C" as your ssh command in your hgrc or
      with the --ssh command line option.
    """
    source = ui.expandpath(source)
    setremoteconfig(ui, opts)

    other = hg.repository(ui, source)
    ui.status(_('pulling from %s\n') % (source))
    revs = None
    if opts['rev']:
        # Pulling specific revisions only works when the remote side can
        # translate revision names into nodes for us.
        if 'lookup' not in other.capabilities:
            error = _("Other repository doesn't support revision lookup, so a rev cannot be specified.")
            raise util.Abort(error)
        revs = [other.lookup(r) for r in opts['rev']]
    modheads = repo.pull(other, heads=revs, force=opts['force'])
    return postincoming(ui, repo, modheads, opts['update'])
1931
1931
def push(ui, repo, dest=None, **opts):
    """push changes to the specified destination

    Push changes from the local repository to the given destination.

    This is the symmetrical operation for pull. It helps to move
    changes from the current repository to a different one. If the
    destination is local this is identical to a pull in that directory
    from the current one.

    By default, push will refuse to run if it detects the result would
    increase the number of remote heads. This generally indicates the
    client has forgotten to sync and merge before pushing.

    Valid URLs are of the form:

      local/filesystem/path (or file://local/filesystem/path)
      ssh://[user@]host[:port]/[path]
      http://[user@]host[:port]/[path]
      https://[user@]host[:port]/[path]

    Look at the help text for the pull command for important details
    about ssh:// URLs.

    Pushing to http:// and https:// URLs is only possible, if this
    feature is explicitly enabled on the remote Mercurial server.
    """
    # Fix doubled word in the original help text ("the the client").
    dest = ui.expandpath(dest or 'default-push', dest or 'default')
    setremoteconfig(ui, opts)

    other = hg.repository(ui, dest)
    ui.status('pushing to %s\n' % (dest))
    revs = None
    if opts['rev']:
        # Only the named revisions (and their ancestors) are pushed.
        revs = [repo.lookup(rev) for rev in opts['rev']]
    # repo.push returns 0 on error; translate to a truthy shell result.
    r = repo.push(other, opts['force'], revs=revs)
    return r == 0
1969
1969
1970 def rawcommit(ui, repo, *pats, **opts):
1970 def rawcommit(ui, repo, *pats, **opts):
1971 """raw commit interface (DEPRECATED)
1971 """raw commit interface (DEPRECATED)
1972
1972
1973 (DEPRECATED)
1973 (DEPRECATED)
1974 Lowlevel commit, for use in helper scripts.
1974 Lowlevel commit, for use in helper scripts.
1975
1975
1976 This command is not intended to be used by normal users, as it is
1976 This command is not intended to be used by normal users, as it is
1977 primarily useful for importing from other SCMs.
1977 primarily useful for importing from other SCMs.
1978
1978
1979 This command is now deprecated and will be removed in a future
1979 This command is now deprecated and will be removed in a future
1980 release, please use debugsetparents and commit instead.
1980 release, please use debugsetparents and commit instead.
1981 """
1981 """
1982
1982
1983 ui.warn(_("(the rawcommit command is deprecated)\n"))
1983 ui.warn(_("(the rawcommit command is deprecated)\n"))
1984
1984
1985 message = logmessage(opts)
1985 message = logmessage(opts)
1986
1986
1987 files, match, anypats = cmdutil.matchpats(repo, pats, opts)
1987 files, match, anypats = cmdutil.matchpats(repo, pats, opts)
1988 if opts['files']:
1988 if opts['files']:
1989 files += open(opts['files']).read().splitlines()
1989 files += open(opts['files']).read().splitlines()
1990
1990
1991 parents = [repo.lookup(p) for p in opts['parent']]
1991 parents = [repo.lookup(p) for p in opts['parent']]
1992
1992
1993 try:
1993 try:
1994 repo.rawcommit(files, message, opts['user'], opts['date'], *parents)
1994 repo.rawcommit(files, message, opts['user'], opts['date'], *parents)
1995 except ValueError, inst:
1995 except ValueError, inst:
1996 raise util.Abort(str(inst))
1996 raise util.Abort(str(inst))
1997
1997
def recover(ui, repo):
    """roll back an interrupted transaction

    Recover from an interrupted commit or pull.

    This command tries to fix the repository status after an interrupted
    operation. It should only be necessary when Mercurial suggests it.
    """
    if not repo.recover():
        # Nothing was recovered; signal failure to the shell.
        return 1
    # A journal was replayed; verify repository integrity afterwards.
    return hg.verify(repo)
2009
2009
def remove(ui, repo, *pats, **opts):
    """remove the specified files on the next commit

    Schedule the indicated files for removal from the repository.

    This only removes files from the current branch, not from the
    entire project history. If the files still exist in the working
    directory, they will be deleted from it. If invoked with --after,
    files that have been manually deleted are marked as removed.

    This command schedules the files to be removed at the next commit.
    To undo a remove before that, see hg revert.

    Modified files and added files are not removed by default. To
    remove them, use the -f/--force option.
    """
    # Dropped two dead locals from the original: an unused `names = []`
    # and `exact = dict.fromkeys(files)`, which was immediately shadowed
    # by the walk-loop variable of the same name.
    if not opts['after'] and not pats:
        raise util.Abort(_('no files specified'))
    files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
    # First five status lists: modified, added, removed, deleted, unknown
    # (as membership dicts for O(1) lookups below).
    mardu = map(dict.fromkeys, repo.status(files=files, match=matchfn))[:5]
    modified, added, removed, deleted, unknown = mardu
    remove, forget = [], []
    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
        reason = None
        if abs not in deleted and opts['after']:
            reason = _('is still present')
        elif abs in modified and not opts['force']:
            reason = _('is modified (use -f to force removal)')
        elif abs in added:
            if opts['force']:
                # Forced removal of a freshly-added file just forgets it.
                forget.append(abs)
                continue
            reason = _('has been marked for add (use -f to force removal)')
        elif abs in unknown:
            reason = _('is not managed')
        elif abs in removed:
            # Already scheduled for removal; nothing to do.
            continue
        if reason:
            # Only warn when the user named this file explicitly.
            if exact:
                ui.warn(_('not removing %s: file %s\n') % (rel, reason))
        else:
            if ui.verbose or not exact:
                ui.status(_('removing %s\n') % rel)
            remove.append(abs)
    repo.forget(forget)
    repo.remove(remove, unlink=not opts['after'])
2058
2058
def rename(ui, repo, *pats, **opts):
    """rename files; equivalent of copy + remove

    Mark dest as copies of sources; mark sources for deletion. If
    dest is a directory, copies are put in that directory. If dest is
    a file, there can only be one source.

    By default, this command copies the contents of files as they
    stand in the working directory. If invoked with --after, the
    operation is recorded, but no copying is performed.

    This command takes effect in the next commit. To undo a rename
    before that, see hg revert.
    """
    wlock = repo.wlock(0)
    errs, copied = docopy(ui, repo, pats, opts, wlock)
    # The copy half is done; now schedule every source for removal.
    sources = []
    for abs, rel, exact in copied:
        if ui.verbose or not exact:
            ui.status(_('removing %s\n') % rel)
        sources.append(abs)
    if not opts.get('dry_run'):
        repo.remove(sources, True, wlock)
    return errs
2083
2083
2084 def revert(ui, repo, *pats, **opts):
2084 def revert(ui, repo, *pats, **opts):
2085 """revert files or dirs to their states as of some revision
2085 """revert files or dirs to their states as of some revision
2086
2086
2087 With no revision specified, revert the named files or directories
2087 With no revision specified, revert the named files or directories
2088 to the contents they had in the parent of the working directory.
2088 to the contents they had in the parent of the working directory.
2089 This restores the contents of the affected files to an unmodified
2089 This restores the contents of the affected files to an unmodified
2090 state and unschedules adds, removes, copies, and renames. If the
2090 state and unschedules adds, removes, copies, and renames. If the
2091 working directory has two parents, you must explicitly specify the
2091 working directory has two parents, you must explicitly specify the
2092 revision to revert to.
2092 revision to revert to.
2093
2093
2094 Modified files are saved with a .orig suffix before reverting.
2094 Modified files are saved with a .orig suffix before reverting.
2095 To disable these backups, use --no-backup.
2095 To disable these backups, use --no-backup.
2096
2096
2097 Using the -r option, revert the given files or directories to their
2097 Using the -r option, revert the given files or directories to their
2098 contents as of a specific revision. This can be helpful to "roll
2098 contents as of a specific revision. This can be helpful to "roll
2099 back" some or all of a change that should not have been committed.
2099 back" some or all of a change that should not have been committed.
2100
2100
2101 Revert modifies the working directory. It does not commit any
2101 Revert modifies the working directory. It does not commit any
2102 changes, or change the parent of the working directory. If you
2102 changes, or change the parent of the working directory. If you
2103 revert to a revision other than the parent of the working
2103 revert to a revision other than the parent of the working
2104 directory, the reverted files will thus appear modified
2104 directory, the reverted files will thus appear modified
2105 afterwards.
2105 afterwards.
2106
2106
2107 If a file has been deleted, it is recreated. If the executable
2107 If a file has been deleted, it is recreated. If the executable
2108 mode of a file was changed, it is reset.
2108 mode of a file was changed, it is reset.
2109
2109
2110 If names are given, all files matching the names are reverted.
2110 If names are given, all files matching the names are reverted.
2111
2111
2112 If no arguments are given, no files are reverted.
2112 If no arguments are given, no files are reverted.
2113 """
2113 """
2114
2114
2115 if opts["date"]:
2115 if opts["date"]:
2116 if opts["rev"]:
2116 if opts["rev"]:
2117 raise util.Abort(_("you can't specify a revision and a date"))
2117 raise util.Abort(_("you can't specify a revision and a date"))
2118 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
2118 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
2119
2119
2120 if not pats and not opts['all']:
2120 if not pats and not opts['all']:
2121 raise util.Abort(_('no files or directories specified; '
2121 raise util.Abort(_('no files or directories specified; '
2122 'use --all to revert the whole repo'))
2122 'use --all to revert the whole repo'))
2123
2123
2124 parent, p2 = repo.dirstate.parents()
2124 parent, p2 = repo.dirstate.parents()
2125 if not opts['rev'] and p2 != nullid:
2125 if not opts['rev'] and p2 != nullid:
2126 raise util.Abort(_('uncommitted merge - please provide a '
2126 raise util.Abort(_('uncommitted merge - please provide a '
2127 'specific revision'))
2127 'specific revision'))
2128 node = repo.changectx(opts['rev']).node()
2128 node = repo.changectx(opts['rev']).node()
2129 mf = repo.manifest.read(repo.changelog.read(node)[0])
2129 mf = repo.manifest.read(repo.changelog.read(node)[0])
2130 if node == parent:
2130 if node == parent:
2131 pmf = mf
2131 pmf = mf
2132 else:
2132 else:
2133 pmf = None
2133 pmf = None
2134
2134
2135 wlock = repo.wlock()
2135 wlock = repo.wlock()
2136
2136
2137 # need all matching names in dirstate and manifest of target rev,
2137 # need all matching names in dirstate and manifest of target rev,
2138 # so have to walk both. do not print errors if files exist in one
2138 # so have to walk both. do not print errors if files exist in one
2139 # but not other.
2139 # but not other.
2140
2140
2141 names = {}
2141 names = {}
2142 target_only = {}
2142 target_only = {}
2143
2143
2144 # walk dirstate.
2144 # walk dirstate.
2145
2145
2146 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
2146 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
2147 badmatch=mf.has_key):
2147 badmatch=mf.has_key):
2148 names[abs] = (rel, exact)
2148 names[abs] = (rel, exact)
2149 if src == 'b':
2149 if src == 'b':
2150 target_only[abs] = True
2150 target_only[abs] = True
2151
2151
2152 # walk target manifest.
2152 # walk target manifest.
2153
2153
2154 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
2154 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
2155 badmatch=names.has_key):
2155 badmatch=names.has_key):
2156 if abs in names: continue
2156 if abs in names: continue
2157 names[abs] = (rel, exact)
2157 names[abs] = (rel, exact)
2158 target_only[abs] = True
2158 target_only[abs] = True
2159
2159
2160 changes = repo.status(match=names.has_key, wlock=wlock)[:5]
2160 changes = repo.status(match=names.has_key, wlock=wlock)[:5]
2161 modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)
2161 modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)
2162
2162
2163 revert = ([], _('reverting %s\n'))
2163 revert = ([], _('reverting %s\n'))
2164 add = ([], _('adding %s\n'))
2164 add = ([], _('adding %s\n'))
2165 remove = ([], _('removing %s\n'))
2165 remove = ([], _('removing %s\n'))
2166 forget = ([], _('forgetting %s\n'))
2166 forget = ([], _('forgetting %s\n'))
2167 undelete = ([], _('undeleting %s\n'))
2167 undelete = ([], _('undeleting %s\n'))
2168 update = {}
2168 update = {}
2169
2169
2170 disptable = (
2170 disptable = (
2171 # dispatch table:
2171 # dispatch table:
2172 # file state
2172 # file state
2173 # action if in target manifest
2173 # action if in target manifest
2174 # action if not in target manifest
2174 # action if not in target manifest
2175 # make backup if in target manifest
2175 # make backup if in target manifest
2176 # make backup if not in target manifest
2176 # make backup if not in target manifest
2177 (modified, revert, remove, True, True),
2177 (modified, revert, remove, True, True),
2178 (added, revert, forget, True, False),
2178 (added, revert, forget, True, False),
2179 (removed, undelete, None, False, False),
2179 (removed, undelete, None, False, False),
2180 (deleted, revert, remove, False, False),
2180 (deleted, revert, remove, False, False),
2181 (unknown, add, None, True, False),
2181 (unknown, add, None, True, False),
2182 (target_only, add, None, False, False),
2182 (target_only, add, None, False, False),
2183 )
2183 )
2184
2184
2185 entries = names.items()
2185 entries = names.items()
2186 entries.sort()
2186 entries.sort()
2187
2187
2188 for abs, (rel, exact) in entries:
2188 for abs, (rel, exact) in entries:
2189 mfentry = mf.get(abs)
2189 mfentry = mf.get(abs)
2190 def handle(xlist, dobackup):
2190 def handle(xlist, dobackup):
2191 xlist[0].append(abs)
2191 xlist[0].append(abs)
2192 update[abs] = 1
2192 update[abs] = 1
2193 if dobackup and not opts['no_backup'] and os.path.exists(rel):
2193 if dobackup and not opts['no_backup'] and os.path.exists(rel):
2194 bakname = "%s.orig" % rel
2194 bakname = "%s.orig" % rel
2195 ui.note(_('saving current version of %s as %s\n') %
2195 ui.note(_('saving current version of %s as %s\n') %
2196 (rel, bakname))
2196 (rel, bakname))
2197 if not opts.get('dry_run'):
2197 if not opts.get('dry_run'):
2198 util.copyfile(rel, bakname)
2198 util.copyfile(rel, bakname)
2199 if ui.verbose or not exact:
2199 if ui.verbose or not exact:
2200 ui.status(xlist[1] % rel)
2200 ui.status(xlist[1] % rel)
2201 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2201 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2202 if abs not in table: continue
2202 if abs not in table: continue
2203 # file has changed in dirstate
2203 # file has changed in dirstate
2204 if mfentry:
2204 if mfentry:
2205 handle(hitlist, backuphit)
2205 handle(hitlist, backuphit)
2206 elif misslist is not None:
2206 elif misslist is not None:
2207 handle(misslist, backupmiss)
2207 handle(misslist, backupmiss)
2208 else:
2208 else:
2209 if exact: ui.warn(_('file not managed: %s\n') % rel)
2209 if exact: ui.warn(_('file not managed: %s\n') % rel)
2210 break
2210 break
2211 else:
2211 else:
2212 # file has not changed in dirstate
2212 # file has not changed in dirstate
2213 if node == parent:
2213 if node == parent:
2214 if exact: ui.warn(_('no changes needed to %s\n') % rel)
2214 if exact: ui.warn(_('no changes needed to %s\n') % rel)
2215 continue
2215 continue
2216 if pmf is None:
2216 if pmf is None:
2217 # only need parent manifest in this unlikely case,
2217 # only need parent manifest in this unlikely case,
2218 # so do not read by default
2218 # so do not read by default
2219 pmf = repo.manifest.read(repo.changelog.read(parent)[0])
2219 pmf = repo.manifest.read(repo.changelog.read(parent)[0])
2220 if abs in pmf:
2220 if abs in pmf:
2221 if mfentry:
2221 if mfentry:
2222 # if version of file is same in parent and target
2222 # if version of file is same in parent and target
2223 # manifests, do nothing
2223 # manifests, do nothing
2224 if pmf[abs] != mfentry:
2224 if pmf[abs] != mfentry:
2225 handle(revert, False)
2225 handle(revert, False)
2226 else:
2226 else:
2227 handle(remove, False)
2227 handle(remove, False)
2228
2228
2229 if not opts.get('dry_run'):
2229 if not opts.get('dry_run'):
2230 repo.dirstate.forget(forget[0])
2230 repo.dirstate.forget(forget[0])
2231 r = hg.revert(repo, node, update.has_key, wlock)
2231 r = hg.revert(repo, node, update.has_key, wlock)
2232 repo.dirstate.update(add[0], 'a')
2232 repo.dirstate.update(add[0], 'a')
2233 repo.dirstate.update(undelete[0], 'n')
2233 repo.dirstate.update(undelete[0], 'n')
2234 repo.dirstate.update(remove[0], 'r')
2234 repo.dirstate.update(remove[0], 'r')
2235 return r
2235 return r
2236
2236
2237 def rollback(ui, repo):
2237 def rollback(ui, repo):
2238 """roll back the last transaction in this repository
2238 """roll back the last transaction in this repository
2239
2239
2240 Roll back the last transaction in this repository, restoring the
2240 Roll back the last transaction in this repository, restoring the
2241 project to its state prior to the transaction.
2241 project to its state prior to the transaction.
2242
2242
2243 Transactions are used to encapsulate the effects of all commands
2243 Transactions are used to encapsulate the effects of all commands
2244 that create new changesets or propagate existing changesets into a
2244 that create new changesets or propagate existing changesets into a
2245 repository. For example, the following commands are transactional,
2245 repository. For example, the following commands are transactional,
2246 and their effects can be rolled back:
2246 and their effects can be rolled back:
2247
2247
2248 commit
2248 commit
2249 import
2249 import
2250 pull
2250 pull
2251 push (with this repository as destination)
2251 push (with this repository as destination)
2252 unbundle
2252 unbundle
2253
2253
2254 This command should be used with care. There is only one level of
2254 This command should be used with care. There is only one level of
2255 rollback, and there is no way to undo a rollback.
2255 rollback, and there is no way to undo a rollback.
2256
2256
2257 This command is not intended for use on public repositories. Once
2257 This command is not intended for use on public repositories. Once
2258 changes are visible for pull by other users, rolling a transaction
2258 changes are visible for pull by other users, rolling a transaction
2259 back locally is ineffective (someone else may already have pulled
2259 back locally is ineffective (someone else may already have pulled
2260 the changes). Furthermore, a race is possible with readers of the
2260 the changes). Furthermore, a race is possible with readers of the
2261 repository; for example an in-progress pull from the repository
2261 repository; for example an in-progress pull from the repository
2262 may fail if a rollback is performed.
2262 may fail if a rollback is performed.
2263 """
2263 """
2264 repo.rollback()
2264 repo.rollback()
2265
2265
2266 def root(ui, repo):
2266 def root(ui, repo):
2267 """print the root (top) of the current working dir
2267 """print the root (top) of the current working dir
2268
2268
2269 Print the root directory of the current repository.
2269 Print the root directory of the current repository.
2270 """
2270 """
2271 ui.write(repo.root + "\n")
2271 ui.write(repo.root + "\n")
2272
2272
2273 def serve(ui, repo, **opts):
2273 def serve(ui, repo, **opts):
2274 """export the repository via HTTP
2274 """export the repository via HTTP
2275
2275
2276 Start a local HTTP repository browser and pull server.
2276 Start a local HTTP repository browser and pull server.
2277
2277
2278 By default, the server logs accesses to stdout and errors to
2278 By default, the server logs accesses to stdout and errors to
2279 stderr. Use the "-A" and "-E" options to log to files.
2279 stderr. Use the "-A" and "-E" options to log to files.
2280 """
2280 """
2281
2281
2282 if opts["stdio"]:
2282 if opts["stdio"]:
2283 if repo is None:
2283 if repo is None:
2284 raise hg.RepoError(_("There is no Mercurial repository here"
2284 raise hg.RepoError(_("There is no Mercurial repository here"
2285 " (.hg not found)"))
2285 " (.hg not found)"))
2286 s = sshserver.sshserver(ui, repo)
2286 s = sshserver.sshserver(ui, repo)
2287 s.serve_forever()
2287 s.serve_forever()
2288
2288
2289 optlist = ("name templates style address port ipv6"
2289 optlist = ("name templates style address port ipv6"
2290 " accesslog errorlog webdir_conf")
2290 " accesslog errorlog webdir_conf")
2291 for o in optlist.split():
2291 for o in optlist.split():
2292 if opts[o]:
2292 if opts[o]:
2293 ui.setconfig("web", o, str(opts[o]))
2293 ui.setconfig("web", o, str(opts[o]))
2294
2294
2295 if repo is None and not ui.config("web", "webdir_conf"):
2295 if repo is None and not ui.config("web", "webdir_conf"):
2296 raise hg.RepoError(_("There is no Mercurial repository here"
2296 raise hg.RepoError(_("There is no Mercurial repository here"
2297 " (.hg not found)"))
2297 " (.hg not found)"))
2298
2298
2299 if opts['daemon'] and not opts['daemon_pipefds']:
2299 if opts['daemon'] and not opts['daemon_pipefds']:
2300 rfd, wfd = os.pipe()
2300 rfd, wfd = os.pipe()
2301 args = sys.argv[:]
2301 args = sys.argv[:]
2302 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
2302 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
2303 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
2303 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
2304 args[0], args)
2304 args[0], args)
2305 os.close(wfd)
2305 os.close(wfd)
2306 os.read(rfd, 1)
2306 os.read(rfd, 1)
2307 os._exit(0)
2307 os._exit(0)
2308
2308
2309 httpd = hgweb.server.create_server(ui, repo)
2309 httpd = hgweb.server.create_server(ui, repo)
2310
2310
2311 if ui.verbose:
2311 if ui.verbose:
2312 if httpd.port != 80:
2312 if httpd.port != 80:
2313 ui.status(_('listening at http://%s:%d/\n') %
2313 ui.status(_('listening at http://%s:%d/\n') %
2314 (httpd.addr, httpd.port))
2314 (httpd.addr, httpd.port))
2315 else:
2315 else:
2316 ui.status(_('listening at http://%s/\n') % httpd.addr)
2316 ui.status(_('listening at http://%s/\n') % httpd.addr)
2317
2317
2318 if opts['pid_file']:
2318 if opts['pid_file']:
2319 fp = open(opts['pid_file'], 'w')
2319 fp = open(opts['pid_file'], 'w')
2320 fp.write(str(os.getpid()) + '\n')
2320 fp.write(str(os.getpid()) + '\n')
2321 fp.close()
2321 fp.close()
2322
2322
2323 if opts['daemon_pipefds']:
2323 if opts['daemon_pipefds']:
2324 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
2324 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
2325 os.close(rfd)
2325 os.close(rfd)
2326 os.write(wfd, 'y')
2326 os.write(wfd, 'y')
2327 os.close(wfd)
2327 os.close(wfd)
2328 sys.stdout.flush()
2328 sys.stdout.flush()
2329 sys.stderr.flush()
2329 sys.stderr.flush()
2330 fd = os.open(util.nulldev, os.O_RDWR)
2330 fd = os.open(util.nulldev, os.O_RDWR)
2331 if fd != 0: os.dup2(fd, 0)
2331 if fd != 0: os.dup2(fd, 0)
2332 if fd != 1: os.dup2(fd, 1)
2332 if fd != 1: os.dup2(fd, 1)
2333 if fd != 2: os.dup2(fd, 2)
2333 if fd != 2: os.dup2(fd, 2)
2334 if fd not in (0, 1, 2): os.close(fd)
2334 if fd not in (0, 1, 2): os.close(fd)
2335
2335
2336 httpd.serve_forever()
2336 httpd.serve_forever()
2337
2337
2338 def status(ui, repo, *pats, **opts):
2338 def status(ui, repo, *pats, **opts):
2339 """show changed files in the working directory
2339 """show changed files in the working directory
2340
2340
2341 Show status of files in the repository. If names are given, only
2341 Show status of files in the repository. If names are given, only
2342 files that match are shown. Files that are clean or ignored, are
2342 files that match are shown. Files that are clean or ignored, are
2343 not listed unless -c (clean), -i (ignored) or -A is given.
2343 not listed unless -c (clean), -i (ignored) or -A is given.
2344
2344
2345 NOTE: status may appear to disagree with diff if permissions have
2345 NOTE: status may appear to disagree with diff if permissions have
2346 changed or a merge has occurred. The standard diff format does not
2346 changed or a merge has occurred. The standard diff format does not
2347 report permission changes and diff only reports changes relative
2347 report permission changes and diff only reports changes relative
2348 to one merge parent.
2348 to one merge parent.
2349
2349
2350 If one revision is given, it is used as the base revision.
2350 If one revision is given, it is used as the base revision.
2351 If two revisions are given, the difference between them is shown.
2351 If two revisions are given, the difference between them is shown.
2352
2352
2353 The codes used to show the status of files are:
2353 The codes used to show the status of files are:
2354 M = modified
2354 M = modified
2355 A = added
2355 A = added
2356 R = removed
2356 R = removed
2357 C = clean
2357 C = clean
2358 ! = deleted, but still tracked
2358 ! = deleted, but still tracked
2359 ? = not tracked
2359 ? = not tracked
2360 I = ignored (not shown by default)
2360 I = ignored (not shown by default)
2361 = the previous added file was copied from here
2361 = the previous added file was copied from here
2362 """
2362 """
2363
2363
2364 all = opts['all']
2364 all = opts['all']
2365 node1, node2 = cmdutil.revpair(repo, opts.get('rev'))
2365 node1, node2 = cmdutil.revpair(repo, opts.get('rev'))
2366
2366
2367 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2367 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2368 cwd = (pats and repo.getcwd()) or ''
2368 cwd = (pats and repo.getcwd()) or ''
2369 modified, added, removed, deleted, unknown, ignored, clean = [
2369 modified, added, removed, deleted, unknown, ignored, clean = [
2370 [util.pathto(cwd, x) for x in n]
2370 [util.pathto(cwd, x) for x in n]
2371 for n in repo.status(node1=node1, node2=node2, files=files,
2371 for n in repo.status(node1=node1, node2=node2, files=files,
2372 match=matchfn,
2372 match=matchfn,
2373 list_ignored=all or opts['ignored'],
2373 list_ignored=all or opts['ignored'],
2374 list_clean=all or opts['clean'])]
2374 list_clean=all or opts['clean'])]
2375
2375
2376 changetypes = (('modified', 'M', modified),
2376 changetypes = (('modified', 'M', modified),
2377 ('added', 'A', added),
2377 ('added', 'A', added),
2378 ('removed', 'R', removed),
2378 ('removed', 'R', removed),
2379 ('deleted', '!', deleted),
2379 ('deleted', '!', deleted),
2380 ('unknown', '?', unknown),
2380 ('unknown', '?', unknown),
2381 ('ignored', 'I', ignored))
2381 ('ignored', 'I', ignored))
2382
2382
2383 explicit_changetypes = changetypes + (('clean', 'C', clean),)
2383 explicit_changetypes = changetypes + (('clean', 'C', clean),)
2384
2384
2385 end = opts['print0'] and '\0' or '\n'
2385 end = opts['print0'] and '\0' or '\n'
2386
2386
2387 for opt, char, changes in ([ct for ct in explicit_changetypes
2387 for opt, char, changes in ([ct for ct in explicit_changetypes
2388 if all or opts[ct[0]]]
2388 if all or opts[ct[0]]]
2389 or changetypes):
2389 or changetypes):
2390 if opts['no_status']:
2390 if opts['no_status']:
2391 format = "%%s%s" % end
2391 format = "%%s%s" % end
2392 else:
2392 else:
2393 format = "%s %%s%s" % (char, end)
2393 format = "%s %%s%s" % (char, end)
2394
2394
2395 for f in changes:
2395 for f in changes:
2396 ui.write(format % f)
2396 ui.write(format % f)
2397 if ((all or opts.get('copies')) and not opts.get('no_status')):
2397 if ((all or opts.get('copies')) and not opts.get('no_status')):
2398 copied = repo.dirstate.copied(f)
2398 copied = repo.dirstate.copied(f)
2399 if copied:
2399 if copied:
2400 ui.write(' %s%s' % (copied, end))
2400 ui.write(' %s%s' % (copied, end))
2401
2401
2402 def tag(ui, repo, name, rev_=None, **opts):
2402 def tag(ui, repo, name, rev_=None, **opts):
2403 """add a tag for the current or given revision
2403 """add a tag for the current or given revision
2404
2404
2405 Name a particular revision using <name>.
2405 Name a particular revision using <name>.
2406
2406
2407 Tags are used to name particular revisions of the repository and are
2407 Tags are used to name particular revisions of the repository and are
2408 very useful to compare different revision, to go back to significant
2408 very useful to compare different revision, to go back to significant
2409 earlier versions or to mark branch points as releases, etc.
2409 earlier versions or to mark branch points as releases, etc.
2410
2410
2411 If no revision is given, the parent of the working directory is used,
2411 If no revision is given, the parent of the working directory is used,
2412 or tip if no revision is checked out.
2412 or tip if no revision is checked out.
2413
2413
2414 To facilitate version control, distribution, and merging of tags,
2414 To facilitate version control, distribution, and merging of tags,
2415 they are stored as a file named ".hgtags" which is managed
2415 they are stored as a file named ".hgtags" which is managed
2416 similarly to other project files and can be hand-edited if
2416 similarly to other project files and can be hand-edited if
2417 necessary. The file '.hg/localtags' is used for local tags (not
2417 necessary. The file '.hg/localtags' is used for local tags (not
2418 shared among repositories).
2418 shared among repositories).
2419 """
2419 """
2420 if name in ['tip', '.', 'null']:
2420 if name in ['tip', '.', 'null']:
2421 raise util.Abort(_("the name '%s' is reserved") % name)
2421 raise util.Abort(_("the name '%s' is reserved") % name)
2422 if rev_ is not None:
2422 if rev_ is not None:
2423 ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
2423 ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
2424 "please use 'hg tag [-r REV] NAME' instead\n"))
2424 "please use 'hg tag [-r REV] NAME' instead\n"))
2425 if opts['rev']:
2425 if opts['rev']:
2426 raise util.Abort(_("use only one form to specify the revision"))
2426 raise util.Abort(_("use only one form to specify the revision"))
2427 if opts['rev']:
2427 if opts['rev']:
2428 rev_ = opts['rev']
2428 rev_ = opts['rev']
2429 if not rev_ and repo.dirstate.parents()[1] != nullid:
2429 if not rev_ and repo.dirstate.parents()[1] != nullid:
2430 raise util.Abort(_('uncommitted merge - please provide a '
2430 raise util.Abort(_('uncommitted merge - please provide a '
2431 'specific revision'))
2431 'specific revision'))
2432 r = repo.changectx(rev_).node()
2432 r = repo.changectx(rev_).node()
2433
2433
2434 message = opts['message']
2434 message = opts['message']
2435 if not message:
2435 if not message:
2436 message = _('Added tag %s for changeset %s') % (name, short(r))
2436 message = _('Added tag %s for changeset %s') % (name, short(r))
2437
2437
2438 repo.tag(name, r, message, opts['local'], opts['user'], opts['date'])
2438 repo.tag(name, r, message, opts['local'], opts['user'], opts['date'])
2439
2439
2440 def tags(ui, repo):
2440 def tags(ui, repo):
2441 """list repository tags
2441 """list repository tags
2442
2442
2443 List the repository tags.
2443 List the repository tags.
2444
2444
2445 This lists both regular and local tags.
2445 This lists both regular and local tags.
2446 """
2446 """
2447
2447
2448 l = repo.tagslist()
2448 l = repo.tagslist()
2449 l.reverse()
2449 l.reverse()
2450 hexfunc = ui.debugflag and hex or short
2450 hexfunc = ui.debugflag and hex or short
2451 for t, n in l:
2451 for t, n in l:
2452 try:
2452 try:
2453 hn = hexfunc(n)
2453 r = "%5d:%s" % (repo.changelog.rev(n), hexfunc(n))
2454 r = "%5d:%s" % (repo.changelog.rev(n), hexfunc(n))
2454 except KeyError:
2455 except revlog.LookupError:
2455 r = " ?:?"
2456 r = " ?:%s" % hn
2456 if ui.quiet:
2457 if ui.quiet:
2457 ui.write("%s\n" % t)
2458 ui.write("%s\n" % t)
2458 else:
2459 else:
2459 t = util.localsub(t, 30)
2460 t = util.localsub(t, 30)
2460 t += " " * (30 - util.locallen(t))
2461 t += " " * (30 - util.locallen(t))
2461 ui.write("%s %s\n" % (t, r))
2462 ui.write("%s %s\n" % (t, r))
2462
2463
2463 def tip(ui, repo, **opts):
2464 def tip(ui, repo, **opts):
2464 """show the tip revision
2465 """show the tip revision
2465
2466
2466 Show the tip revision.
2467 Show the tip revision.
2467 """
2468 """
2468 cmdutil.show_changeset(ui, repo, opts).show(nullrev+repo.changelog.count())
2469 cmdutil.show_changeset(ui, repo, opts).show(nullrev+repo.changelog.count())
2469
2470
2470 def unbundle(ui, repo, fname, **opts):
2471 def unbundle(ui, repo, fname, **opts):
2471 """apply a changegroup file
2472 """apply a changegroup file
2472
2473
2473 Apply a compressed changegroup file generated by the bundle
2474 Apply a compressed changegroup file generated by the bundle
2474 command.
2475 command.
2475 """
2476 """
2476 gen = changegroup.readbundle(urllib.urlopen(fname), fname)
2477 gen = changegroup.readbundle(urllib.urlopen(fname), fname)
2477 modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
2478 modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
2478 return postincoming(ui, repo, modheads, opts['update'])
2479 return postincoming(ui, repo, modheads, opts['update'])
2479
2480
2480 def update(ui, repo, node=None, clean=False, date=None):
2481 def update(ui, repo, node=None, clean=False, date=None):
2481 """update or merge working directory
2482 """update or merge working directory
2482
2483
2483 Update the working directory to the specified revision.
2484 Update the working directory to the specified revision.
2484
2485
2485 If there are no outstanding changes in the working directory and
2486 If there are no outstanding changes in the working directory and
2486 there is a linear relationship between the current version and the
2487 there is a linear relationship between the current version and the
2487 requested version, the result is the requested version.
2488 requested version, the result is the requested version.
2488
2489
2489 To merge the working directory with another revision, use the
2490 To merge the working directory with another revision, use the
2490 merge command.
2491 merge command.
2491
2492
2492 By default, update will refuse to run if doing so would require
2493 By default, update will refuse to run if doing so would require
2493 merging or discarding local changes.
2494 merging or discarding local changes.
2494 """
2495 """
2495 if date:
2496 if date:
2496 if node:
2497 if node:
2497 raise util.Abort(_("you can't specify a revision and a date"))
2498 raise util.Abort(_("you can't specify a revision and a date"))
2498 node = cmdutil.finddate(ui, repo, date)
2499 node = cmdutil.finddate(ui, repo, date)
2499
2500
2500 if clean:
2501 if clean:
2501 return hg.clean(repo, node)
2502 return hg.clean(repo, node)
2502 else:
2503 else:
2503 return hg.update(repo, node)
2504 return hg.update(repo, node)
2504
2505
2505 def verify(ui, repo):
2506 def verify(ui, repo):
2506 """verify the integrity of the repository
2507 """verify the integrity of the repository
2507
2508
2508 Verify the integrity of the current repository.
2509 Verify the integrity of the current repository.
2509
2510
2510 This will perform an extensive check of the repository's
2511 This will perform an extensive check of the repository's
2511 integrity, validating the hashes and checksums of each entry in
2512 integrity, validating the hashes and checksums of each entry in
2512 the changelog, manifest, and tracked files, as well as the
2513 the changelog, manifest, and tracked files, as well as the
2513 integrity of their crosslinks and indices.
2514 integrity of their crosslinks and indices.
2514 """
2515 """
2515 return hg.verify(repo)
2516 return hg.verify(repo)
2516
2517
2517 def version_(ui):
2518 def version_(ui):
2518 """output version and copyright information"""
2519 """output version and copyright information"""
2519 ui.write(_("Mercurial Distributed SCM (version %s)\n")
2520 ui.write(_("Mercurial Distributed SCM (version %s)\n")
2520 % version.get_version())
2521 % version.get_version())
2521 ui.status(_(
2522 ui.status(_(
2522 "\nCopyright (C) 2005, 2006 Matt Mackall <mpm@selenic.com>\n"
2523 "\nCopyright (C) 2005, 2006 Matt Mackall <mpm@selenic.com>\n"
2523 "This is free software; see the source for copying conditions. "
2524 "This is free software; see the source for copying conditions. "
2524 "There is NO\nwarranty; "
2525 "There is NO\nwarranty; "
2525 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
2526 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
2526 ))
2527 ))
2527
2528
2528 # Command options and aliases are listed here, alphabetically
2529 # Command options and aliases are listed here, alphabetically
2529
2530
2530 globalopts = [
2531 globalopts = [
2531 ('R', 'repository', '',
2532 ('R', 'repository', '',
2532 _('repository root directory or symbolic path name')),
2533 _('repository root directory or symbolic path name')),
2533 ('', 'cwd', '', _('change working directory')),
2534 ('', 'cwd', '', _('change working directory')),
2534 ('y', 'noninteractive', None,
2535 ('y', 'noninteractive', None,
2535 _('do not prompt, assume \'yes\' for any required answers')),
2536 _('do not prompt, assume \'yes\' for any required answers')),
2536 ('q', 'quiet', None, _('suppress output')),
2537 ('q', 'quiet', None, _('suppress output')),
2537 ('v', 'verbose', None, _('enable additional output')),
2538 ('v', 'verbose', None, _('enable additional output')),
2538 ('', 'config', [], _('set/override config option')),
2539 ('', 'config', [], _('set/override config option')),
2539 ('', 'debug', None, _('enable debugging output')),
2540 ('', 'debug', None, _('enable debugging output')),
2540 ('', 'debugger', None, _('start debugger')),
2541 ('', 'debugger', None, _('start debugger')),
2541 ('', 'encoding', util._encoding, _('set the charset encoding')),
2542 ('', 'encoding', util._encoding, _('set the charset encoding')),
2542 ('', 'encodingmode', util._encodingmode, _('set the charset encoding mode')),
2543 ('', 'encodingmode', util._encodingmode, _('set the charset encoding mode')),
2543 ('', 'lsprof', None, _('print improved command execution profile')),
2544 ('', 'lsprof', None, _('print improved command execution profile')),
2544 ('', 'traceback', None, _('print traceback on exception')),
2545 ('', 'traceback', None, _('print traceback on exception')),
2545 ('', 'time', None, _('time how long the command takes')),
2546 ('', 'time', None, _('time how long the command takes')),
2546 ('', 'profile', None, _('print command execution profile')),
2547 ('', 'profile', None, _('print command execution profile')),
2547 ('', 'version', None, _('output version information and exit')),
2548 ('', 'version', None, _('output version information and exit')),
2548 ('h', 'help', None, _('display help and exit')),
2549 ('h', 'help', None, _('display help and exit')),
2549 ]
2550 ]
2550
2551
2551 dryrunopts = [('n', 'dry-run', None,
2552 dryrunopts = [('n', 'dry-run', None,
2552 _('do not perform actions, just print output'))]
2553 _('do not perform actions, just print output'))]
2553
2554
2554 remoteopts = [
2555 remoteopts = [
2555 ('e', 'ssh', '', _('specify ssh command to use')),
2556 ('e', 'ssh', '', _('specify ssh command to use')),
2556 ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
2557 ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
2557 ]
2558 ]
2558
2559
2559 walkopts = [
2560 walkopts = [
2560 ('I', 'include', [], _('include names matching the given patterns')),
2561 ('I', 'include', [], _('include names matching the given patterns')),
2561 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2562 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2562 ]
2563 ]
2563
2564
2564 commitopts = [
2565 commitopts = [
2565 ('m', 'message', '', _('use <text> as commit message')),
2566 ('m', 'message', '', _('use <text> as commit message')),
2566 ('l', 'logfile', '', _('read commit message from <file>')),
2567 ('l', 'logfile', '', _('read commit message from <file>')),
2567 ]
2568 ]
2568
2569
2569 table = {
2570 table = {
2570 "^add": (add, walkopts + dryrunopts, _('hg add [OPTION]... [FILE]...')),
2571 "^add": (add, walkopts + dryrunopts, _('hg add [OPTION]... [FILE]...')),
2571 "addremove":
2572 "addremove":
2572 (addremove,
2573 (addremove,
2573 [('s', 'similarity', '',
2574 [('s', 'similarity', '',
2574 _('guess renamed files by similarity (0<=s<=100)')),
2575 _('guess renamed files by similarity (0<=s<=100)')),
2575 ] + walkopts + dryrunopts,
2576 ] + walkopts + dryrunopts,
2576 _('hg addremove [OPTION]... [FILE]...')),
2577 _('hg addremove [OPTION]... [FILE]...')),
2577 "^annotate":
2578 "^annotate":
2578 (annotate,
2579 (annotate,
2579 [('r', 'rev', '', _('annotate the specified revision')),
2580 [('r', 'rev', '', _('annotate the specified revision')),
2580 ('f', 'follow', None, _('follow file copies and renames')),
2581 ('f', 'follow', None, _('follow file copies and renames')),
2581 ('a', 'text', None, _('treat all files as text')),
2582 ('a', 'text', None, _('treat all files as text')),
2582 ('u', 'user', None, _('list the author')),
2583 ('u', 'user', None, _('list the author')),
2583 ('d', 'date', None, _('list the date')),
2584 ('d', 'date', None, _('list the date')),
2584 ('n', 'number', None, _('list the revision number (default)')),
2585 ('n', 'number', None, _('list the revision number (default)')),
2585 ('c', 'changeset', None, _('list the changeset')),
2586 ('c', 'changeset', None, _('list the changeset')),
2586 ] + walkopts,
2587 ] + walkopts,
2587 _('hg annotate [-r REV] [-f] [-a] [-u] [-d] [-n] [-c] FILE...')),
2588 _('hg annotate [-r REV] [-f] [-a] [-u] [-d] [-n] [-c] FILE...')),
2588 "archive":
2589 "archive":
2589 (archive,
2590 (archive,
2590 [('', 'no-decode', None, _('do not pass files through decoders')),
2591 [('', 'no-decode', None, _('do not pass files through decoders')),
2591 ('p', 'prefix', '', _('directory prefix for files in archive')),
2592 ('p', 'prefix', '', _('directory prefix for files in archive')),
2592 ('r', 'rev', '', _('revision to distribute')),
2593 ('r', 'rev', '', _('revision to distribute')),
2593 ('t', 'type', '', _('type of distribution to create')),
2594 ('t', 'type', '', _('type of distribution to create')),
2594 ] + walkopts,
2595 ] + walkopts,
2595 _('hg archive [OPTION]... DEST')),
2596 _('hg archive [OPTION]... DEST')),
2596 "backout":
2597 "backout":
2597 (backout,
2598 (backout,
2598 [('', 'merge', None,
2599 [('', 'merge', None,
2599 _('merge with old dirstate parent after backout')),
2600 _('merge with old dirstate parent after backout')),
2600 ('d', 'date', '', _('record datecode as commit date')),
2601 ('d', 'date', '', _('record datecode as commit date')),
2601 ('', 'parent', '', _('parent to choose when backing out merge')),
2602 ('', 'parent', '', _('parent to choose when backing out merge')),
2602 ('u', 'user', '', _('record user as committer')),
2603 ('u', 'user', '', _('record user as committer')),
2603 ] + walkopts + commitopts,
2604 ] + walkopts + commitopts,
2604 _('hg backout [OPTION]... REV')),
2605 _('hg backout [OPTION]... REV')),
2605 "branch": (branch, [], _('hg branch [NAME]')),
2606 "branch": (branch, [], _('hg branch [NAME]')),
2606 "branches": (branches, [], _('hg branches')),
2607 "branches": (branches, [], _('hg branches')),
2607 "bundle":
2608 "bundle":
2608 (bundle,
2609 (bundle,
2609 [('f', 'force', None,
2610 [('f', 'force', None,
2610 _('run even when remote repository is unrelated')),
2611 _('run even when remote repository is unrelated')),
2611 ('r', 'rev', [],
2612 ('r', 'rev', [],
2612 _('a changeset you would like to bundle')),
2613 _('a changeset you would like to bundle')),
2613 ('', 'base', [],
2614 ('', 'base', [],
2614 _('a base changeset to specify instead of a destination')),
2615 _('a base changeset to specify instead of a destination')),
2615 ] + remoteopts,
2616 ] + remoteopts,
2616 _('hg bundle [-f] [-r REV]... [--base REV]... FILE [DEST]')),
2617 _('hg bundle [-f] [-r REV]... [--base REV]... FILE [DEST]')),
2617 "cat":
2618 "cat":
2618 (cat,
2619 (cat,
2619 [('o', 'output', '', _('print output to file with formatted name')),
2620 [('o', 'output', '', _('print output to file with formatted name')),
2620 ('r', 'rev', '', _('print the given revision')),
2621 ('r', 'rev', '', _('print the given revision')),
2621 ] + walkopts,
2622 ] + walkopts,
2622 _('hg cat [OPTION]... FILE...')),
2623 _('hg cat [OPTION]... FILE...')),
2623 "^clone":
2624 "^clone":
2624 (clone,
2625 (clone,
2625 [('U', 'noupdate', None, _('do not update the new working directory')),
2626 [('U', 'noupdate', None, _('do not update the new working directory')),
2626 ('r', 'rev', [],
2627 ('r', 'rev', [],
2627 _('a changeset you would like to have after cloning')),
2628 _('a changeset you would like to have after cloning')),
2628 ('', 'pull', None, _('use pull protocol to copy metadata')),
2629 ('', 'pull', None, _('use pull protocol to copy metadata')),
2629 ('', 'uncompressed', None,
2630 ('', 'uncompressed', None,
2630 _('use uncompressed transfer (fast over LAN)')),
2631 _('use uncompressed transfer (fast over LAN)')),
2631 ] + remoteopts,
2632 ] + remoteopts,
2632 _('hg clone [OPTION]... SOURCE [DEST]')),
2633 _('hg clone [OPTION]... SOURCE [DEST]')),
2633 "^commit|ci":
2634 "^commit|ci":
2634 (commit,
2635 (commit,
2635 [('A', 'addremove', None,
2636 [('A', 'addremove', None,
2636 _('mark new/missing files as added/removed before committing')),
2637 _('mark new/missing files as added/removed before committing')),
2637 ('d', 'date', '', _('record datecode as commit date')),
2638 ('d', 'date', '', _('record datecode as commit date')),
2638 ('u', 'user', '', _('record user as commiter')),
2639 ('u', 'user', '', _('record user as commiter')),
2639 ] + walkopts + commitopts,
2640 ] + walkopts + commitopts,
2640 _('hg commit [OPTION]... [FILE]...')),
2641 _('hg commit [OPTION]... [FILE]...')),
2641 "copy|cp":
2642 "copy|cp":
2642 (copy,
2643 (copy,
2643 [('A', 'after', None, _('record a copy that has already occurred')),
2644 [('A', 'after', None, _('record a copy that has already occurred')),
2644 ('f', 'force', None,
2645 ('f', 'force', None,
2645 _('forcibly copy over an existing managed file')),
2646 _('forcibly copy over an existing managed file')),
2646 ] + walkopts + dryrunopts,
2647 ] + walkopts + dryrunopts,
2647 _('hg copy [OPTION]... [SOURCE]... DEST')),
2648 _('hg copy [OPTION]... [SOURCE]... DEST')),
2648 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2649 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2649 "debugcomplete":
2650 "debugcomplete":
2650 (debugcomplete,
2651 (debugcomplete,
2651 [('o', 'options', None, _('show the command options'))],
2652 [('o', 'options', None, _('show the command options'))],
2652 _('debugcomplete [-o] CMD')),
2653 _('debugcomplete [-o] CMD')),
2653 "debuginstall": (debuginstall, [], _('debuginstall')),
2654 "debuginstall": (debuginstall, [], _('debuginstall')),
2654 "debugrebuildstate":
2655 "debugrebuildstate":
2655 (debugrebuildstate,
2656 (debugrebuildstate,
2656 [('r', 'rev', '', _('revision to rebuild to'))],
2657 [('r', 'rev', '', _('revision to rebuild to'))],
2657 _('debugrebuildstate [-r REV] [REV]')),
2658 _('debugrebuildstate [-r REV] [REV]')),
2658 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
2659 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
2659 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
2660 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
2660 "debugstate": (debugstate, [], _('debugstate')),
2661 "debugstate": (debugstate, [], _('debugstate')),
2661 "debugdate":
2662 "debugdate":
2662 (debugdate,
2663 (debugdate,
2663 [('e', 'extended', None, _('try extended date formats'))],
2664 [('e', 'extended', None, _('try extended date formats'))],
2664 _('debugdate [-e] DATE [RANGE]')),
2665 _('debugdate [-e] DATE [RANGE]')),
2665 "debugdata": (debugdata, [], _('debugdata FILE REV')),
2666 "debugdata": (debugdata, [], _('debugdata FILE REV')),
2666 "debugindex": (debugindex, [], _('debugindex FILE')),
2667 "debugindex": (debugindex, [], _('debugindex FILE')),
2667 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
2668 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
2668 "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
2669 "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
2669 "debugwalk": (debugwalk, walkopts, _('debugwalk [OPTION]... [FILE]...')),
2670 "debugwalk": (debugwalk, walkopts, _('debugwalk [OPTION]... [FILE]...')),
2670 "^diff":
2671 "^diff":
2671 (diff,
2672 (diff,
2672 [('r', 'rev', [], _('revision')),
2673 [('r', 'rev', [], _('revision')),
2673 ('a', 'text', None, _('treat all files as text')),
2674 ('a', 'text', None, _('treat all files as text')),
2674 ('p', 'show-function', None,
2675 ('p', 'show-function', None,
2675 _('show which function each change is in')),
2676 _('show which function each change is in')),
2676 ('g', 'git', None, _('use git extended diff format')),
2677 ('g', 'git', None, _('use git extended diff format')),
2677 ('', 'nodates', None, _("don't include dates in diff headers")),
2678 ('', 'nodates', None, _("don't include dates in diff headers")),
2678 ('w', 'ignore-all-space', None,
2679 ('w', 'ignore-all-space', None,
2679 _('ignore white space when comparing lines')),
2680 _('ignore white space when comparing lines')),
2680 ('b', 'ignore-space-change', None,
2681 ('b', 'ignore-space-change', None,
2681 _('ignore changes in the amount of white space')),
2682 _('ignore changes in the amount of white space')),
2682 ('B', 'ignore-blank-lines', None,
2683 ('B', 'ignore-blank-lines', None,
2683 _('ignore changes whose lines are all blank')),
2684 _('ignore changes whose lines are all blank')),
2684 ] + walkopts,
2685 ] + walkopts,
2685 _('hg diff [OPTION]... [-r REV1 [-r REV2]] [FILE]...')),
2686 _('hg diff [OPTION]... [-r REV1 [-r REV2]] [FILE]...')),
2686 "^export":
2687 "^export":
2687 (export,
2688 (export,
2688 [('o', 'output', '', _('print output to file with formatted name')),
2689 [('o', 'output', '', _('print output to file with formatted name')),
2689 ('a', 'text', None, _('treat all files as text')),
2690 ('a', 'text', None, _('treat all files as text')),
2690 ('g', 'git', None, _('use git extended diff format')),
2691 ('g', 'git', None, _('use git extended diff format')),
2691 ('', 'nodates', None, _("don't include dates in diff headers")),
2692 ('', 'nodates', None, _("don't include dates in diff headers")),
2692 ('', 'switch-parent', None, _('diff against the second parent'))],
2693 ('', 'switch-parent', None, _('diff against the second parent'))],
2693 _('hg export [OPTION]... [-o OUTFILESPEC] REV...')),
2694 _('hg export [OPTION]... [-o OUTFILESPEC] REV...')),
2694 "grep":
2695 "grep":
2695 (grep,
2696 (grep,
2696 [('0', 'print0', None, _('end fields with NUL')),
2697 [('0', 'print0', None, _('end fields with NUL')),
2697 ('', 'all', None, _('print all revisions that match')),
2698 ('', 'all', None, _('print all revisions that match')),
2698 ('f', 'follow', None,
2699 ('f', 'follow', None,
2699 _('follow changeset history, or file history across copies and renames')),
2700 _('follow changeset history, or file history across copies and renames')),
2700 ('i', 'ignore-case', None, _('ignore case when matching')),
2701 ('i', 'ignore-case', None, _('ignore case when matching')),
2701 ('l', 'files-with-matches', None,
2702 ('l', 'files-with-matches', None,
2702 _('print only filenames and revs that match')),
2703 _('print only filenames and revs that match')),
2703 ('n', 'line-number', None, _('print matching line numbers')),
2704 ('n', 'line-number', None, _('print matching line numbers')),
2704 ('r', 'rev', [], _('search in given revision range')),
2705 ('r', 'rev', [], _('search in given revision range')),
2705 ('u', 'user', None, _('print user who committed change')),
2706 ('u', 'user', None, _('print user who committed change')),
2706 ] + walkopts,
2707 ] + walkopts,
2707 _('hg grep [OPTION]... PATTERN [FILE]...')),
2708 _('hg grep [OPTION]... PATTERN [FILE]...')),
2708 "heads":
2709 "heads":
2709 (heads,
2710 (heads,
2710 [('', 'style', '', _('display using template map file')),
2711 [('', 'style', '', _('display using template map file')),
2711 ('r', 'rev', '', _('show only heads which are descendants of rev')),
2712 ('r', 'rev', '', _('show only heads which are descendants of rev')),
2712 ('', 'template', '', _('display with template'))],
2713 ('', 'template', '', _('display with template'))],
2713 _('hg heads [-r REV]')),
2714 _('hg heads [-r REV]')),
2714 "help": (help_, [], _('hg help [COMMAND]')),
2715 "help": (help_, [], _('hg help [COMMAND]')),
2715 "identify|id": (identify, [], _('hg identify')),
2716 "identify|id": (identify, [], _('hg identify')),
2716 "import|patch":
2717 "import|patch":
2717 (import_,
2718 (import_,
2718 [('p', 'strip', 1,
2719 [('p', 'strip', 1,
2719 _('directory strip option for patch. This has the same\n'
2720 _('directory strip option for patch. This has the same\n'
2720 'meaning as the corresponding patch option')),
2721 'meaning as the corresponding patch option')),
2721 ('b', 'base', '', _('base path')),
2722 ('b', 'base', '', _('base path')),
2722 ('f', 'force', None,
2723 ('f', 'force', None,
2723 _('skip check for outstanding uncommitted changes'))] + commitopts,
2724 _('skip check for outstanding uncommitted changes'))] + commitopts,
2724 _('hg import [-p NUM] [-m MESSAGE] [-f] PATCH...')),
2725 _('hg import [-p NUM] [-m MESSAGE] [-f] PATCH...')),
2725 "incoming|in": (incoming,
2726 "incoming|in": (incoming,
2726 [('M', 'no-merges', None, _('do not show merges')),
2727 [('M', 'no-merges', None, _('do not show merges')),
2727 ('f', 'force', None,
2728 ('f', 'force', None,
2728 _('run even when remote repository is unrelated')),
2729 _('run even when remote repository is unrelated')),
2729 ('', 'style', '', _('display using template map file')),
2730 ('', 'style', '', _('display using template map file')),
2730 ('n', 'newest-first', None, _('show newest record first')),
2731 ('n', 'newest-first', None, _('show newest record first')),
2731 ('', 'bundle', '', _('file to store the bundles into')),
2732 ('', 'bundle', '', _('file to store the bundles into')),
2732 ('p', 'patch', None, _('show patch')),
2733 ('p', 'patch', None, _('show patch')),
2733 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
2734 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
2734 ('', 'template', '', _('display with template')),
2735 ('', 'template', '', _('display with template')),
2735 ] + remoteopts,
2736 ] + remoteopts,
2736 _('hg incoming [-p] [-n] [-M] [-f] [-r REV]...'
2737 _('hg incoming [-p] [-n] [-M] [-f] [-r REV]...'
2737 ' [--bundle FILENAME] [SOURCE]')),
2738 ' [--bundle FILENAME] [SOURCE]')),
2738 "^init":
2739 "^init":
2739 (init,
2740 (init,
2740 remoteopts,
2741 remoteopts,
2741 _('hg init [-e CMD] [--remotecmd CMD] [DEST]')),
2742 _('hg init [-e CMD] [--remotecmd CMD] [DEST]')),
2742 "locate":
2743 "locate":
2743 (locate,
2744 (locate,
2744 [('r', 'rev', '', _('search the repository as it stood at rev')),
2745 [('r', 'rev', '', _('search the repository as it stood at rev')),
2745 ('0', 'print0', None,
2746 ('0', 'print0', None,
2746 _('end filenames with NUL, for use with xargs')),
2747 _('end filenames with NUL, for use with xargs')),
2747 ('f', 'fullpath', None,
2748 ('f', 'fullpath', None,
2748 _('print complete paths from the filesystem root')),
2749 _('print complete paths from the filesystem root')),
2749 ] + walkopts,
2750 ] + walkopts,
2750 _('hg locate [OPTION]... [PATTERN]...')),
2751 _('hg locate [OPTION]... [PATTERN]...')),
2751 "^log|history":
2752 "^log|history":
2752 (log,
2753 (log,
2753 [('f', 'follow', None,
2754 [('f', 'follow', None,
2754 _('follow changeset history, or file history across copies and renames')),
2755 _('follow changeset history, or file history across copies and renames')),
2755 ('', 'follow-first', None,
2756 ('', 'follow-first', None,
2756 _('only follow the first parent of merge changesets')),
2757 _('only follow the first parent of merge changesets')),
2757 ('d', 'date', '', _('show revs matching date spec')),
2758 ('d', 'date', '', _('show revs matching date spec')),
2758 ('C', 'copies', None, _('show copied files')),
2759 ('C', 'copies', None, _('show copied files')),
2759 ('k', 'keyword', [], _('search for a keyword')),
2760 ('k', 'keyword', [], _('search for a keyword')),
2760 ('l', 'limit', '', _('limit number of changes displayed')),
2761 ('l', 'limit', '', _('limit number of changes displayed')),
2761 ('r', 'rev', [], _('show the specified revision or range')),
2762 ('r', 'rev', [], _('show the specified revision or range')),
2762 ('', 'removed', None, _('include revs where files were removed')),
2763 ('', 'removed', None, _('include revs where files were removed')),
2763 ('M', 'no-merges', None, _('do not show merges')),
2764 ('M', 'no-merges', None, _('do not show merges')),
2764 ('', 'style', '', _('display using template map file')),
2765 ('', 'style', '', _('display using template map file')),
2765 ('m', 'only-merges', None, _('show only merges')),
2766 ('m', 'only-merges', None, _('show only merges')),
2766 ('p', 'patch', None, _('show patch')),
2767 ('p', 'patch', None, _('show patch')),
2767 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
2768 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
2768 ('', 'template', '', _('display with template')),
2769 ('', 'template', '', _('display with template')),
2769 ] + walkopts,
2770 ] + walkopts,
2770 _('hg log [OPTION]... [FILE]')),
2771 _('hg log [OPTION]... [FILE]')),
2771 "manifest": (manifest, [], _('hg manifest [REV]')),
2772 "manifest": (manifest, [], _('hg manifest [REV]')),
2772 "merge":
2773 "merge":
2773 (merge,
2774 (merge,
2774 [('f', 'force', None, _('force a merge with outstanding changes'))],
2775 [('f', 'force', None, _('force a merge with outstanding changes'))],
2775 _('hg merge [-f] [REV]')),
2776 _('hg merge [-f] [REV]')),
2776 "outgoing|out": (outgoing,
2777 "outgoing|out": (outgoing,
2777 [('M', 'no-merges', None, _('do not show merges')),
2778 [('M', 'no-merges', None, _('do not show merges')),
2778 ('f', 'force', None,
2779 ('f', 'force', None,
2779 _('run even when remote repository is unrelated')),
2780 _('run even when remote repository is unrelated')),
2780 ('p', 'patch', None, _('show patch')),
2781 ('p', 'patch', None, _('show patch')),
2781 ('', 'style', '', _('display using template map file')),
2782 ('', 'style', '', _('display using template map file')),
2782 ('r', 'rev', [], _('a specific revision you would like to push')),
2783 ('r', 'rev', [], _('a specific revision you would like to push')),
2783 ('n', 'newest-first', None, _('show newest record first')),
2784 ('n', 'newest-first', None, _('show newest record first')),
2784 ('', 'template', '', _('display with template')),
2785 ('', 'template', '', _('display with template')),
2785 ] + remoteopts,
2786 ] + remoteopts,
2786 _('hg outgoing [-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
2787 _('hg outgoing [-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
2787 "^parents":
2788 "^parents":
2788 (parents,
2789 (parents,
2789 [('r', 'rev', '', _('show parents from the specified rev')),
2790 [('r', 'rev', '', _('show parents from the specified rev')),
2790 ('', 'style', '', _('display using template map file')),
2791 ('', 'style', '', _('display using template map file')),
2791 ('', 'template', '', _('display with template'))],
2792 ('', 'template', '', _('display with template'))],
2792 _('hg parents [-r REV] [FILE]')),
2793 _('hg parents [-r REV] [FILE]')),
2793 "paths": (paths, [], _('hg paths [NAME]')),
2794 "paths": (paths, [], _('hg paths [NAME]')),
2794 "^pull":
2795 "^pull":
2795 (pull,
2796 (pull,
2796 [('u', 'update', None,
2797 [('u', 'update', None,
2797 _('update to new tip if changesets were pulled')),
2798 _('update to new tip if changesets were pulled')),
2798 ('f', 'force', None,
2799 ('f', 'force', None,
2799 _('run even when remote repository is unrelated')),
2800 _('run even when remote repository is unrelated')),
2800 ('r', 'rev', [],
2801 ('r', 'rev', [],
2801 _('a specific revision up to which you would like to pull')),
2802 _('a specific revision up to which you would like to pull')),
2802 ] + remoteopts,
2803 ] + remoteopts,
2803 _('hg pull [-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
2804 _('hg pull [-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
2804 "^push":
2805 "^push":
2805 (push,
2806 (push,
2806 [('f', 'force', None, _('force push')),
2807 [('f', 'force', None, _('force push')),
2807 ('r', 'rev', [], _('a specific revision you would like to push')),
2808 ('r', 'rev', [], _('a specific revision you would like to push')),
2808 ] + remoteopts,
2809 ] + remoteopts,
2809 _('hg push [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
2810 _('hg push [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
2810 "debugrawcommit|rawcommit":
2811 "debugrawcommit|rawcommit":
2811 (rawcommit,
2812 (rawcommit,
2812 [('p', 'parent', [], _('parent')),
2813 [('p', 'parent', [], _('parent')),
2813 ('d', 'date', '', _('date code')),
2814 ('d', 'date', '', _('date code')),
2814 ('u', 'user', '', _('user')),
2815 ('u', 'user', '', _('user')),
2815 ('F', 'files', '', _('file list'))
2816 ('F', 'files', '', _('file list'))
2816 ] + commitopts,
2817 ] + commitopts,
2817 _('hg debugrawcommit [OPTION]... [FILE]...')),
2818 _('hg debugrawcommit [OPTION]... [FILE]...')),
2818 "recover": (recover, [], _('hg recover')),
2819 "recover": (recover, [], _('hg recover')),
2819 "^remove|rm":
2820 "^remove|rm":
2820 (remove,
2821 (remove,
2821 [('A', 'after', None, _('record remove that has already occurred')),
2822 [('A', 'after', None, _('record remove that has already occurred')),
2822 ('f', 'force', None, _('remove file even if modified')),
2823 ('f', 'force', None, _('remove file even if modified')),
2823 ] + walkopts,
2824 ] + walkopts,
2824 _('hg remove [OPTION]... FILE...')),
2825 _('hg remove [OPTION]... FILE...')),
2825 "rename|mv":
2826 "rename|mv":
2826 (rename,
2827 (rename,
2827 [('A', 'after', None, _('record a rename that has already occurred')),
2828 [('A', 'after', None, _('record a rename that has already occurred')),
2828 ('f', 'force', None,
2829 ('f', 'force', None,
2829 _('forcibly copy over an existing managed file')),
2830 _('forcibly copy over an existing managed file')),
2830 ] + walkopts + dryrunopts,
2831 ] + walkopts + dryrunopts,
2831 _('hg rename [OPTION]... SOURCE... DEST')),
2832 _('hg rename [OPTION]... SOURCE... DEST')),
2832 "^revert":
2833 "^revert":
2833 (revert,
2834 (revert,
2834 [('a', 'all', None, _('revert all changes when no arguments given')),
2835 [('a', 'all', None, _('revert all changes when no arguments given')),
2835 ('d', 'date', '', _('tipmost revision matching date')),
2836 ('d', 'date', '', _('tipmost revision matching date')),
2836 ('r', 'rev', '', _('revision to revert to')),
2837 ('r', 'rev', '', _('revision to revert to')),
2837 ('', 'no-backup', None, _('do not save backup copies of files')),
2838 ('', 'no-backup', None, _('do not save backup copies of files')),
2838 ] + walkopts + dryrunopts,
2839 ] + walkopts + dryrunopts,
2839 _('hg revert [OPTION]... [-r REV] [NAME]...')),
2840 _('hg revert [OPTION]... [-r REV] [NAME]...')),
2840 "rollback": (rollback, [], _('hg rollback')),
2841 "rollback": (rollback, [], _('hg rollback')),
2841 "root": (root, [], _('hg root')),
2842 "root": (root, [], _('hg root')),
2842 "showconfig|debugconfig":
2843 "showconfig|debugconfig":
2843 (showconfig,
2844 (showconfig,
2844 [('u', 'untrusted', None, _('show untrusted configuration options'))],
2845 [('u', 'untrusted', None, _('show untrusted configuration options'))],
2845 _('showconfig [-u] [NAME]...')),
2846 _('showconfig [-u] [NAME]...')),
2846 "^serve":
2847 "^serve":
2847 (serve,
2848 (serve,
2848 [('A', 'accesslog', '', _('name of access log file to write to')),
2849 [('A', 'accesslog', '', _('name of access log file to write to')),
2849 ('d', 'daemon', None, _('run server in background')),
2850 ('d', 'daemon', None, _('run server in background')),
2850 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
2851 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
2851 ('E', 'errorlog', '', _('name of error log file to write to')),
2852 ('E', 'errorlog', '', _('name of error log file to write to')),
2852 ('p', 'port', 0, _('port to use (default: 8000)')),
2853 ('p', 'port', 0, _('port to use (default: 8000)')),
2853 ('a', 'address', '', _('address to use')),
2854 ('a', 'address', '', _('address to use')),
2854 ('n', 'name', '',
2855 ('n', 'name', '',
2855 _('name to show in web pages (default: working dir)')),
2856 _('name to show in web pages (default: working dir)')),
2856 ('', 'webdir-conf', '', _('name of the webdir config file'
2857 ('', 'webdir-conf', '', _('name of the webdir config file'
2857 ' (serve more than one repo)')),
2858 ' (serve more than one repo)')),
2858 ('', 'pid-file', '', _('name of file to write process ID to')),
2859 ('', 'pid-file', '', _('name of file to write process ID to')),
2859 ('', 'stdio', None, _('for remote clients')),
2860 ('', 'stdio', None, _('for remote clients')),
2860 ('t', 'templates', '', _('web templates to use')),
2861 ('t', 'templates', '', _('web templates to use')),
2861 ('', 'style', '', _('template style to use')),
2862 ('', 'style', '', _('template style to use')),
2862 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
2863 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
2863 _('hg serve [OPTION]...')),
2864 _('hg serve [OPTION]...')),
2864 "^status|st":
2865 "^status|st":
2865 (status,
2866 (status,
2866 [('A', 'all', None, _('show status of all files')),
2867 [('A', 'all', None, _('show status of all files')),
2867 ('m', 'modified', None, _('show only modified files')),
2868 ('m', 'modified', None, _('show only modified files')),
2868 ('a', 'added', None, _('show only added files')),
2869 ('a', 'added', None, _('show only added files')),
2869 ('r', 'removed', None, _('show only removed files')),
2870 ('r', 'removed', None, _('show only removed files')),
2870 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
2871 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
2871 ('c', 'clean', None, _('show only files without changes')),
2872 ('c', 'clean', None, _('show only files without changes')),
2872 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
2873 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
2873 ('i', 'ignored', None, _('show ignored files')),
2874 ('i', 'ignored', None, _('show ignored files')),
2874 ('n', 'no-status', None, _('hide status prefix')),
2875 ('n', 'no-status', None, _('hide status prefix')),
2875 ('C', 'copies', None, _('show source of copied files')),
2876 ('C', 'copies', None, _('show source of copied files')),
2876 ('0', 'print0', None,
2877 ('0', 'print0', None,
2877 _('end filenames with NUL, for use with xargs')),
2878 _('end filenames with NUL, for use with xargs')),
2878 ('', 'rev', [], _('show difference from revision')),
2879 ('', 'rev', [], _('show difference from revision')),
2879 ] + walkopts,
2880 ] + walkopts,
2880 _('hg status [OPTION]... [FILE]...')),
2881 _('hg status [OPTION]... [FILE]...')),
2881 "tag":
2882 "tag":
2882 (tag,
2883 (tag,
2883 [('l', 'local', None, _('make the tag local')),
2884 [('l', 'local', None, _('make the tag local')),
2884 ('m', 'message', '', _('message for tag commit log entry')),
2885 ('m', 'message', '', _('message for tag commit log entry')),
2885 ('d', 'date', '', _('record datecode as commit date')),
2886 ('d', 'date', '', _('record datecode as commit date')),
2886 ('u', 'user', '', _('record user as commiter')),
2887 ('u', 'user', '', _('record user as commiter')),
2887 ('r', 'rev', '', _('revision to tag'))],
2888 ('r', 'rev', '', _('revision to tag'))],
2888 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
2889 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
2889 "tags": (tags, [], _('hg tags')),
2890 "tags": (tags, [], _('hg tags')),
2890 "tip":
2891 "tip":
2891 (tip,
2892 (tip,
2892 [('', 'style', '', _('display using template map file')),
2893 [('', 'style', '', _('display using template map file')),
2893 ('p', 'patch', None, _('show patch')),
2894 ('p', 'patch', None, _('show patch')),
2894 ('', 'template', '', _('display with template'))],
2895 ('', 'template', '', _('display with template'))],
2895 _('hg tip [-p]')),
2896 _('hg tip [-p]')),
2896 "unbundle":
2897 "unbundle":
2897 (unbundle,
2898 (unbundle,
2898 [('u', 'update', None,
2899 [('u', 'update', None,
2899 _('update to new tip if changesets were unbundled'))],
2900 _('update to new tip if changesets were unbundled'))],
2900 _('hg unbundle [-u] FILE')),
2901 _('hg unbundle [-u] FILE')),
2901 "^update|up|checkout|co":
2902 "^update|up|checkout|co":
2902 (update,
2903 (update,
2903 [('C', 'clean', None, _('overwrite locally modified files')),
2904 [('C', 'clean', None, _('overwrite locally modified files')),
2904 ('d', 'date', '', _('tipmost revision matching date'))],
2905 ('d', 'date', '', _('tipmost revision matching date'))],
2905 _('hg update [-C] [-d DATE] [REV]')),
2906 _('hg update [-C] [-d DATE] [REV]')),
2906 "verify": (verify, [], _('hg verify')),
2907 "verify": (verify, [], _('hg verify')),
2907 "version": (version_, [], _('hg version')),
2908 "version": (version_, [], _('hg version')),
2908 }
2909 }
2909
2910
# Commands that can run without an existing repository.
norepo = ("clone init version help debugancestor debugcomplete debugdata "
          "debugindex debugindexdot debugdate debuginstall")

# Commands for which a repository is optional.
optionalrepo = "paths serve showconfig"
2913
2914
def findpossible(ui, cmd):
    """Map every command name matching cmd to (aliases, table entry).

    An exact alias match always wins; unless ui.strict is configured, a
    prefix of an alias matches as well.  Debug commands (and their
    aliases) are offered only when no ordinary command matched.
    """
    normal = {}
    debug = {}
    strict = ui.config("ui", "strict")
    for key in table.keys():
        names = key.lstrip("^").split("|")
        match = None
        if cmd in names:
            match = cmd
        elif not strict:
            for name in names:
                if name.startswith(cmd):
                    match = name
                    break
        if match is None:
            continue
        if names[0].startswith("debug") or match.startswith("debug"):
            debug[match] = (names, table[key])
        else:
            normal[match] = (names, table[key])

    # fall back to debug commands only when nothing ordinary matched
    return normal or debug
2942
2943
def findcmd(ui, cmd):
    """Return (aliases, command table entry) for command string.

    Raises AmbiguousCommand if cmd is a prefix of more than one command,
    and UnknownCommand if it matches none.
    """
    choice = findpossible(ui, cmd)

    # an exact command name (or alias) always wins over prefix matches
    if cmd in choice:
        return choice[cmd]

    if len(choice) > 1:
        clist = choice.keys()
        clist.sort()
        raise AmbiguousCommand(cmd, clist)

    if choice:
        return choice.values()[0]

    raise UnknownCommand(cmd)
2959
2960
def catchterm(*args):
    """Signal handler: turn a termination signal into SignalInterrupt."""
    raise util.SignalInterrupt
2962
2963
def run():
    """Command-line entry point: dispatch argv and exit with its status."""
    args = sys.argv[1:]
    sys.exit(dispatch(args))
2965
2966
class ParseError(Exception):
    """Raised when the command line cannot be parsed."""
2968
2969
2969 def parse(ui, args):
2970 def parse(ui, args):
2970 options = {}
2971 options = {}
2971 cmdoptions = {}
2972 cmdoptions = {}
2972
2973
2973 try:
2974 try:
2974 args = fancyopts.fancyopts(args, globalopts, options)
2975 args = fancyopts.fancyopts(args, globalopts, options)
2975 except fancyopts.getopt.GetoptError, inst:
2976 except fancyopts.getopt.GetoptError, inst:
2976 raise ParseError(None, inst)
2977 raise ParseError(None, inst)
2977
2978
2978 if args:
2979 if args:
2979 cmd, args = args[0], args[1:]
2980 cmd, args = args[0], args[1:]
2980 aliases, i = findcmd(ui, cmd)
2981 aliases, i = findcmd(ui, cmd)
2981 cmd = aliases[0]
2982 cmd = aliases[0]
2982 defaults = ui.config("defaults", cmd)
2983 defaults = ui.config("defaults", cmd)
2983 if defaults:
2984 if defaults:
2984 args = shlex.split(defaults) + args
2985 args = shlex.split(defaults) + args
2985 c = list(i[1])
2986 c = list(i[1])
2986 else:
2987 else:
2987 cmd = None
2988 cmd = None
2988 c = []
2989 c = []
2989
2990
2990 # combine global options into local
2991 # combine global options into local
2991 for o in globalopts:
2992 for o in globalopts:
2992 c.append((o[0], o[1], options[o[1]], o[3]))
2993 c.append((o[0], o[1], options[o[1]], o[3]))
2993
2994
2994 try:
2995 try:
2995 args = fancyopts.fancyopts(args, c, cmdoptions)
2996 args = fancyopts.fancyopts(args, c, cmdoptions)
2996 except fancyopts.getopt.GetoptError, inst:
2997 except fancyopts.getopt.GetoptError, inst:
2997 raise ParseError(cmd, inst)
2998 raise ParseError(cmd, inst)
2998
2999
2999 # separate global options back out
3000 # separate global options back out
3000 for o in globalopts:
3001 for o in globalopts:
3001 n = o[1]
3002 n = o[1]
3002 options[n] = cmdoptions[n]
3003 options[n] = cmdoptions[n]
3003 del cmdoptions[n]
3004 del cmdoptions[n]
3004
3005
3005 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
3006 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
3006
3007
# Maps extension name as configured -> loaded module name.
external = {}

def findext(name):
    '''return module with given extension name'''
    try:
        modname = external[name]
        return sys.modules[modname]
    except KeyError:
        pass
    # fall back: match a dotted or path suffix, or the module name itself
    for shortname, modname in external.items():
        if (shortname.endswith('.' + name) or
            shortname.endswith('/' + name) or
            modname == name):
            return sys.modules[modname]
    raise KeyError(name)
3018
3019
3019 def load_extensions(ui):
3020 def load_extensions(ui):
3020 added = []
3021 added = []
3021 for ext_name, load_from_name in ui.extensions():
3022 for ext_name, load_from_name in ui.extensions():
3022 if ext_name in external:
3023 if ext_name in external:
3023 continue
3024 continue
3024 try:
3025 try:
3025 if load_from_name:
3026 if load_from_name:
3026 # the module will be loaded in sys.modules
3027 # the module will be loaded in sys.modules
3027 # choose an unique name so that it doesn't
3028 # choose an unique name so that it doesn't
3028 # conflicts with other modules
3029 # conflicts with other modules
3029 module_name = "hgext_%s" % ext_name.replace('.', '_')
3030 module_name = "hgext_%s" % ext_name.replace('.', '_')
3030 mod = imp.load_source(module_name, load_from_name)
3031 mod = imp.load_source(module_name, load_from_name)
3031 else:
3032 else:
3032 def importh(name):
3033 def importh(name):
3033 mod = __import__(name)
3034 mod = __import__(name)
3034 components = name.split('.')
3035 components = name.split('.')
3035 for comp in components[1:]:
3036 for comp in components[1:]:
3036 mod = getattr(mod, comp)
3037 mod = getattr(mod, comp)
3037 return mod
3038 return mod
3038 try:
3039 try:
3039 mod = importh("hgext.%s" % ext_name)
3040 mod = importh("hgext.%s" % ext_name)
3040 except ImportError:
3041 except ImportError:
3041 mod = importh(ext_name)
3042 mod = importh(ext_name)
3042 external[ext_name] = mod.__name__
3043 external[ext_name] = mod.__name__
3043 added.append((mod, ext_name))
3044 added.append((mod, ext_name))
3044 except (util.SignalInterrupt, KeyboardInterrupt):
3045 except (util.SignalInterrupt, KeyboardInterrupt):
3045 raise
3046 raise
3046 except Exception, inst:
3047 except Exception, inst:
3047 ui.warn(_("*** failed to import extension %s: %s\n") %
3048 ui.warn(_("*** failed to import extension %s: %s\n") %
3048 (ext_name, inst))
3049 (ext_name, inst))
3049 if ui.print_exc():
3050 if ui.print_exc():
3050 return 1
3051 return 1
3051
3052
3052 for mod, name in added:
3053 for mod, name in added:
3053 uisetup = getattr(mod, 'uisetup', None)
3054 uisetup = getattr(mod, 'uisetup', None)
3054 if uisetup:
3055 if uisetup:
3055 uisetup(ui)
3056 uisetup(ui)
3056 cmdtable = getattr(mod, 'cmdtable', {})
3057 cmdtable = getattr(mod, 'cmdtable', {})
3057 for t in cmdtable:
3058 for t in cmdtable:
3058 if t in table:
3059 if t in table:
3059 ui.warn(_("module %s overrides %s\n") % (name, t))
3060 ui.warn(_("module %s overrides %s\n") % (name, t))
3060 table.update(cmdtable)
3061 table.update(cmdtable)
3061
3062
3062 def parseconfig(config):
3063 def parseconfig(config):
3063 """parse the --config options from the command line"""
3064 """parse the --config options from the command line"""
3064 parsed = []
3065 parsed = []
3065 for cfg in config:
3066 for cfg in config:
3066 try:
3067 try:
3067 name, value = cfg.split('=', 1)
3068 name, value = cfg.split('=', 1)
3068 section, name = name.split('.', 1)
3069 section, name = name.split('.', 1)
3069 if not section or not name:
3070 if not section or not name:
3070 raise IndexError
3071 raise IndexError
3071 parsed.append((section, name, value))
3072 parsed.append((section, name, value))
3072 except (IndexError, ValueError):
3073 except (IndexError, ValueError):
3073 raise util.Abort(_('malformed --config option: %s') % cfg)
3074 raise util.Abort(_('malformed --config option: %s') % cfg)
3074 return parsed
3075 return parsed
3075
3076
3076 def dispatch(args):
3077 def dispatch(args):
3077 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
3078 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
3078 num = getattr(signal, name, None)
3079 num = getattr(signal, name, None)
3079 if num: signal.signal(num, catchterm)
3080 if num: signal.signal(num, catchterm)
3080
3081
3081 try:
3082 try:
3082 u = ui.ui(traceback='--traceback' in sys.argv[1:])
3083 u = ui.ui(traceback='--traceback' in sys.argv[1:])
3083 except util.Abort, inst:
3084 except util.Abort, inst:
3084 sys.stderr.write(_("abort: %s\n") % inst)
3085 sys.stderr.write(_("abort: %s\n") % inst)
3085 return -1
3086 return -1
3086
3087
3087 load_extensions(u)
3088 load_extensions(u)
3088 u.addreadhook(load_extensions)
3089 u.addreadhook(load_extensions)
3089
3090
3090 try:
3091 try:
3091 cmd, func, args, options, cmdoptions = parse(u, args)
3092 cmd, func, args, options, cmdoptions = parse(u, args)
3092 if options["encoding"]:
3093 if options["encoding"]:
3093 util._encoding = options["encoding"]
3094 util._encoding = options["encoding"]
3094 if options["encodingmode"]:
3095 if options["encodingmode"]:
3095 util._encodingmode = options["encodingmode"]
3096 util._encodingmode = options["encodingmode"]
3096 if options["time"]:
3097 if options["time"]:
3097 def get_times():
3098 def get_times():
3098 t = os.times()
3099 t = os.times()
3099 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
3100 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
3100 t = (t[0], t[1], t[2], t[3], time.clock())
3101 t = (t[0], t[1], t[2], t[3], time.clock())
3101 return t
3102 return t
3102 s = get_times()
3103 s = get_times()
3103 def print_time():
3104 def print_time():
3104 t = get_times()
3105 t = get_times()
3105 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
3106 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
3106 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
3107 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
3107 atexit.register(print_time)
3108 atexit.register(print_time)
3108
3109
3109 # enter the debugger before command execution
3110 # enter the debugger before command execution
3110 if options['debugger']:
3111 if options['debugger']:
3111 pdb.set_trace()
3112 pdb.set_trace()
3112
3113
3113 try:
3114 try:
3114 if options['cwd']:
3115 if options['cwd']:
3115 os.chdir(options['cwd'])
3116 os.chdir(options['cwd'])
3116
3117
3117 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3118 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3118 not options["noninteractive"], options["traceback"],
3119 not options["noninteractive"], options["traceback"],
3119 parseconfig(options["config"]))
3120 parseconfig(options["config"]))
3120
3121
3121 path = u.expandpath(options["repository"]) or ""
3122 path = u.expandpath(options["repository"]) or ""
3122 repo = path and hg.repository(u, path=path) or None
3123 repo = path and hg.repository(u, path=path) or None
3123 if repo and not repo.local():
3124 if repo and not repo.local():
3124 raise util.Abort(_("repository '%s' is not local") % path)
3125 raise util.Abort(_("repository '%s' is not local") % path)
3125
3126
3126 if options['help']:
3127 if options['help']:
3127 return help_(u, cmd, options['version'])
3128 return help_(u, cmd, options['version'])
3128 elif options['version']:
3129 elif options['version']:
3129 return version_(u)
3130 return version_(u)
3130 elif not cmd:
3131 elif not cmd:
3131 return help_(u, 'shortlist')
3132 return help_(u, 'shortlist')
3132
3133
3133 if cmd not in norepo.split():
3134 if cmd not in norepo.split():
3134 try:
3135 try:
3135 if not repo:
3136 if not repo:
3136 repo = hg.repository(u, path=path)
3137 repo = hg.repository(u, path=path)
3137 u = repo.ui
3138 u = repo.ui
3138 for name in external.itervalues():
3139 for name in external.itervalues():
3139 mod = sys.modules[name]
3140 mod = sys.modules[name]
3140 if hasattr(mod, 'reposetup'):
3141 if hasattr(mod, 'reposetup'):
3141 mod.reposetup(u, repo)
3142 mod.reposetup(u, repo)
3142 hg.repo_setup_hooks.append(mod.reposetup)
3143 hg.repo_setup_hooks.append(mod.reposetup)
3143 except hg.RepoError:
3144 except hg.RepoError:
3144 if cmd not in optionalrepo.split():
3145 if cmd not in optionalrepo.split():
3145 raise
3146 raise
3146 d = lambda: func(u, repo, *args, **cmdoptions)
3147 d = lambda: func(u, repo, *args, **cmdoptions)
3147 else:
3148 else:
3148 d = lambda: func(u, *args, **cmdoptions)
3149 d = lambda: func(u, *args, **cmdoptions)
3149
3150
3150 try:
3151 try:
3151 if options['profile']:
3152 if options['profile']:
3152 import hotshot, hotshot.stats
3153 import hotshot, hotshot.stats
3153 prof = hotshot.Profile("hg.prof")
3154 prof = hotshot.Profile("hg.prof")
3154 try:
3155 try:
3155 try:
3156 try:
3156 return prof.runcall(d)
3157 return prof.runcall(d)
3157 except:
3158 except:
3158 try:
3159 try:
3159 u.warn(_('exception raised - generating '
3160 u.warn(_('exception raised - generating '
3160 'profile anyway\n'))
3161 'profile anyway\n'))
3161 except:
3162 except:
3162 pass
3163 pass
3163 raise
3164 raise
3164 finally:
3165 finally:
3165 prof.close()
3166 prof.close()
3166 stats = hotshot.stats.load("hg.prof")
3167 stats = hotshot.stats.load("hg.prof")
3167 stats.strip_dirs()
3168 stats.strip_dirs()
3168 stats.sort_stats('time', 'calls')
3169 stats.sort_stats('time', 'calls')
3169 stats.print_stats(40)
3170 stats.print_stats(40)
3170 elif options['lsprof']:
3171 elif options['lsprof']:
3171 try:
3172 try:
3172 from mercurial import lsprof
3173 from mercurial import lsprof
3173 except ImportError:
3174 except ImportError:
3174 raise util.Abort(_(
3175 raise util.Abort(_(
3175 'lsprof not available - install from '
3176 'lsprof not available - install from '
3176 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
3177 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
3177 p = lsprof.Profiler()
3178 p = lsprof.Profiler()
3178 p.enable(subcalls=True)
3179 p.enable(subcalls=True)
3179 try:
3180 try:
3180 return d()
3181 return d()
3181 finally:
3182 finally:
3182 p.disable()
3183 p.disable()
3183 stats = lsprof.Stats(p.getstats())
3184 stats = lsprof.Stats(p.getstats())
3184 stats.sort()
3185 stats.sort()
3185 stats.pprint(top=10, file=sys.stderr, climit=5)
3186 stats.pprint(top=10, file=sys.stderr, climit=5)
3186 else:
3187 else:
3187 return d()
3188 return d()
3188 finally:
3189 finally:
3189 u.flush()
3190 u.flush()
3190 except:
3191 except:
3191 # enter the debugger when we hit an exception
3192 # enter the debugger when we hit an exception
3192 if options['debugger']:
3193 if options['debugger']:
3193 pdb.post_mortem(sys.exc_info()[2])
3194 pdb.post_mortem(sys.exc_info()[2])
3194 u.print_exc()
3195 u.print_exc()
3195 raise
3196 raise
3196 except ParseError, inst:
3197 except ParseError, inst:
3197 if inst.args[0]:
3198 if inst.args[0]:
3198 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
3199 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
3199 help_(u, inst.args[0])
3200 help_(u, inst.args[0])
3200 else:
3201 else:
3201 u.warn(_("hg: %s\n") % inst.args[1])
3202 u.warn(_("hg: %s\n") % inst.args[1])
3202 help_(u, 'shortlist')
3203 help_(u, 'shortlist')
3203 except AmbiguousCommand, inst:
3204 except AmbiguousCommand, inst:
3204 u.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
3205 u.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
3205 (inst.args[0], " ".join(inst.args[1])))
3206 (inst.args[0], " ".join(inst.args[1])))
3206 except UnknownCommand, inst:
3207 except UnknownCommand, inst:
3207 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
3208 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
3208 help_(u, 'shortlist')
3209 help_(u, 'shortlist')
3209 except hg.RepoError, inst:
3210 except hg.RepoError, inst:
3210 u.warn(_("abort: %s!\n") % inst)
3211 u.warn(_("abort: %s!\n") % inst)
3211 except lock.LockHeld, inst:
3212 except lock.LockHeld, inst:
3212 if inst.errno == errno.ETIMEDOUT:
3213 if inst.errno == errno.ETIMEDOUT:
3213 reason = _('timed out waiting for lock held by %s') % inst.locker
3214 reason = _('timed out waiting for lock held by %s') % inst.locker
3214 else:
3215 else:
3215 reason = _('lock held by %s') % inst.locker
3216 reason = _('lock held by %s') % inst.locker
3216 u.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
3217 u.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
3217 except lock.LockUnavailable, inst:
3218 except lock.LockUnavailable, inst:
3218 u.warn(_("abort: could not lock %s: %s\n") %
3219 u.warn(_("abort: could not lock %s: %s\n") %
3219 (inst.desc or inst.filename, inst.strerror))
3220 (inst.desc or inst.filename, inst.strerror))
3220 except revlog.RevlogError, inst:
3221 except revlog.RevlogError, inst:
3221 u.warn(_("abort: %s!\n") % inst)
3222 u.warn(_("abort: %s!\n") % inst)
3222 except util.SignalInterrupt:
3223 except util.SignalInterrupt:
3223 u.warn(_("killed!\n"))
3224 u.warn(_("killed!\n"))
3224 except KeyboardInterrupt:
3225 except KeyboardInterrupt:
3225 try:
3226 try:
3226 u.warn(_("interrupted!\n"))
3227 u.warn(_("interrupted!\n"))
3227 except IOError, inst:
3228 except IOError, inst:
3228 if inst.errno == errno.EPIPE:
3229 if inst.errno == errno.EPIPE:
3229 if u.debugflag:
3230 if u.debugflag:
3230 u.warn(_("\nbroken pipe\n"))
3231 u.warn(_("\nbroken pipe\n"))
3231 else:
3232 else:
3232 raise
3233 raise
3233 except IOError, inst:
3234 except IOError, inst:
3234 if hasattr(inst, "code"):
3235 if hasattr(inst, "code"):
3235 u.warn(_("abort: %s\n") % inst)
3236 u.warn(_("abort: %s\n") % inst)
3236 elif hasattr(inst, "reason"):
3237 elif hasattr(inst, "reason"):
3237 u.warn(_("abort: error: %s\n") % inst.reason[1])
3238 u.warn(_("abort: error: %s\n") % inst.reason[1])
3238 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
3239 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
3239 if u.debugflag:
3240 if u.debugflag:
3240 u.warn(_("broken pipe\n"))
3241 u.warn(_("broken pipe\n"))
3241 elif getattr(inst, "strerror", None):
3242 elif getattr(inst, "strerror", None):
3242 if getattr(inst, "filename", None):
3243 if getattr(inst, "filename", None):
3243 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3244 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3244 else:
3245 else:
3245 u.warn(_("abort: %s\n") % inst.strerror)
3246 u.warn(_("abort: %s\n") % inst.strerror)
3246 else:
3247 else:
3247 raise
3248 raise
3248 except OSError, inst:
3249 except OSError, inst:
3249 if getattr(inst, "filename", None):
3250 if getattr(inst, "filename", None):
3250 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3251 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3251 else:
3252 else:
3252 u.warn(_("abort: %s\n") % inst.strerror)
3253 u.warn(_("abort: %s\n") % inst.strerror)
3253 except util.UnexpectedOutput, inst:
3254 except util.UnexpectedOutput, inst:
3254 u.warn(_("abort: %s") % inst[0])
3255 u.warn(_("abort: %s") % inst[0])
3255 if not isinstance(inst[1], basestring):
3256 if not isinstance(inst[1], basestring):
3256 u.warn(" %r\n" % (inst[1],))
3257 u.warn(" %r\n" % (inst[1],))
3257 elif not inst[1]:
3258 elif not inst[1]:
3258 u.warn(_(" empty string\n"))
3259 u.warn(_(" empty string\n"))
3259 else:
3260 else:
3260 u.warn("\n%r\n" % util.ellipsis(inst[1]))
3261 u.warn("\n%r\n" % util.ellipsis(inst[1]))
3261 except util.Abort, inst:
3262 except util.Abort, inst:
3262 u.warn(_("abort: %s\n") % inst)
3263 u.warn(_("abort: %s\n") % inst)
3263 except TypeError, inst:
3264 except TypeError, inst:
3264 # was this an argument error?
3265 # was this an argument error?
3265 tb = traceback.extract_tb(sys.exc_info()[2])
3266 tb = traceback.extract_tb(sys.exc_info()[2])
3266 if len(tb) > 2: # no
3267 if len(tb) > 2: # no
3267 raise
3268 raise
3268 u.debug(inst, "\n")
3269 u.debug(inst, "\n")
3269 u.warn(_("%s: invalid arguments\n") % cmd)
3270 u.warn(_("%s: invalid arguments\n") % cmd)
3270 help_(u, cmd)
3271 help_(u, cmd)
3271 except SystemExit, inst:
3272 except SystemExit, inst:
3272 # Commands shouldn't sys.exit directly, but give a return code.
3273 # Commands shouldn't sys.exit directly, but give a return code.
3273 # Just in case catch this and and pass exit code to caller.
3274 # Just in case catch this and and pass exit code to caller.
3274 return inst.code
3275 return inst.code
3275 except:
3276 except:
3276 u.warn(_("** unknown exception encountered, details follow\n"))
3277 u.warn(_("** unknown exception encountered, details follow\n"))
3277 u.warn(_("** report bug details to "
3278 u.warn(_("** report bug details to "
3278 "http://www.selenic.com/mercurial/bts\n"))
3279 "http://www.selenic.com/mercurial/bts\n"))
3279 u.warn(_("** or mercurial@selenic.com\n"))
3280 u.warn(_("** or mercurial@selenic.com\n"))
3280 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
3281 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
3281 % version.get_version())
3282 % version.get_version())
3282 raise
3283 raise
3283
3284
3284 return -1
3285 return -1
@@ -1,508 +1,505 b''
1 # context.py - changeset and file context objects for mercurial
1 # context.py - changeset and file context objects for mercurial
2 #
2 #
3 # Copyright 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import *
8 from node import *
9 from i18n import _
9 from i18n import _
10 import ancestor, bdiff, repo, revlog, util, os
10 import ancestor, bdiff, repo, revlog, util, os
11
11
12 class changectx(object):
12 class changectx(object):
13 """A changecontext object makes access to data related to a particular
13 """A changecontext object makes access to data related to a particular
14 changeset convenient."""
14 changeset convenient."""
15 def __init__(self, repo, changeid=None):
15 def __init__(self, repo, changeid=None):
16 """changeid is a revision number, node, or tag"""
16 """changeid is a revision number, node, or tag"""
17 self._repo = repo
17 self._repo = repo
18
18
19 if not changeid and changeid != 0:
19 if not changeid and changeid != 0:
20 p1, p2 = self._repo.dirstate.parents()
20 p1, p2 = self._repo.dirstate.parents()
21 self._rev = self._repo.changelog.rev(p1)
21 self._rev = self._repo.changelog.rev(p1)
22 if self._rev == -1:
22 if self._rev == -1:
23 changeid = 'tip'
23 changeid = 'tip'
24 else:
24 else:
25 self._node = p1
25 self._node = p1
26 return
26 return
27
27
28 self._node = self._repo.lookup(changeid)
28 self._node = self._repo.lookup(changeid)
29 self._rev = self._repo.changelog.rev(self._node)
29 self._rev = self._repo.changelog.rev(self._node)
30
30
31 def __str__(self):
31 def __str__(self):
32 return short(self.node())
32 return short(self.node())
33
33
34 def __repr__(self):
34 def __repr__(self):
35 return "<changectx %s>" % str(self)
35 return "<changectx %s>" % str(self)
36
36
37 def __eq__(self, other):
37 def __eq__(self, other):
38 try:
38 try:
39 return self._rev == other._rev
39 return self._rev == other._rev
40 except AttributeError:
40 except AttributeError:
41 return False
41 return False
42
42
43 def __nonzero__(self):
43 def __nonzero__(self):
44 return self._rev != nullrev
44 return self._rev != nullrev
45
45
46 def __getattr__(self, name):
46 def __getattr__(self, name):
47 if name == '_changeset':
47 if name == '_changeset':
48 self._changeset = self._repo.changelog.read(self.node())
48 self._changeset = self._repo.changelog.read(self.node())
49 return self._changeset
49 return self._changeset
50 elif name == '_manifest':
50 elif name == '_manifest':
51 self._manifest = self._repo.manifest.read(self._changeset[0])
51 self._manifest = self._repo.manifest.read(self._changeset[0])
52 return self._manifest
52 return self._manifest
53 elif name == '_manifestdelta':
53 elif name == '_manifestdelta':
54 md = self._repo.manifest.readdelta(self._changeset[0])
54 md = self._repo.manifest.readdelta(self._changeset[0])
55 self._manifestdelta = md
55 self._manifestdelta = md
56 return self._manifestdelta
56 return self._manifestdelta
57 else:
57 else:
58 raise AttributeError, name
58 raise AttributeError, name
59
59
60 def changeset(self): return self._changeset
60 def changeset(self): return self._changeset
61 def manifest(self): return self._manifest
61 def manifest(self): return self._manifest
62
62
63 def rev(self): return self._rev
63 def rev(self): return self._rev
64 def node(self): return self._node
64 def node(self): return self._node
65 def user(self): return self._changeset[1]
65 def user(self): return self._changeset[1]
66 def date(self): return self._changeset[2]
66 def date(self): return self._changeset[2]
67 def files(self): return self._changeset[3]
67 def files(self): return self._changeset[3]
68 def description(self): return self._changeset[4]
68 def description(self): return self._changeset[4]
69 def branch(self): return self._changeset[5].get("branch", "")
69 def branch(self): return self._changeset[5].get("branch", "")
70
70
71 def parents(self):
71 def parents(self):
72 """return contexts for each parent changeset"""
72 """return contexts for each parent changeset"""
73 p = self._repo.changelog.parents(self._node)
73 p = self._repo.changelog.parents(self._node)
74 return [changectx(self._repo, x) for x in p]
74 return [changectx(self._repo, x) for x in p]
75
75
76 def children(self):
76 def children(self):
77 """return contexts for each child changeset"""
77 """return contexts for each child changeset"""
78 c = self._repo.changelog.children(self._node)
78 c = self._repo.changelog.children(self._node)
79 return [changectx(self._repo, x) for x in c]
79 return [changectx(self._repo, x) for x in c]
80
80
81 def filenode(self, path):
81 def filenode(self, path):
82 if '_manifest' in self.__dict__:
82 if '_manifest' in self.__dict__:
83 try:
83 try:
84 return self._manifest[path]
84 return self._manifest[path]
85 except KeyError:
85 except KeyError:
86 raise repo.LookupError(_("'%s' not found in manifest") % path)
86 raise revlog.LookupError(_("'%s' not found in manifest") % path)
87 if '_manifestdelta' in self.__dict__ or path in self.files():
87 if '_manifestdelta' in self.__dict__ or path in self.files():
88 if path in self._manifestdelta:
88 if path in self._manifestdelta:
89 return self._manifestdelta[path]
89 return self._manifestdelta[path]
90 node, flag = self._repo.manifest.find(self._changeset[0], path)
90 node, flag = self._repo.manifest.find(self._changeset[0], path)
91 if not node:
91 if not node:
92 raise repo.LookupError(_("'%s' not found in manifest") % path)
92 raise revlog.LookupError(_("'%s' not found in manifest") % path)
93
93
94 return node
94 return node
95
95
96 def filectx(self, path, fileid=None):
96 def filectx(self, path, fileid=None):
97 """get a file context from this changeset"""
97 """get a file context from this changeset"""
98 if fileid is None:
98 if fileid is None:
99 fileid = self.filenode(path)
99 fileid = self.filenode(path)
100 return filectx(self._repo, path, fileid=fileid, changectx=self)
100 return filectx(self._repo, path, fileid=fileid, changectx=self)
101
101
102 def filectxs(self):
102 def filectxs(self):
103 """generate a file context for each file in this changeset's
103 """generate a file context for each file in this changeset's
104 manifest"""
104 manifest"""
105 mf = self.manifest()
105 mf = self.manifest()
106 m = mf.keys()
106 m = mf.keys()
107 m.sort()
107 m.sort()
108 for f in m:
108 for f in m:
109 yield self.filectx(f, fileid=mf[f])
109 yield self.filectx(f, fileid=mf[f])
110
110
111 def ancestor(self, c2):
111 def ancestor(self, c2):
112 """
112 """
113 return the ancestor context of self and c2
113 return the ancestor context of self and c2
114 """
114 """
115 n = self._repo.changelog.ancestor(self._node, c2._node)
115 n = self._repo.changelog.ancestor(self._node, c2._node)
116 return changectx(self._repo, n)
116 return changectx(self._repo, n)
117
117
118 class filectx(object):
118 class filectx(object):
119 """A filecontext object makes access to data related to a particular
119 """A filecontext object makes access to data related to a particular
120 filerevision convenient."""
120 filerevision convenient."""
121 def __init__(self, repo, path, changeid=None, fileid=None,
121 def __init__(self, repo, path, changeid=None, fileid=None,
122 filelog=None, changectx=None):
122 filelog=None, changectx=None):
123 """changeid can be a changeset revision, node, or tag.
123 """changeid can be a changeset revision, node, or tag.
124 fileid can be a file revision or node."""
124 fileid can be a file revision or node."""
125 self._repo = repo
125 self._repo = repo
126 self._path = path
126 self._path = path
127
127
128 assert changeid is not None or fileid is not None
128 assert changeid is not None or fileid is not None
129
129
130 if filelog:
130 if filelog:
131 self._filelog = filelog
131 self._filelog = filelog
132 if changectx:
132 if changectx:
133 self._changectx = changectx
133 self._changectx = changectx
134 self._changeid = changectx.node()
134 self._changeid = changectx.node()
135
135
136 if fileid is None:
136 if fileid is None:
137 self._changeid = changeid
137 self._changeid = changeid
138 else:
138 else:
139 self._fileid = fileid
139 self._fileid = fileid
140
140
141 def __getattr__(self, name):
141 def __getattr__(self, name):
142 if name == '_changectx':
142 if name == '_changectx':
143 self._changectx = changectx(self._repo, self._changeid)
143 self._changectx = changectx(self._repo, self._changeid)
144 return self._changectx
144 return self._changectx
145 elif name == '_filelog':
145 elif name == '_filelog':
146 self._filelog = self._repo.file(self._path)
146 self._filelog = self._repo.file(self._path)
147 return self._filelog
147 return self._filelog
148 elif name == '_changeid':
148 elif name == '_changeid':
149 self._changeid = self._filelog.linkrev(self._filenode)
149 self._changeid = self._filelog.linkrev(self._filenode)
150 return self._changeid
150 return self._changeid
151 elif name == '_filenode':
151 elif name == '_filenode':
152 try:
152 if '_fileid' in self.__dict__:
153 if '_fileid' in self.__dict__:
153 self._filenode = self._filelog.lookup(self._fileid)
154 self._filenode = self._filelog.lookup(self._fileid)
154 else:
155 else:
155 self._filenode = self._changectx.filenode(self._path)
156 self._filenode = self._changectx.filenode(self._path)
157 except revlog.RevlogError, inst:
158 raise repo.LookupError(str(inst))
159 return self._filenode
156 return self._filenode
160 elif name == '_filerev':
157 elif name == '_filerev':
161 self._filerev = self._filelog.rev(self._filenode)
158 self._filerev = self._filelog.rev(self._filenode)
162 return self._filerev
159 return self._filerev
163 else:
160 else:
164 raise AttributeError, name
161 raise AttributeError, name
165
162
166 def __nonzero__(self):
163 def __nonzero__(self):
167 try:
164 try:
168 n = self._filenode
165 n = self._filenode
169 return True
166 return True
170 except repo.LookupError:
167 except revlog.LookupError:
171 # file is missing
168 # file is missing
172 return False
169 return False
173
170
174 def __str__(self):
171 def __str__(self):
175 return "%s@%s" % (self.path(), short(self.node()))
172 return "%s@%s" % (self.path(), short(self.node()))
176
173
177 def __repr__(self):
174 def __repr__(self):
178 return "<filectx %s>" % str(self)
175 return "<filectx %s>" % str(self)
179
176
180 def __eq__(self, other):
177 def __eq__(self, other):
181 try:
178 try:
182 return (self._path == other._path
179 return (self._path == other._path
183 and self._changeid == other._changeid)
180 and self._changeid == other._changeid)
184 except AttributeError:
181 except AttributeError:
185 return False
182 return False
186
183
187 def filectx(self, fileid):
184 def filectx(self, fileid):
188 '''opens an arbitrary revision of the file without
185 '''opens an arbitrary revision of the file without
189 opening a new filelog'''
186 opening a new filelog'''
190 return filectx(self._repo, self._path, fileid=fileid,
187 return filectx(self._repo, self._path, fileid=fileid,
191 filelog=self._filelog)
188 filelog=self._filelog)
192
189
193 def filerev(self): return self._filerev
190 def filerev(self): return self._filerev
194 def filenode(self): return self._filenode
191 def filenode(self): return self._filenode
195 def filelog(self): return self._filelog
192 def filelog(self): return self._filelog
196
193
197 def rev(self):
194 def rev(self):
198 if '_changectx' in self.__dict__:
195 if '_changectx' in self.__dict__:
199 return self._changectx.rev()
196 return self._changectx.rev()
200 return self._filelog.linkrev(self._filenode)
197 return self._filelog.linkrev(self._filenode)
201
198
202 def node(self): return self._changectx.node()
199 def node(self): return self._changectx.node()
203 def user(self): return self._changectx.user()
200 def user(self): return self._changectx.user()
204 def date(self): return self._changectx.date()
201 def date(self): return self._changectx.date()
205 def files(self): return self._changectx.files()
202 def files(self): return self._changectx.files()
206 def description(self): return self._changectx.description()
203 def description(self): return self._changectx.description()
207 def branch(self): return self._changectx.branch()
204 def branch(self): return self._changectx.branch()
208 def manifest(self): return self._changectx.manifest()
205 def manifest(self): return self._changectx.manifest()
209 def changectx(self): return self._changectx
206 def changectx(self): return self._changectx
210
207
211 def data(self): return self._filelog.read(self._filenode)
208 def data(self): return self._filelog.read(self._filenode)
212 def renamed(self): return self._filelog.renamed(self._filenode)
209 def renamed(self): return self._filelog.renamed(self._filenode)
213 def path(self): return self._path
210 def path(self): return self._path
214 def size(self): return self._filelog.size(self._filerev)
211 def size(self): return self._filelog.size(self._filerev)
215
212
216 def cmp(self, text): return self._filelog.cmp(self._filenode, text)
213 def cmp(self, text): return self._filelog.cmp(self._filenode, text)
217
214
218 def parents(self):
215 def parents(self):
219 p = self._path
216 p = self._path
220 fl = self._filelog
217 fl = self._filelog
221 pl = [(p, n, fl) for n in self._filelog.parents(self._filenode)]
218 pl = [(p, n, fl) for n in self._filelog.parents(self._filenode)]
222
219
223 r = self.renamed()
220 r = self.renamed()
224 if r:
221 if r:
225 pl[0] = (r[0], r[1], None)
222 pl[0] = (r[0], r[1], None)
226
223
227 return [filectx(self._repo, p, fileid=n, filelog=l)
224 return [filectx(self._repo, p, fileid=n, filelog=l)
228 for p,n,l in pl if n != nullid]
225 for p,n,l in pl if n != nullid]
229
226
230 def children(self):
227 def children(self):
231 # hard for renames
228 # hard for renames
232 c = self._filelog.children(self._filenode)
229 c = self._filelog.children(self._filenode)
233 return [filectx(self._repo, self._path, fileid=x,
230 return [filectx(self._repo, self._path, fileid=x,
234 filelog=self._filelog) for x in c]
231 filelog=self._filelog) for x in c]
235
232
236 def annotate(self, follow=False):
233 def annotate(self, follow=False):
237 '''returns a list of tuples of (ctx, line) for each line
234 '''returns a list of tuples of (ctx, line) for each line
238 in the file, where ctx is the filectx of the node where
235 in the file, where ctx is the filectx of the node where
239 that line was last changed'''
236 that line was last changed'''
240
237
241 def decorate(text, rev):
238 def decorate(text, rev):
242 return ([rev] * len(text.splitlines()), text)
239 return ([rev] * len(text.splitlines()), text)
243
240
244 def pair(parent, child):
241 def pair(parent, child):
245 for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
242 for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
246 child[0][b1:b2] = parent[0][a1:a2]
243 child[0][b1:b2] = parent[0][a1:a2]
247 return child
244 return child
248
245
249 getlog = util.cachefunc(lambda x: self._repo.file(x))
246 getlog = util.cachefunc(lambda x: self._repo.file(x))
250 def getctx(path, fileid):
247 def getctx(path, fileid):
251 log = path == self._path and self._filelog or getlog(path)
248 log = path == self._path and self._filelog or getlog(path)
252 return filectx(self._repo, path, fileid=fileid, filelog=log)
249 return filectx(self._repo, path, fileid=fileid, filelog=log)
253 getctx = util.cachefunc(getctx)
250 getctx = util.cachefunc(getctx)
254
251
255 def parents(f):
252 def parents(f):
256 # we want to reuse filectx objects as much as possible
253 # we want to reuse filectx objects as much as possible
257 p = f._path
254 p = f._path
258 if f._filerev is None: # working dir
255 if f._filerev is None: # working dir
259 pl = [(n.path(), n.filerev()) for n in f.parents()]
256 pl = [(n.path(), n.filerev()) for n in f.parents()]
260 else:
257 else:
261 pl = [(p, n) for n in f._filelog.parentrevs(f._filerev)]
258 pl = [(p, n) for n in f._filelog.parentrevs(f._filerev)]
262
259
263 if follow:
260 if follow:
264 r = f.renamed()
261 r = f.renamed()
265 if r:
262 if r:
266 pl[0] = (r[0], getlog(r[0]).rev(r[1]))
263 pl[0] = (r[0], getlog(r[0]).rev(r[1]))
267
264
268 return [getctx(p, n) for p, n in pl if n != nullrev]
265 return [getctx(p, n) for p, n in pl if n != nullrev]
269
266
270 # use linkrev to find the first changeset where self appeared
267 # use linkrev to find the first changeset where self appeared
271 if self.rev() != self._filelog.linkrev(self._filenode):
268 if self.rev() != self._filelog.linkrev(self._filenode):
272 base = self.filectx(self.filerev())
269 base = self.filectx(self.filerev())
273 else:
270 else:
274 base = self
271 base = self
275
272
276 # find all ancestors
273 # find all ancestors
277 needed = {base: 1}
274 needed = {base: 1}
278 visit = [base]
275 visit = [base]
279 files = [base._path]
276 files = [base._path]
280 while visit:
277 while visit:
281 f = visit.pop(0)
278 f = visit.pop(0)
282 for p in parents(f):
279 for p in parents(f):
283 if p not in needed:
280 if p not in needed:
284 needed[p] = 1
281 needed[p] = 1
285 visit.append(p)
282 visit.append(p)
286 if p._path not in files:
283 if p._path not in files:
287 files.append(p._path)
284 files.append(p._path)
288 else:
285 else:
289 # count how many times we'll use this
286 # count how many times we'll use this
290 needed[p] += 1
287 needed[p] += 1
291
288
292 # sort by revision (per file) which is a topological order
289 # sort by revision (per file) which is a topological order
293 visit = []
290 visit = []
294 files.reverse()
291 files.reverse()
295 for f in files:
292 for f in files:
296 fn = [(n._filerev, n) for n in needed.keys() if n._path == f]
293 fn = [(n._filerev, n) for n in needed.keys() if n._path == f]
297 fn.sort()
294 fn.sort()
298 visit.extend(fn)
295 visit.extend(fn)
299 hist = {}
296 hist = {}
300
297
301 for r, f in visit:
298 for r, f in visit:
302 curr = decorate(f.data(), f)
299 curr = decorate(f.data(), f)
303 for p in parents(f):
300 for p in parents(f):
304 if p != nullid:
301 if p != nullid:
305 curr = pair(hist[p], curr)
302 curr = pair(hist[p], curr)
306 # trim the history of unneeded revs
303 # trim the history of unneeded revs
307 needed[p] -= 1
304 needed[p] -= 1
308 if not needed[p]:
305 if not needed[p]:
309 del hist[p]
306 del hist[p]
310 hist[f] = curr
307 hist[f] = curr
311
308
312 return zip(hist[f][0], hist[f][1].splitlines(1))
309 return zip(hist[f][0], hist[f][1].splitlines(1))
313
310
314 def ancestor(self, fc2):
311 def ancestor(self, fc2):
315 """
312 """
316 find the common ancestor file context, if any, of self, and fc2
313 find the common ancestor file context, if any, of self, and fc2
317 """
314 """
318
315
319 acache = {}
316 acache = {}
320
317
321 # prime the ancestor cache for the working directory
318 # prime the ancestor cache for the working directory
322 for c in (self, fc2):
319 for c in (self, fc2):
323 if c._filerev == None:
320 if c._filerev == None:
324 pl = [(n.path(), n.filenode()) for n in c.parents()]
321 pl = [(n.path(), n.filenode()) for n in c.parents()]
325 acache[(c._path, None)] = pl
322 acache[(c._path, None)] = pl
326
323
327 flcache = {self._path:self._filelog, fc2._path:fc2._filelog}
324 flcache = {self._path:self._filelog, fc2._path:fc2._filelog}
328 def parents(vertex):
325 def parents(vertex):
329 if vertex in acache:
326 if vertex in acache:
330 return acache[vertex]
327 return acache[vertex]
331 f, n = vertex
328 f, n = vertex
332 if f not in flcache:
329 if f not in flcache:
333 flcache[f] = self._repo.file(f)
330 flcache[f] = self._repo.file(f)
334 fl = flcache[f]
331 fl = flcache[f]
335 pl = [(f, p) for p in fl.parents(n) if p != nullid]
332 pl = [(f, p) for p in fl.parents(n) if p != nullid]
336 re = fl.renamed(n)
333 re = fl.renamed(n)
337 if re:
334 if re:
338 pl.append(re)
335 pl.append(re)
339 acache[vertex] = pl
336 acache[vertex] = pl
340 return pl
337 return pl
341
338
342 a, b = (self._path, self._filenode), (fc2._path, fc2._filenode)
339 a, b = (self._path, self._filenode), (fc2._path, fc2._filenode)
343 v = ancestor.ancestor(a, b, parents)
340 v = ancestor.ancestor(a, b, parents)
344 if v:
341 if v:
345 f, n = v
342 f, n = v
346 return filectx(self._repo, f, fileid=n, filelog=flcache[f])
343 return filectx(self._repo, f, fileid=n, filelog=flcache[f])
347
344
348 return None
345 return None
349
346
350 class workingctx(changectx):
347 class workingctx(changectx):
351 """A workingctx object makes access to data related to
348 """A workingctx object makes access to data related to
352 the current working directory convenient."""
349 the current working directory convenient."""
353 def __init__(self, repo):
350 def __init__(self, repo):
354 self._repo = repo
351 self._repo = repo
355 self._rev = None
352 self._rev = None
356 self._node = None
353 self._node = None
357
354
358 def __str__(self):
355 def __str__(self):
359 return str(self._parents[0]) + "+"
356 return str(self._parents[0]) + "+"
360
357
361 def __nonzero__(self):
358 def __nonzero__(self):
362 return True
359 return True
363
360
364 def __getattr__(self, name):
361 def __getattr__(self, name):
365 if name == '_parents':
362 if name == '_parents':
366 self._parents = self._repo.parents()
363 self._parents = self._repo.parents()
367 return self._parents
364 return self._parents
368 if name == '_status':
365 if name == '_status':
369 self._status = self._repo.status()
366 self._status = self._repo.status()
370 return self._status
367 return self._status
371 if name == '_manifest':
368 if name == '_manifest':
372 self._buildmanifest()
369 self._buildmanifest()
373 return self._manifest
370 return self._manifest
374 else:
371 else:
375 raise AttributeError, name
372 raise AttributeError, name
376
373
377 def _buildmanifest(self):
374 def _buildmanifest(self):
378 """generate a manifest corresponding to the working directory"""
375 """generate a manifest corresponding to the working directory"""
379
376
380 man = self._parents[0].manifest().copy()
377 man = self._parents[0].manifest().copy()
381 copied = self._repo.dirstate.copies()
378 copied = self._repo.dirstate.copies()
382 modified, added, removed, deleted, unknown = self._status[:5]
379 modified, added, removed, deleted, unknown = self._status[:5]
383 for i, l in (("a", added), ("m", modified), ("u", unknown)):
380 for i, l in (("a", added), ("m", modified), ("u", unknown)):
384 for f in l:
381 for f in l:
385 man[f] = man.get(copied.get(f, f), nullid) + i
382 man[f] = man.get(copied.get(f, f), nullid) + i
386 try:
383 try:
387 man.set(f, util.is_exec(self._repo.wjoin(f), man.execf(f)))
384 man.set(f, util.is_exec(self._repo.wjoin(f), man.execf(f)))
388 except OSError:
385 except OSError:
389 pass
386 pass
390
387
391 for f in deleted + removed:
388 for f in deleted + removed:
392 if f in man:
389 if f in man:
393 del man[f]
390 del man[f]
394
391
395 self._manifest = man
392 self._manifest = man
396
393
397 def manifest(self): return self._manifest
394 def manifest(self): return self._manifest
398
395
399 def user(self): return self._repo.ui.username()
396 def user(self): return self._repo.ui.username()
400 def date(self): return util.makedate()
397 def date(self): return util.makedate()
401 def description(self): return ""
398 def description(self): return ""
402 def files(self):
399 def files(self):
403 f = self.modified() + self.added() + self.removed()
400 f = self.modified() + self.added() + self.removed()
404 f.sort()
401 f.sort()
405 return f
402 return f
406
403
407 def modified(self): return self._status[0]
404 def modified(self): return self._status[0]
408 def added(self): return self._status[1]
405 def added(self): return self._status[1]
409 def removed(self): return self._status[2]
406 def removed(self): return self._status[2]
410 def deleted(self): return self._status[3]
407 def deleted(self): return self._status[3]
411 def unknown(self): return self._status[4]
408 def unknown(self): return self._status[4]
412 def clean(self): return self._status[5]
409 def clean(self): return self._status[5]
413 def branch(self):
410 def branch(self):
414 try:
411 try:
415 return self._repo.opener("branch").read().strip()
412 return self._repo.opener("branch").read().strip()
416 except IOError:
413 except IOError:
417 return ""
414 return ""
418
415
419 def parents(self):
416 def parents(self):
420 """return contexts for each parent changeset"""
417 """return contexts for each parent changeset"""
421 return self._parents
418 return self._parents
422
419
423 def children(self):
420 def children(self):
424 return []
421 return []
425
422
426 def filectx(self, path):
423 def filectx(self, path):
427 """get a file context from the working directory"""
424 """get a file context from the working directory"""
428 return workingfilectx(self._repo, path, workingctx=self)
425 return workingfilectx(self._repo, path, workingctx=self)
429
426
430 def ancestor(self, c2):
427 def ancestor(self, c2):
431 """return the ancestor context of self and c2"""
428 """return the ancestor context of self and c2"""
432 return self._parents[0].ancestor(c2) # punt on two parents for now
429 return self._parents[0].ancestor(c2) # punt on two parents for now
433
430
434 class workingfilectx(filectx):
431 class workingfilectx(filectx):
435 """A workingfilectx object makes access to data related to a particular
432 """A workingfilectx object makes access to data related to a particular
436 file in the working directory convenient."""
433 file in the working directory convenient."""
437 def __init__(self, repo, path, filelog=None, workingctx=None):
434 def __init__(self, repo, path, filelog=None, workingctx=None):
438 """changeid can be a changeset revision, node, or tag.
435 """changeid can be a changeset revision, node, or tag.
439 fileid can be a file revision or node."""
436 fileid can be a file revision or node."""
440 self._repo = repo
437 self._repo = repo
441 self._path = path
438 self._path = path
442 self._changeid = None
439 self._changeid = None
443 self._filerev = self._filenode = None
440 self._filerev = self._filenode = None
444
441
445 if filelog:
442 if filelog:
446 self._filelog = filelog
443 self._filelog = filelog
447 if workingctx:
444 if workingctx:
448 self._changectx = workingctx
445 self._changectx = workingctx
449
446
450 def __getattr__(self, name):
447 def __getattr__(self, name):
451 if name == '_changectx':
448 if name == '_changectx':
452 self._changectx = workingctx(repo)
449 self._changectx = workingctx(repo)
453 return self._changectx
450 return self._changectx
454 elif name == '_repopath':
451 elif name == '_repopath':
455 self._repopath = (self._repo.dirstate.copied(self._path)
452 self._repopath = (self._repo.dirstate.copied(self._path)
456 or self._path)
453 or self._path)
457 return self._repopath
454 return self._repopath
458 elif name == '_filelog':
455 elif name == '_filelog':
459 self._filelog = self._repo.file(self._repopath)
456 self._filelog = self._repo.file(self._repopath)
460 return self._filelog
457 return self._filelog
461 else:
458 else:
462 raise AttributeError, name
459 raise AttributeError, name
463
460
464 def __nonzero__(self):
461 def __nonzero__(self):
465 return True
462 return True
466
463
467 def __str__(self):
464 def __str__(self):
468 return "%s@%s" % (self.path(), self._changectx)
465 return "%s@%s" % (self.path(), self._changectx)
469
466
470 def filectx(self, fileid):
467 def filectx(self, fileid):
471 '''opens an arbitrary revision of the file without
468 '''opens an arbitrary revision of the file without
472 opening a new filelog'''
469 opening a new filelog'''
473 return filectx(self._repo, self._repopath, fileid=fileid,
470 return filectx(self._repo, self._repopath, fileid=fileid,
474 filelog=self._filelog)
471 filelog=self._filelog)
475
472
476 def rev(self):
473 def rev(self):
477 if '_changectx' in self.__dict__:
474 if '_changectx' in self.__dict__:
478 return self._changectx.rev()
475 return self._changectx.rev()
479 return self._filelog.linkrev(self._filenode)
476 return self._filelog.linkrev(self._filenode)
480
477
481 def data(self): return self._repo.wread(self._path)
478 def data(self): return self._repo.wread(self._path)
482 def renamed(self):
479 def renamed(self):
483 rp = self._repopath
480 rp = self._repopath
484 if rp == self._path:
481 if rp == self._path:
485 return None
482 return None
486 return rp, self._workingctx._parents._manifest.get(rp, nullid)
483 return rp, self._workingctx._parents._manifest.get(rp, nullid)
487
484
488 def parents(self):
485 def parents(self):
489 '''return parent filectxs, following copies if necessary'''
486 '''return parent filectxs, following copies if necessary'''
490 p = self._path
487 p = self._path
491 rp = self._repopath
488 rp = self._repopath
492 pcl = self._changectx._parents
489 pcl = self._changectx._parents
493 fl = self._filelog
490 fl = self._filelog
494 pl = [(rp, pcl[0]._manifest.get(rp, nullid), fl)]
491 pl = [(rp, pcl[0]._manifest.get(rp, nullid), fl)]
495 if len(pcl) > 1:
492 if len(pcl) > 1:
496 if rp != p:
493 if rp != p:
497 fl = None
494 fl = None
498 pl.append((p, pcl[1]._manifest.get(p, nullid), fl))
495 pl.append((p, pcl[1]._manifest.get(p, nullid), fl))
499
496
500 return [filectx(self._repo, p, fileid=n, filelog=l)
497 return [filectx(self._repo, p, fileid=n, filelog=l)
501 for p,n,l in pl if n != nullid]
498 for p,n,l in pl if n != nullid]
502
499
503 def children(self):
500 def children(self):
504 return []
501 return []
505
502
506 def size(self): return os.stat(self._repo.wjoin(self._path)).st_size
503 def size(self): return os.stat(self._repo.wjoin(self._path)).st_size
507
504
508 def cmp(self, text): return self._repo.wread(self._path) == text
505 def cmp(self, text): return self._repo.wread(self._path) == text
@@ -1,1863 +1,1863 b''
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import *
8 from node import *
9 from i18n import _
9 from i18n import _
10 import repo, appendfile, changegroup
10 import repo, appendfile, changegroup
11 import changelog, dirstate, filelog, manifest, context
11 import changelog, dirstate, filelog, manifest, context
12 import re, lock, transaction, tempfile, stat, mdiff, errno, ui
12 import re, lock, transaction, tempfile, stat, mdiff, errno, ui
13 import os, revlog, time, util
13 import os, revlog, time, util
14
14
15 class localrepository(repo.repository):
15 class localrepository(repo.repository):
16 capabilities = ('lookup', 'changegroupsubset')
16 capabilities = ('lookup', 'changegroupsubset')
17 supported = ('revlogv1', 'store')
17 supported = ('revlogv1', 'store')
18
18
19 def __del__(self):
19 def __del__(self):
20 self.transhandle = None
20 self.transhandle = None
21 def __init__(self, parentui, path=None, create=0):
21 def __init__(self, parentui, path=None, create=0):
22 repo.repository.__init__(self)
22 repo.repository.__init__(self)
23 if not path:
23 if not path:
24 p = os.getcwd()
24 p = os.getcwd()
25 while not os.path.isdir(os.path.join(p, ".hg")):
25 while not os.path.isdir(os.path.join(p, ".hg")):
26 oldp = p
26 oldp = p
27 p = os.path.dirname(p)
27 p = os.path.dirname(p)
28 if p == oldp:
28 if p == oldp:
29 raise repo.RepoError(_("There is no Mercurial repository"
29 raise repo.RepoError(_("There is no Mercurial repository"
30 " here (.hg not found)"))
30 " here (.hg not found)"))
31 path = p
31 path = p
32
32
33 self.path = os.path.join(path, ".hg")
33 self.path = os.path.join(path, ".hg")
34 self.root = os.path.realpath(path)
34 self.root = os.path.realpath(path)
35 self.origroot = path
35 self.origroot = path
36 self.opener = util.opener(self.path)
36 self.opener = util.opener(self.path)
37 self.wopener = util.opener(self.root)
37 self.wopener = util.opener(self.root)
38
38
39 if not os.path.isdir(self.path):
39 if not os.path.isdir(self.path):
40 if create:
40 if create:
41 if not os.path.exists(path):
41 if not os.path.exists(path):
42 os.mkdir(path)
42 os.mkdir(path)
43 os.mkdir(self.path)
43 os.mkdir(self.path)
44 os.mkdir(os.path.join(self.path, "store"))
44 os.mkdir(os.path.join(self.path, "store"))
45 requirements = ("revlogv1", "store")
45 requirements = ("revlogv1", "store")
46 reqfile = self.opener("requires", "w")
46 reqfile = self.opener("requires", "w")
47 for r in requirements:
47 for r in requirements:
48 reqfile.write("%s\n" % r)
48 reqfile.write("%s\n" % r)
49 reqfile.close()
49 reqfile.close()
50 # create an invalid changelog
50 # create an invalid changelog
51 self.opener("00changelog.i", "a").write(
51 self.opener("00changelog.i", "a").write(
52 '\0\0\0\2' # represents revlogv2
52 '\0\0\0\2' # represents revlogv2
53 ' dummy changelog to prevent using the old repo layout'
53 ' dummy changelog to prevent using the old repo layout'
54 )
54 )
55 else:
55 else:
56 raise repo.RepoError(_("repository %s not found") % path)
56 raise repo.RepoError(_("repository %s not found") % path)
57 elif create:
57 elif create:
58 raise repo.RepoError(_("repository %s already exists") % path)
58 raise repo.RepoError(_("repository %s already exists") % path)
59 else:
59 else:
60 # find requirements
60 # find requirements
61 try:
61 try:
62 requirements = self.opener("requires").read().splitlines()
62 requirements = self.opener("requires").read().splitlines()
63 except IOError, inst:
63 except IOError, inst:
64 if inst.errno != errno.ENOENT:
64 if inst.errno != errno.ENOENT:
65 raise
65 raise
66 requirements = []
66 requirements = []
67 # check them
67 # check them
68 for r in requirements:
68 for r in requirements:
69 if r not in self.supported:
69 if r not in self.supported:
70 raise repo.RepoError(_("requirement '%s' not supported") % r)
70 raise repo.RepoError(_("requirement '%s' not supported") % r)
71
71
72 # setup store
72 # setup store
73 if "store" in requirements:
73 if "store" in requirements:
74 self.encodefn = util.encodefilename
74 self.encodefn = util.encodefilename
75 self.decodefn = util.decodefilename
75 self.decodefn = util.decodefilename
76 self.spath = os.path.join(self.path, "store")
76 self.spath = os.path.join(self.path, "store")
77 else:
77 else:
78 self.encodefn = lambda x: x
78 self.encodefn = lambda x: x
79 self.decodefn = lambda x: x
79 self.decodefn = lambda x: x
80 self.spath = self.path
80 self.spath = self.path
81 self.sopener = util.encodedopener(util.opener(self.spath), self.encodefn)
81 self.sopener = util.encodedopener(util.opener(self.spath), self.encodefn)
82
82
83 self.ui = ui.ui(parentui=parentui)
83 self.ui = ui.ui(parentui=parentui)
84 try:
84 try:
85 self.ui.readconfig(self.join("hgrc"), self.root)
85 self.ui.readconfig(self.join("hgrc"), self.root)
86 except IOError:
86 except IOError:
87 pass
87 pass
88
88
89 v = self.ui.configrevlog()
89 v = self.ui.configrevlog()
90 self.revlogversion = int(v.get('format', revlog.REVLOG_DEFAULT_FORMAT))
90 self.revlogversion = int(v.get('format', revlog.REVLOG_DEFAULT_FORMAT))
91 self.revlogv1 = self.revlogversion != revlog.REVLOGV0
91 self.revlogv1 = self.revlogversion != revlog.REVLOGV0
92 fl = v.get('flags', None)
92 fl = v.get('flags', None)
93 flags = 0
93 flags = 0
94 if fl != None:
94 if fl != None:
95 for x in fl.split():
95 for x in fl.split():
96 flags |= revlog.flagstr(x)
96 flags |= revlog.flagstr(x)
97 elif self.revlogv1:
97 elif self.revlogv1:
98 flags = revlog.REVLOG_DEFAULT_FLAGS
98 flags = revlog.REVLOG_DEFAULT_FLAGS
99
99
100 v = self.revlogversion | flags
100 v = self.revlogversion | flags
101 self.manifest = manifest.manifest(self.sopener, v)
101 self.manifest = manifest.manifest(self.sopener, v)
102 self.changelog = changelog.changelog(self.sopener, v)
102 self.changelog = changelog.changelog(self.sopener, v)
103
103
104 fallback = self.ui.config('ui', 'fallbackencoding')
104 fallback = self.ui.config('ui', 'fallbackencoding')
105 if fallback:
105 if fallback:
106 util._fallbackencoding = fallback
106 util._fallbackencoding = fallback
107
107
108 # the changelog might not have the inline index flag
108 # the changelog might not have the inline index flag
109 # on. If the format of the changelog is the same as found in
109 # on. If the format of the changelog is the same as found in
110 # .hgrc, apply any flags found in the .hgrc as well.
110 # .hgrc, apply any flags found in the .hgrc as well.
111 # Otherwise, just version from the changelog
111 # Otherwise, just version from the changelog
112 v = self.changelog.version
112 v = self.changelog.version
113 if v == self.revlogversion:
113 if v == self.revlogversion:
114 v |= flags
114 v |= flags
115 self.revlogversion = v
115 self.revlogversion = v
116
116
117 self.tagscache = None
117 self.tagscache = None
118 self.branchcache = None
118 self.branchcache = None
119 self.nodetagscache = None
119 self.nodetagscache = None
120 self.encodepats = None
120 self.encodepats = None
121 self.decodepats = None
121 self.decodepats = None
122 self.transhandle = None
122 self.transhandle = None
123
123
124 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
124 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
125
125
126 def url(self):
126 def url(self):
127 return 'file:' + self.root
127 return 'file:' + self.root
128
128
129 def hook(self, name, throw=False, **args):
129 def hook(self, name, throw=False, **args):
130 def callhook(hname, funcname):
130 def callhook(hname, funcname):
131 '''call python hook. hook is callable object, looked up as
131 '''call python hook. hook is callable object, looked up as
132 name in python module. if callable returns "true", hook
132 name in python module. if callable returns "true", hook
133 fails, else passes. if hook raises exception, treated as
133 fails, else passes. if hook raises exception, treated as
134 hook failure. exception propagates if throw is "true".
134 hook failure. exception propagates if throw is "true".
135
135
136 reason for "true" meaning "hook failed" is so that
136 reason for "true" meaning "hook failed" is so that
137 unmodified commands (e.g. mercurial.commands.update) can
137 unmodified commands (e.g. mercurial.commands.update) can
138 be run as hooks without wrappers to convert return values.'''
138 be run as hooks without wrappers to convert return values.'''
139
139
140 self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
140 self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
141 d = funcname.rfind('.')
141 d = funcname.rfind('.')
142 if d == -1:
142 if d == -1:
143 raise util.Abort(_('%s hook is invalid ("%s" not in a module)')
143 raise util.Abort(_('%s hook is invalid ("%s" not in a module)')
144 % (hname, funcname))
144 % (hname, funcname))
145 modname = funcname[:d]
145 modname = funcname[:d]
146 try:
146 try:
147 obj = __import__(modname)
147 obj = __import__(modname)
148 except ImportError:
148 except ImportError:
149 try:
149 try:
150 # extensions are loaded with hgext_ prefix
150 # extensions are loaded with hgext_ prefix
151 obj = __import__("hgext_%s" % modname)
151 obj = __import__("hgext_%s" % modname)
152 except ImportError:
152 except ImportError:
153 raise util.Abort(_('%s hook is invalid '
153 raise util.Abort(_('%s hook is invalid '
154 '(import of "%s" failed)') %
154 '(import of "%s" failed)') %
155 (hname, modname))
155 (hname, modname))
156 try:
156 try:
157 for p in funcname.split('.')[1:]:
157 for p in funcname.split('.')[1:]:
158 obj = getattr(obj, p)
158 obj = getattr(obj, p)
159 except AttributeError, err:
159 except AttributeError, err:
160 raise util.Abort(_('%s hook is invalid '
160 raise util.Abort(_('%s hook is invalid '
161 '("%s" is not defined)') %
161 '("%s" is not defined)') %
162 (hname, funcname))
162 (hname, funcname))
163 if not callable(obj):
163 if not callable(obj):
164 raise util.Abort(_('%s hook is invalid '
164 raise util.Abort(_('%s hook is invalid '
165 '("%s" is not callable)') %
165 '("%s" is not callable)') %
166 (hname, funcname))
166 (hname, funcname))
167 try:
167 try:
168 r = obj(ui=self.ui, repo=self, hooktype=name, **args)
168 r = obj(ui=self.ui, repo=self, hooktype=name, **args)
169 except (KeyboardInterrupt, util.SignalInterrupt):
169 except (KeyboardInterrupt, util.SignalInterrupt):
170 raise
170 raise
171 except Exception, exc:
171 except Exception, exc:
172 if isinstance(exc, util.Abort):
172 if isinstance(exc, util.Abort):
173 self.ui.warn(_('error: %s hook failed: %s\n') %
173 self.ui.warn(_('error: %s hook failed: %s\n') %
174 (hname, exc.args[0]))
174 (hname, exc.args[0]))
175 else:
175 else:
176 self.ui.warn(_('error: %s hook raised an exception: '
176 self.ui.warn(_('error: %s hook raised an exception: '
177 '%s\n') % (hname, exc))
177 '%s\n') % (hname, exc))
178 if throw:
178 if throw:
179 raise
179 raise
180 self.ui.print_exc()
180 self.ui.print_exc()
181 return True
181 return True
182 if r:
182 if r:
183 if throw:
183 if throw:
184 raise util.Abort(_('%s hook failed') % hname)
184 raise util.Abort(_('%s hook failed') % hname)
185 self.ui.warn(_('warning: %s hook failed\n') % hname)
185 self.ui.warn(_('warning: %s hook failed\n') % hname)
186 return r
186 return r
187
187
188 def runhook(name, cmd):
188 def runhook(name, cmd):
189 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
189 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
190 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()])
190 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()])
191 r = util.system(cmd, environ=env, cwd=self.root)
191 r = util.system(cmd, environ=env, cwd=self.root)
192 if r:
192 if r:
193 desc, r = util.explain_exit(r)
193 desc, r = util.explain_exit(r)
194 if throw:
194 if throw:
195 raise util.Abort(_('%s hook %s') % (name, desc))
195 raise util.Abort(_('%s hook %s') % (name, desc))
196 self.ui.warn(_('warning: %s hook %s\n') % (name, desc))
196 self.ui.warn(_('warning: %s hook %s\n') % (name, desc))
197 return r
197 return r
198
198
199 r = False
199 r = False
200 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
200 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
201 if hname.split(".", 1)[0] == name and cmd]
201 if hname.split(".", 1)[0] == name and cmd]
202 hooks.sort()
202 hooks.sort()
203 for hname, cmd in hooks:
203 for hname, cmd in hooks:
204 if cmd.startswith('python:'):
204 if cmd.startswith('python:'):
205 r = callhook(hname, cmd[7:].strip()) or r
205 r = callhook(hname, cmd[7:].strip()) or r
206 else:
206 else:
207 r = runhook(hname, cmd) or r
207 r = runhook(hname, cmd) or r
208 return r
208 return r
209
209
210 tag_disallowed = ':\r\n'
210 tag_disallowed = ':\r\n'
211
211
212 def tag(self, name, node, message, local, user, date):
212 def tag(self, name, node, message, local, user, date):
213 '''tag a revision with a symbolic name.
213 '''tag a revision with a symbolic name.
214
214
215 if local is True, the tag is stored in a per-repository file.
215 if local is True, the tag is stored in a per-repository file.
216 otherwise, it is stored in the .hgtags file, and a new
216 otherwise, it is stored in the .hgtags file, and a new
217 changeset is committed with the change.
217 changeset is committed with the change.
218
218
219 keyword arguments:
219 keyword arguments:
220
220
221 local: whether to store tag in non-version-controlled file
221 local: whether to store tag in non-version-controlled file
222 (default False)
222 (default False)
223
223
224 message: commit message to use if committing
224 message: commit message to use if committing
225
225
226 user: name of user to use if committing
226 user: name of user to use if committing
227
227
228 date: date tuple to use if committing'''
228 date: date tuple to use if committing'''
229
229
230 for c in self.tag_disallowed:
230 for c in self.tag_disallowed:
231 if c in name:
231 if c in name:
232 raise util.Abort(_('%r cannot be used in a tag name') % c)
232 raise util.Abort(_('%r cannot be used in a tag name') % c)
233
233
234 self.hook('pretag', throw=True, node=hex(node), tag=name, local=local)
234 self.hook('pretag', throw=True, node=hex(node), tag=name, local=local)
235
235
236 if local:
236 if local:
237 # local tags are stored in the current charset
237 # local tags are stored in the current charset
238 self.opener('localtags', 'a').write('%s %s\n' % (hex(node), name))
238 self.opener('localtags', 'a').write('%s %s\n' % (hex(node), name))
239 self.hook('tag', node=hex(node), tag=name, local=local)
239 self.hook('tag', node=hex(node), tag=name, local=local)
240 return
240 return
241
241
242 for x in self.status()[:5]:
242 for x in self.status()[:5]:
243 if '.hgtags' in x:
243 if '.hgtags' in x:
244 raise util.Abort(_('working copy of .hgtags is changed '
244 raise util.Abort(_('working copy of .hgtags is changed '
245 '(please commit .hgtags manually)'))
245 '(please commit .hgtags manually)'))
246
246
247 # committed tags are stored in UTF-8
247 # committed tags are stored in UTF-8
248 line = '%s %s\n' % (hex(node), util.fromlocal(name))
248 line = '%s %s\n' % (hex(node), util.fromlocal(name))
249 self.wfile('.hgtags', 'ab').write(line)
249 self.wfile('.hgtags', 'ab').write(line)
250 if self.dirstate.state('.hgtags') == '?':
250 if self.dirstate.state('.hgtags') == '?':
251 self.add(['.hgtags'])
251 self.add(['.hgtags'])
252
252
253 self.commit(['.hgtags'], message, user, date)
253 self.commit(['.hgtags'], message, user, date)
254 self.hook('tag', node=hex(node), tag=name, local=local)
254 self.hook('tag', node=hex(node), tag=name, local=local)
255
255
256 def tags(self):
256 def tags(self):
257 '''return a mapping of tag to node'''
257 '''return a mapping of tag to node'''
258 if not self.tagscache:
258 if not self.tagscache:
259 self.tagscache = {}
259 self.tagscache = {}
260
260
261 def parsetag(line, context):
261 def parsetag(line, context):
262 if not line:
262 if not line:
263 return
263 return
264 s = l.split(" ", 1)
264 s = l.split(" ", 1)
265 if len(s) != 2:
265 if len(s) != 2:
266 self.ui.warn(_("%s: cannot parse entry\n") % context)
266 self.ui.warn(_("%s: cannot parse entry\n") % context)
267 return
267 return
268 node, key = s
268 node, key = s
269 key = util.tolocal(key.strip()) # stored in UTF-8
269 key = util.tolocal(key.strip()) # stored in UTF-8
270 try:
270 try:
271 bin_n = bin(node)
271 bin_n = bin(node)
272 except TypeError:
272 except TypeError:
273 self.ui.warn(_("%s: node '%s' is not well formed\n") %
273 self.ui.warn(_("%s: node '%s' is not well formed\n") %
274 (context, node))
274 (context, node))
275 return
275 return
276 if bin_n not in self.changelog.nodemap:
276 if bin_n not in self.changelog.nodemap:
277 self.ui.warn(_("%s: tag '%s' refers to unknown node\n") %
277 self.ui.warn(_("%s: tag '%s' refers to unknown node\n") %
278 (context, key))
278 (context, key))
279 return
279 return
280 self.tagscache[key] = bin_n
280 self.tagscache[key] = bin_n
281
281
282 # read the tags file from each head, ending with the tip,
282 # read the tags file from each head, ending with the tip,
283 # and add each tag found to the map, with "newer" ones
283 # and add each tag found to the map, with "newer" ones
284 # taking precedence
284 # taking precedence
285 f = None
285 f = None
286 for rev, node, fnode in self._hgtagsnodes():
286 for rev, node, fnode in self._hgtagsnodes():
287 f = (f and f.filectx(fnode) or
287 f = (f and f.filectx(fnode) or
288 self.filectx('.hgtags', fileid=fnode))
288 self.filectx('.hgtags', fileid=fnode))
289 count = 0
289 count = 0
290 for l in f.data().splitlines():
290 for l in f.data().splitlines():
291 count += 1
291 count += 1
292 parsetag(l, _("%s, line %d") % (str(f), count))
292 parsetag(l, _("%s, line %d") % (str(f), count))
293
293
294 try:
294 try:
295 f = self.opener("localtags")
295 f = self.opener("localtags")
296 count = 0
296 count = 0
297 for l in f:
297 for l in f:
298 # localtags are stored in the local character set
298 # localtags are stored in the local character set
299 # while the internal tag table is stored in UTF-8
299 # while the internal tag table is stored in UTF-8
300 l = util.fromlocal(l)
300 l = util.fromlocal(l)
301 count += 1
301 count += 1
302 parsetag(l, _("localtags, line %d") % count)
302 parsetag(l, _("localtags, line %d") % count)
303 except IOError:
303 except IOError:
304 pass
304 pass
305
305
306 self.tagscache['tip'] = self.changelog.tip()
306 self.tagscache['tip'] = self.changelog.tip()
307
307
308 return self.tagscache
308 return self.tagscache
309
309
310 def _hgtagsnodes(self):
310 def _hgtagsnodes(self):
311 heads = self.heads()
311 heads = self.heads()
312 heads.reverse()
312 heads.reverse()
313 last = {}
313 last = {}
314 ret = []
314 ret = []
315 for node in heads:
315 for node in heads:
316 c = self.changectx(node)
316 c = self.changectx(node)
317 rev = c.rev()
317 rev = c.rev()
318 try:
318 try:
319 fnode = c.filenode('.hgtags')
319 fnode = c.filenode('.hgtags')
320 except repo.LookupError:
320 except revlog.LookupError:
321 continue
321 continue
322 ret.append((rev, node, fnode))
322 ret.append((rev, node, fnode))
323 if fnode in last:
323 if fnode in last:
324 ret[last[fnode]] = None
324 ret[last[fnode]] = None
325 last[fnode] = len(ret) - 1
325 last[fnode] = len(ret) - 1
326 return [item for item in ret if item]
326 return [item for item in ret if item]
327
327
328 def tagslist(self):
328 def tagslist(self):
329 '''return a list of tags ordered by revision'''
329 '''return a list of tags ordered by revision'''
330 l = []
330 l = []
331 for t, n in self.tags().items():
331 for t, n in self.tags().items():
332 try:
332 try:
333 r = self.changelog.rev(n)
333 r = self.changelog.rev(n)
334 except:
334 except:
335 r = -2 # sort to the beginning of the list if unknown
335 r = -2 # sort to the beginning of the list if unknown
336 l.append((r, t, n))
336 l.append((r, t, n))
337 l.sort()
337 l.sort()
338 return [(t, n) for r, t, n in l]
338 return [(t, n) for r, t, n in l]
339
339
340 def nodetags(self, node):
340 def nodetags(self, node):
341 '''return the tags associated with a node'''
341 '''return the tags associated with a node'''
342 if not self.nodetagscache:
342 if not self.nodetagscache:
343 self.nodetagscache = {}
343 self.nodetagscache = {}
344 for t, n in self.tags().items():
344 for t, n in self.tags().items():
345 self.nodetagscache.setdefault(n, []).append(t)
345 self.nodetagscache.setdefault(n, []).append(t)
346 return self.nodetagscache.get(node, [])
346 return self.nodetagscache.get(node, [])
347
347
348 def _branchtags(self):
348 def _branchtags(self):
349 partial, last, lrev = self._readbranchcache()
349 partial, last, lrev = self._readbranchcache()
350
350
351 tiprev = self.changelog.count() - 1
351 tiprev = self.changelog.count() - 1
352 if lrev != tiprev:
352 if lrev != tiprev:
353 self._updatebranchcache(partial, lrev+1, tiprev+1)
353 self._updatebranchcache(partial, lrev+1, tiprev+1)
354 self._writebranchcache(partial, self.changelog.tip(), tiprev)
354 self._writebranchcache(partial, self.changelog.tip(), tiprev)
355
355
356 return partial
356 return partial
357
357
358 def branchtags(self):
358 def branchtags(self):
359 if self.branchcache is not None:
359 if self.branchcache is not None:
360 return self.branchcache
360 return self.branchcache
361
361
362 self.branchcache = {} # avoid recursion in changectx
362 self.branchcache = {} # avoid recursion in changectx
363 partial = self._branchtags()
363 partial = self._branchtags()
364
364
365 # the branch cache is stored on disk as UTF-8, but in the local
365 # the branch cache is stored on disk as UTF-8, but in the local
366 # charset internally
366 # charset internally
367 for k, v in partial.items():
367 for k, v in partial.items():
368 self.branchcache[util.tolocal(k)] = v
368 self.branchcache[util.tolocal(k)] = v
369 return self.branchcache
369 return self.branchcache
370
370
371 def _readbranchcache(self):
371 def _readbranchcache(self):
372 partial = {}
372 partial = {}
373 try:
373 try:
374 f = self.opener("branches.cache")
374 f = self.opener("branches.cache")
375 lines = f.read().split('\n')
375 lines = f.read().split('\n')
376 f.close()
376 f.close()
377 last, lrev = lines.pop(0).rstrip().split(" ", 1)
377 last, lrev = lines.pop(0).rstrip().split(" ", 1)
378 last, lrev = bin(last), int(lrev)
378 last, lrev = bin(last), int(lrev)
379 if not (lrev < self.changelog.count() and
379 if not (lrev < self.changelog.count() and
380 self.changelog.node(lrev) == last): # sanity check
380 self.changelog.node(lrev) == last): # sanity check
381 # invalidate the cache
381 # invalidate the cache
382 raise ValueError('Invalid branch cache: unknown tip')
382 raise ValueError('Invalid branch cache: unknown tip')
383 for l in lines:
383 for l in lines:
384 if not l: continue
384 if not l: continue
385 node, label = l.rstrip().split(" ", 1)
385 node, label = l.rstrip().split(" ", 1)
386 partial[label] = bin(node)
386 partial[label] = bin(node)
387 except (KeyboardInterrupt, util.SignalInterrupt):
387 except (KeyboardInterrupt, util.SignalInterrupt):
388 raise
388 raise
389 except Exception, inst:
389 except Exception, inst:
390 if self.ui.debugflag:
390 if self.ui.debugflag:
391 self.ui.warn(str(inst), '\n')
391 self.ui.warn(str(inst), '\n')
392 partial, last, lrev = {}, nullid, nullrev
392 partial, last, lrev = {}, nullid, nullrev
393 return partial, last, lrev
393 return partial, last, lrev
394
394
395 def _writebranchcache(self, branches, tip, tiprev):
395 def _writebranchcache(self, branches, tip, tiprev):
396 try:
396 try:
397 f = self.opener("branches.cache", "w")
397 f = self.opener("branches.cache", "w")
398 f.write("%s %s\n" % (hex(tip), tiprev))
398 f.write("%s %s\n" % (hex(tip), tiprev))
399 for label, node in branches.iteritems():
399 for label, node in branches.iteritems():
400 f.write("%s %s\n" % (hex(node), label))
400 f.write("%s %s\n" % (hex(node), label))
401 except IOError:
401 except IOError:
402 pass
402 pass
403
403
404 def _updatebranchcache(self, partial, start, end):
404 def _updatebranchcache(self, partial, start, end):
405 for r in xrange(start, end):
405 for r in xrange(start, end):
406 c = self.changectx(r)
406 c = self.changectx(r)
407 b = c.branch()
407 b = c.branch()
408 if b:
408 if b:
409 partial[b] = c.node()
409 partial[b] = c.node()
410
410
411 def lookup(self, key):
411 def lookup(self, key):
412 if key == '.':
412 if key == '.':
413 key = self.dirstate.parents()[0]
413 key = self.dirstate.parents()[0]
414 if key == nullid:
414 if key == nullid:
415 raise repo.RepoError(_("no revision checked out"))
415 raise repo.RepoError(_("no revision checked out"))
416 elif key == 'null':
416 elif key == 'null':
417 return nullid
417 return nullid
418 n = self.changelog._match(key)
418 n = self.changelog._match(key)
419 if n:
419 if n:
420 return n
420 return n
421 if key in self.tags():
421 if key in self.tags():
422 return self.tags()[key]
422 return self.tags()[key]
423 if key in self.branchtags():
423 if key in self.branchtags():
424 return self.branchtags()[key]
424 return self.branchtags()[key]
425 n = self.changelog._partialmatch(key)
425 n = self.changelog._partialmatch(key)
426 if n:
426 if n:
427 return n
427 return n
428 raise repo.RepoError(_("unknown revision '%s'") % key)
428 raise repo.RepoError(_("unknown revision '%s'") % key)
429
429
430 def dev(self):
430 def dev(self):
431 return os.lstat(self.path).st_dev
431 return os.lstat(self.path).st_dev
432
432
433 def local(self):
433 def local(self):
434 return True
434 return True
435
435
436 def join(self, f):
436 def join(self, f):
437 return os.path.join(self.path, f)
437 return os.path.join(self.path, f)
438
438
439 def sjoin(self, f):
439 def sjoin(self, f):
440 f = self.encodefn(f)
440 f = self.encodefn(f)
441 return os.path.join(self.spath, f)
441 return os.path.join(self.spath, f)
442
442
443 def wjoin(self, f):
443 def wjoin(self, f):
444 return os.path.join(self.root, f)
444 return os.path.join(self.root, f)
445
445
446 def file(self, f):
446 def file(self, f):
447 if f[0] == '/':
447 if f[0] == '/':
448 f = f[1:]
448 f = f[1:]
449 return filelog.filelog(self.sopener, f, self.revlogversion)
449 return filelog.filelog(self.sopener, f, self.revlogversion)
450
450
451 def changectx(self, changeid=None):
451 def changectx(self, changeid=None):
452 return context.changectx(self, changeid)
452 return context.changectx(self, changeid)
453
453
454 def workingctx(self):
454 def workingctx(self):
455 return context.workingctx(self)
455 return context.workingctx(self)
456
456
457 def parents(self, changeid=None):
457 def parents(self, changeid=None):
458 '''
458 '''
459 get list of changectxs for parents of changeid or working directory
459 get list of changectxs for parents of changeid or working directory
460 '''
460 '''
461 if changeid is None:
461 if changeid is None:
462 pl = self.dirstate.parents()
462 pl = self.dirstate.parents()
463 else:
463 else:
464 n = self.changelog.lookup(changeid)
464 n = self.changelog.lookup(changeid)
465 pl = self.changelog.parents(n)
465 pl = self.changelog.parents(n)
466 if pl[1] == nullid:
466 if pl[1] == nullid:
467 return [self.changectx(pl[0])]
467 return [self.changectx(pl[0])]
468 return [self.changectx(pl[0]), self.changectx(pl[1])]
468 return [self.changectx(pl[0]), self.changectx(pl[1])]
469
469
470 def filectx(self, path, changeid=None, fileid=None):
470 def filectx(self, path, changeid=None, fileid=None):
471 """changeid can be a changeset revision, node, or tag.
471 """changeid can be a changeset revision, node, or tag.
472 fileid can be a file revision or node."""
472 fileid can be a file revision or node."""
473 return context.filectx(self, path, changeid, fileid)
473 return context.filectx(self, path, changeid, fileid)
474
474
475 def getcwd(self):
475 def getcwd(self):
476 return self.dirstate.getcwd()
476 return self.dirstate.getcwd()
477
477
478 def wfile(self, f, mode='r'):
478 def wfile(self, f, mode='r'):
479 return self.wopener(f, mode)
479 return self.wopener(f, mode)
480
480
481 def wread(self, filename):
481 def wread(self, filename):
482 if self.encodepats == None:
482 if self.encodepats == None:
483 l = []
483 l = []
484 for pat, cmd in self.ui.configitems("encode"):
484 for pat, cmd in self.ui.configitems("encode"):
485 mf = util.matcher(self.root, "", [pat], [], [])[1]
485 mf = util.matcher(self.root, "", [pat], [], [])[1]
486 l.append((mf, cmd))
486 l.append((mf, cmd))
487 self.encodepats = l
487 self.encodepats = l
488
488
489 data = self.wopener(filename, 'r').read()
489 data = self.wopener(filename, 'r').read()
490
490
491 for mf, cmd in self.encodepats:
491 for mf, cmd in self.encodepats:
492 if mf(filename):
492 if mf(filename):
493 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
493 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
494 data = util.filter(data, cmd)
494 data = util.filter(data, cmd)
495 break
495 break
496
496
497 return data
497 return data
498
498
499 def wwrite(self, filename, data, fd=None):
499 def wwrite(self, filename, data, fd=None):
500 if self.decodepats == None:
500 if self.decodepats == None:
501 l = []
501 l = []
502 for pat, cmd in self.ui.configitems("decode"):
502 for pat, cmd in self.ui.configitems("decode"):
503 mf = util.matcher(self.root, "", [pat], [], [])[1]
503 mf = util.matcher(self.root, "", [pat], [], [])[1]
504 l.append((mf, cmd))
504 l.append((mf, cmd))
505 self.decodepats = l
505 self.decodepats = l
506
506
507 for mf, cmd in self.decodepats:
507 for mf, cmd in self.decodepats:
508 if mf(filename):
508 if mf(filename):
509 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
509 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
510 data = util.filter(data, cmd)
510 data = util.filter(data, cmd)
511 break
511 break
512
512
513 if fd:
513 if fd:
514 return fd.write(data)
514 return fd.write(data)
515 return self.wopener(filename, 'w').write(data)
515 return self.wopener(filename, 'w').write(data)
516
516
517 def transaction(self):
517 def transaction(self):
518 tr = self.transhandle
518 tr = self.transhandle
519 if tr != None and tr.running():
519 if tr != None and tr.running():
520 return tr.nest()
520 return tr.nest()
521
521
522 # save dirstate for rollback
522 # save dirstate for rollback
523 try:
523 try:
524 ds = self.opener("dirstate").read()
524 ds = self.opener("dirstate").read()
525 except IOError:
525 except IOError:
526 ds = ""
526 ds = ""
527 self.opener("journal.dirstate", "w").write(ds)
527 self.opener("journal.dirstate", "w").write(ds)
528
528
529 renames = [(self.sjoin("journal"), self.sjoin("undo")),
529 renames = [(self.sjoin("journal"), self.sjoin("undo")),
530 (self.join("journal.dirstate"), self.join("undo.dirstate"))]
530 (self.join("journal.dirstate"), self.join("undo.dirstate"))]
531 tr = transaction.transaction(self.ui.warn, self.sopener,
531 tr = transaction.transaction(self.ui.warn, self.sopener,
532 self.sjoin("journal"),
532 self.sjoin("journal"),
533 aftertrans(renames))
533 aftertrans(renames))
534 self.transhandle = tr
534 self.transhandle = tr
535 return tr
535 return tr
536
536
537 def recover(self):
537 def recover(self):
538 l = self.lock()
538 l = self.lock()
539 if os.path.exists(self.sjoin("journal")):
539 if os.path.exists(self.sjoin("journal")):
540 self.ui.status(_("rolling back interrupted transaction\n"))
540 self.ui.status(_("rolling back interrupted transaction\n"))
541 transaction.rollback(self.sopener, self.sjoin("journal"))
541 transaction.rollback(self.sopener, self.sjoin("journal"))
542 self.reload()
542 self.reload()
543 return True
543 return True
544 else:
544 else:
545 self.ui.warn(_("no interrupted transaction available\n"))
545 self.ui.warn(_("no interrupted transaction available\n"))
546 return False
546 return False
547
547
548 def rollback(self, wlock=None):
548 def rollback(self, wlock=None):
549 if not wlock:
549 if not wlock:
550 wlock = self.wlock()
550 wlock = self.wlock()
551 l = self.lock()
551 l = self.lock()
552 if os.path.exists(self.sjoin("undo")):
552 if os.path.exists(self.sjoin("undo")):
553 self.ui.status(_("rolling back last transaction\n"))
553 self.ui.status(_("rolling back last transaction\n"))
554 transaction.rollback(self.sopener, self.sjoin("undo"))
554 transaction.rollback(self.sopener, self.sjoin("undo"))
555 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
555 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
556 self.reload()
556 self.reload()
557 self.wreload()
557 self.wreload()
558 else:
558 else:
559 self.ui.warn(_("no rollback information available\n"))
559 self.ui.warn(_("no rollback information available\n"))
560
560
561 def wreload(self):
561 def wreload(self):
562 self.dirstate.read()
562 self.dirstate.read()
563
563
564 def reload(self):
564 def reload(self):
565 self.changelog.load()
565 self.changelog.load()
566 self.manifest.load()
566 self.manifest.load()
567 self.tagscache = None
567 self.tagscache = None
568 self.nodetagscache = None
568 self.nodetagscache = None
569
569
570 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
570 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
571 desc=None):
571 desc=None):
572 try:
572 try:
573 l = lock.lock(lockname, 0, releasefn, desc=desc)
573 l = lock.lock(lockname, 0, releasefn, desc=desc)
574 except lock.LockHeld, inst:
574 except lock.LockHeld, inst:
575 if not wait:
575 if not wait:
576 raise
576 raise
577 self.ui.warn(_("waiting for lock on %s held by %r\n") %
577 self.ui.warn(_("waiting for lock on %s held by %r\n") %
578 (desc, inst.locker))
578 (desc, inst.locker))
579 # default to 600 seconds timeout
579 # default to 600 seconds timeout
580 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
580 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
581 releasefn, desc=desc)
581 releasefn, desc=desc)
582 if acquirefn:
582 if acquirefn:
583 acquirefn()
583 acquirefn()
584 return l
584 return l
585
585
586 def lock(self, wait=1):
586 def lock(self, wait=1):
587 return self.do_lock(self.sjoin("lock"), wait, acquirefn=self.reload,
587 return self.do_lock(self.sjoin("lock"), wait, acquirefn=self.reload,
588 desc=_('repository %s') % self.origroot)
588 desc=_('repository %s') % self.origroot)
589
589
590 def wlock(self, wait=1):
590 def wlock(self, wait=1):
591 return self.do_lock(self.join("wlock"), wait, self.dirstate.write,
591 return self.do_lock(self.join("wlock"), wait, self.dirstate.write,
592 self.wreload,
592 self.wreload,
593 desc=_('working directory of %s') % self.origroot)
593 desc=_('working directory of %s') % self.origroot)
594
594
595 def filecommit(self, fn, manifest1, manifest2, linkrev, transaction, changelist):
595 def filecommit(self, fn, manifest1, manifest2, linkrev, transaction, changelist):
596 """
596 """
597 commit an individual file as part of a larger transaction
597 commit an individual file as part of a larger transaction
598 """
598 """
599
599
600 t = self.wread(fn)
600 t = self.wread(fn)
601 fl = self.file(fn)
601 fl = self.file(fn)
602 fp1 = manifest1.get(fn, nullid)
602 fp1 = manifest1.get(fn, nullid)
603 fp2 = manifest2.get(fn, nullid)
603 fp2 = manifest2.get(fn, nullid)
604
604
605 meta = {}
605 meta = {}
606 cp = self.dirstate.copied(fn)
606 cp = self.dirstate.copied(fn)
607 if cp:
607 if cp:
608 meta["copy"] = cp
608 meta["copy"] = cp
609 if not manifest2: # not a branch merge
609 if not manifest2: # not a branch merge
610 meta["copyrev"] = hex(manifest1.get(cp, nullid))
610 meta["copyrev"] = hex(manifest1.get(cp, nullid))
611 fp2 = nullid
611 fp2 = nullid
612 elif fp2 != nullid: # copied on remote side
612 elif fp2 != nullid: # copied on remote side
613 meta["copyrev"] = hex(manifest1.get(cp, nullid))
613 meta["copyrev"] = hex(manifest1.get(cp, nullid))
614 elif fp1 != nullid: # copied on local side, reversed
614 elif fp1 != nullid: # copied on local side, reversed
615 meta["copyrev"] = hex(manifest2.get(cp))
615 meta["copyrev"] = hex(manifest2.get(cp))
616 fp2 = nullid
616 fp2 = nullid
617 else: # directory rename
617 else: # directory rename
618 meta["copyrev"] = hex(manifest1.get(cp, nullid))
618 meta["copyrev"] = hex(manifest1.get(cp, nullid))
619 self.ui.debug(_(" %s: copy %s:%s\n") %
619 self.ui.debug(_(" %s: copy %s:%s\n") %
620 (fn, cp, meta["copyrev"]))
620 (fn, cp, meta["copyrev"]))
621 fp1 = nullid
621 fp1 = nullid
622 elif fp2 != nullid:
622 elif fp2 != nullid:
623 # is one parent an ancestor of the other?
623 # is one parent an ancestor of the other?
624 fpa = fl.ancestor(fp1, fp2)
624 fpa = fl.ancestor(fp1, fp2)
625 if fpa == fp1:
625 if fpa == fp1:
626 fp1, fp2 = fp2, nullid
626 fp1, fp2 = fp2, nullid
627 elif fpa == fp2:
627 elif fpa == fp2:
628 fp2 = nullid
628 fp2 = nullid
629
629
630 # is the file unmodified from the parent? report existing entry
630 # is the file unmodified from the parent? report existing entry
631 if fp2 == nullid and not fl.cmp(fp1, t):
631 if fp2 == nullid and not fl.cmp(fp1, t):
632 return fp1
632 return fp1
633
633
634 changelist.append(fn)
634 changelist.append(fn)
635 return fl.add(t, meta, transaction, linkrev, fp1, fp2)
635 return fl.add(t, meta, transaction, linkrev, fp1, fp2)
636
636
637 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
637 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
638 if p1 is None:
638 if p1 is None:
639 p1, p2 = self.dirstate.parents()
639 p1, p2 = self.dirstate.parents()
640 return self.commit(files=files, text=text, user=user, date=date,
640 return self.commit(files=files, text=text, user=user, date=date,
641 p1=p1, p2=p2, wlock=wlock)
641 p1=p1, p2=p2, wlock=wlock)
642
642
643 def commit(self, files=None, text="", user=None, date=None,
643 def commit(self, files=None, text="", user=None, date=None,
644 match=util.always, force=False, lock=None, wlock=None,
644 match=util.always, force=False, lock=None, wlock=None,
645 force_editor=False, p1=None, p2=None, extra={}):
645 force_editor=False, p1=None, p2=None, extra={}):
646
646
647 commit = []
647 commit = []
648 remove = []
648 remove = []
649 changed = []
649 changed = []
650 use_dirstate = (p1 is None) # not rawcommit
650 use_dirstate = (p1 is None) # not rawcommit
651 extra = extra.copy()
651 extra = extra.copy()
652
652
653 if use_dirstate:
653 if use_dirstate:
654 if files:
654 if files:
655 for f in files:
655 for f in files:
656 s = self.dirstate.state(f)
656 s = self.dirstate.state(f)
657 if s in 'nmai':
657 if s in 'nmai':
658 commit.append(f)
658 commit.append(f)
659 elif s == 'r':
659 elif s == 'r':
660 remove.append(f)
660 remove.append(f)
661 else:
661 else:
662 self.ui.warn(_("%s not tracked!\n") % f)
662 self.ui.warn(_("%s not tracked!\n") % f)
663 else:
663 else:
664 changes = self.status(match=match)[:5]
664 changes = self.status(match=match)[:5]
665 modified, added, removed, deleted, unknown = changes
665 modified, added, removed, deleted, unknown = changes
666 commit = modified + added
666 commit = modified + added
667 remove = removed
667 remove = removed
668 else:
668 else:
669 commit = files
669 commit = files
670
670
671 if use_dirstate:
671 if use_dirstate:
672 p1, p2 = self.dirstate.parents()
672 p1, p2 = self.dirstate.parents()
673 update_dirstate = True
673 update_dirstate = True
674 else:
674 else:
675 p1, p2 = p1, p2 or nullid
675 p1, p2 = p1, p2 or nullid
676 update_dirstate = (self.dirstate.parents()[0] == p1)
676 update_dirstate = (self.dirstate.parents()[0] == p1)
677
677
678 c1 = self.changelog.read(p1)
678 c1 = self.changelog.read(p1)
679 c2 = self.changelog.read(p2)
679 c2 = self.changelog.read(p2)
680 m1 = self.manifest.read(c1[0]).copy()
680 m1 = self.manifest.read(c1[0]).copy()
681 m2 = self.manifest.read(c2[0])
681 m2 = self.manifest.read(c2[0])
682
682
683 if use_dirstate:
683 if use_dirstate:
684 branchname = self.workingctx().branch()
684 branchname = self.workingctx().branch()
685 try:
685 try:
686 branchname = branchname.decode('UTF-8').encode('UTF-8')
686 branchname = branchname.decode('UTF-8').encode('UTF-8')
687 except UnicodeDecodeError:
687 except UnicodeDecodeError:
688 raise util.Abort(_('branch name not in UTF-8!'))
688 raise util.Abort(_('branch name not in UTF-8!'))
689 else:
689 else:
690 branchname = ""
690 branchname = ""
691
691
692 if use_dirstate:
692 if use_dirstate:
693 oldname = c1[5].get("branch", "") # stored in UTF-8
693 oldname = c1[5].get("branch", "") # stored in UTF-8
694 if not commit and not remove and not force and p2 == nullid and \
694 if not commit and not remove and not force and p2 == nullid and \
695 branchname == oldname:
695 branchname == oldname:
696 self.ui.status(_("nothing changed\n"))
696 self.ui.status(_("nothing changed\n"))
697 return None
697 return None
698
698
699 xp1 = hex(p1)
699 xp1 = hex(p1)
700 if p2 == nullid: xp2 = ''
700 if p2 == nullid: xp2 = ''
701 else: xp2 = hex(p2)
701 else: xp2 = hex(p2)
702
702
703 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
703 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
704
704
705 if not wlock:
705 if not wlock:
706 wlock = self.wlock()
706 wlock = self.wlock()
707 if not lock:
707 if not lock:
708 lock = self.lock()
708 lock = self.lock()
709 tr = self.transaction()
709 tr = self.transaction()
710
710
711 # check in files
711 # check in files
712 new = {}
712 new = {}
713 linkrev = self.changelog.count()
713 linkrev = self.changelog.count()
714 commit.sort()
714 commit.sort()
715 for f in commit:
715 for f in commit:
716 self.ui.note(f + "\n")
716 self.ui.note(f + "\n")
717 try:
717 try:
718 new[f] = self.filecommit(f, m1, m2, linkrev, tr, changed)
718 new[f] = self.filecommit(f, m1, m2, linkrev, tr, changed)
719 m1.set(f, util.is_exec(self.wjoin(f), m1.execf(f)))
719 m1.set(f, util.is_exec(self.wjoin(f), m1.execf(f)))
720 except IOError:
720 except IOError:
721 if use_dirstate:
721 if use_dirstate:
722 self.ui.warn(_("trouble committing %s!\n") % f)
722 self.ui.warn(_("trouble committing %s!\n") % f)
723 raise
723 raise
724 else:
724 else:
725 remove.append(f)
725 remove.append(f)
726
726
727 # update manifest
727 # update manifest
728 m1.update(new)
728 m1.update(new)
729 remove.sort()
729 remove.sort()
730
730
731 for f in remove:
731 for f in remove:
732 if f in m1:
732 if f in m1:
733 del m1[f]
733 del m1[f]
734 mn = self.manifest.add(m1, tr, linkrev, c1[0], c2[0], (new, remove))
734 mn = self.manifest.add(m1, tr, linkrev, c1[0], c2[0], (new, remove))
735
735
736 # add changeset
736 # add changeset
737 new = new.keys()
737 new = new.keys()
738 new.sort()
738 new.sort()
739
739
740 user = user or self.ui.username()
740 user = user or self.ui.username()
741 if not text or force_editor:
741 if not text or force_editor:
742 edittext = []
742 edittext = []
743 if text:
743 if text:
744 edittext.append(text)
744 edittext.append(text)
745 edittext.append("")
745 edittext.append("")
746 edittext.append("HG: user: %s" % user)
746 edittext.append("HG: user: %s" % user)
747 if p2 != nullid:
747 if p2 != nullid:
748 edittext.append("HG: branch merge")
748 edittext.append("HG: branch merge")
749 edittext.extend(["HG: changed %s" % f for f in changed])
749 edittext.extend(["HG: changed %s" % f for f in changed])
750 edittext.extend(["HG: removed %s" % f for f in remove])
750 edittext.extend(["HG: removed %s" % f for f in remove])
751 if not changed and not remove:
751 if not changed and not remove:
752 edittext.append("HG: no files changed")
752 edittext.append("HG: no files changed")
753 edittext.append("")
753 edittext.append("")
754 # run editor in the repository root
754 # run editor in the repository root
755 olddir = os.getcwd()
755 olddir = os.getcwd()
756 os.chdir(self.root)
756 os.chdir(self.root)
757 text = self.ui.edit("\n".join(edittext), user)
757 text = self.ui.edit("\n".join(edittext), user)
758 os.chdir(olddir)
758 os.chdir(olddir)
759
759
760 lines = [line.rstrip() for line in text.rstrip().splitlines()]
760 lines = [line.rstrip() for line in text.rstrip().splitlines()]
761 while lines and not lines[0]:
761 while lines and not lines[0]:
762 del lines[0]
762 del lines[0]
763 if not lines:
763 if not lines:
764 return None
764 return None
765 text = '\n'.join(lines)
765 text = '\n'.join(lines)
766 if branchname:
766 if branchname:
767 extra["branch"] = branchname
767 extra["branch"] = branchname
768 n = self.changelog.add(mn, changed + remove, text, tr, p1, p2,
768 n = self.changelog.add(mn, changed + remove, text, tr, p1, p2,
769 user, date, extra)
769 user, date, extra)
770 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
770 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
771 parent2=xp2)
771 parent2=xp2)
772 tr.close()
772 tr.close()
773
773
774 if use_dirstate or update_dirstate:
774 if use_dirstate or update_dirstate:
775 self.dirstate.setparents(n)
775 self.dirstate.setparents(n)
776 if use_dirstate:
776 if use_dirstate:
777 self.dirstate.update(new, "n")
777 self.dirstate.update(new, "n")
778 self.dirstate.forget(remove)
778 self.dirstate.forget(remove)
779
779
780 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
780 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
781 return n
781 return n
782
782
783 def walk(self, node=None, files=[], match=util.always, badmatch=None):
783 def walk(self, node=None, files=[], match=util.always, badmatch=None):
784 '''
784 '''
785 walk recursively through the directory tree or a given
785 walk recursively through the directory tree or a given
786 changeset, finding all files matched by the match
786 changeset, finding all files matched by the match
787 function
787 function
788
788
789 results are yielded in a tuple (src, filename), where src
789 results are yielded in a tuple (src, filename), where src
790 is one of:
790 is one of:
791 'f' the file was found in the directory tree
791 'f' the file was found in the directory tree
792 'm' the file was only in the dirstate and not in the tree
792 'm' the file was only in the dirstate and not in the tree
793 'b' file was not found and matched badmatch
793 'b' file was not found and matched badmatch
794 '''
794 '''
795
795
796 if node:
796 if node:
797 fdict = dict.fromkeys(files)
797 fdict = dict.fromkeys(files)
798 for fn in self.manifest.read(self.changelog.read(node)[0]):
798 for fn in self.manifest.read(self.changelog.read(node)[0]):
799 for ffn in fdict:
799 for ffn in fdict:
800 # match if the file is the exact name or a directory
800 # match if the file is the exact name or a directory
801 if ffn == fn or fn.startswith("%s/" % ffn):
801 if ffn == fn or fn.startswith("%s/" % ffn):
802 del fdict[ffn]
802 del fdict[ffn]
803 break
803 break
804 if match(fn):
804 if match(fn):
805 yield 'm', fn
805 yield 'm', fn
806 for fn in fdict:
806 for fn in fdict:
807 if badmatch and badmatch(fn):
807 if badmatch and badmatch(fn):
808 if match(fn):
808 if match(fn):
809 yield 'b', fn
809 yield 'b', fn
810 else:
810 else:
811 self.ui.warn(_('%s: No such file in rev %s\n') % (
811 self.ui.warn(_('%s: No such file in rev %s\n') % (
812 util.pathto(self.getcwd(), fn), short(node)))
812 util.pathto(self.getcwd(), fn), short(node)))
813 else:
813 else:
814 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
814 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
815 yield src, fn
815 yield src, fn
816
816
    def status(self, node1=None, node2=None, files=[], match=util.always,
               wlock=None, list_ignored=False, list_clean=False):
        """return status of files between two nodes or node and working directory

        If node1 is None, use the first dirstate parent instead.
        If node2 is None, compare node1 with working directory.

        Returns a 7-tuple of sorted lists:
        (modified, added, removed, deleted, unknown, ignored, clean)

        NOTE(review): ``files=[]`` is a mutable default argument; it appears
        to be treated as read-only here, but confirm no callee mutates it.
        """

        def fcmp(fn, mf):
            # compare working-copy contents of fn against the filelog
            # revision recorded in manifest mf; true when they differ
            t1 = self.wread(fn)
            return self.file(fn).cmp(mf.get(fn, nullid), t1)

        def mfmatches(node):
            # manifest of the given changeset, restricted to files
            # accepted by the match function
            change = self.changelog.read(node)
            mf = self.manifest.read(change[0]).copy()
            for fn in mf.keys():
                if not match(fn):
                    del mf[fn]
            return mf

        modified, added, removed, deleted, unknown = [], [], [], [], []
        ignored, clean = [], []

        # "compareworking": diffing the working directory against its
        # first parent, the cheap and common case
        compareworking = False
        if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
            compareworking = True

        if not compareworking:
            # read the manifest from node1 before the manifest from node2,
            # so that we'll hit the manifest cache if we're going through
            # all the revisions in parent->child order.
            mf1 = mfmatches(node1)

        # are we comparing the working directory?
        if not node2:
            if not wlock:
                # best-effort lock: without it we simply skip writing
                # back dirstate updates for files found clean below
                try:
                    wlock = self.wlock(wait=0)
                except lock.LockException:
                    wlock = None
            (lookup, modified, added, removed, deleted, unknown,
             ignored, clean) = self.dirstate.status(files, match,
                                                    list_ignored, list_clean)

            # are we comparing working dir against its parent?
            if compareworking:
                if lookup:
                    # do a full compare of any files that might have changed
                    mf2 = mfmatches(self.dirstate.parents()[0])
                    for f in lookup:
                        if fcmp(f, mf2):
                            modified.append(f)
                        else:
                            clean.append(f)
                            if wlock is not None:
                                # record the file as clean so future
                                # status calls can skip the content compare
                                self.dirstate.update([f], "n")
            else:
                # we are comparing working dir against non-parent
                # generate a pseudo-manifest for the working dir
                # XXX: create it in dirstate.py ?
                mf2 = mfmatches(self.dirstate.parents()[0])
                for f in lookup + modified + added:
                    # empty hash forces a content compare in the loop below
                    mf2[f] = ""
                    mf2.set(f, execf=util.is_exec(self.wjoin(f), mf2.execf(f)))
                for f in removed:
                    if f in mf2:
                        del mf2[f]
        else:
            # we are comparing two revisions
            mf2 = mfmatches(node2)

        if not compareworking:
            # flush lists from dirstate before comparing manifests
            modified, added, clean = [], [], []

            # make sure to sort the files so we talk to the disk in a
            # reasonable order
            mf2keys = mf2.keys()
            mf2keys.sort()
            for fn in mf2keys:
                if mf1.has_key(fn):
                    # present on both sides: modified when flags differ or
                    # hashes differ (empty mf2 hash means "compare contents")
                    if mf1.flags(fn) != mf2.flags(fn) or \
                       (mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1))):
                        modified.append(fn)
                    elif list_clean:
                        clean.append(fn)
                    del mf1[fn]
                else:
                    added.append(fn)

            # whatever is left in mf1 exists only on the node1 side
            removed = mf1.keys()

        # sort and return results:
        for l in modified, added, removed, deleted, unknown, ignored, clean:
            l.sort()
        return (modified, added, removed, deleted, unknown, ignored, clean)
913
913
914 def add(self, list, wlock=None):
914 def add(self, list, wlock=None):
915 if not wlock:
915 if not wlock:
916 wlock = self.wlock()
916 wlock = self.wlock()
917 for f in list:
917 for f in list:
918 p = self.wjoin(f)
918 p = self.wjoin(f)
919 if not os.path.exists(p):
919 if not os.path.exists(p):
920 self.ui.warn(_("%s does not exist!\n") % f)
920 self.ui.warn(_("%s does not exist!\n") % f)
921 elif not os.path.isfile(p):
921 elif not os.path.isfile(p):
922 self.ui.warn(_("%s not added: only files supported currently\n")
922 self.ui.warn(_("%s not added: only files supported currently\n")
923 % f)
923 % f)
924 elif self.dirstate.state(f) in 'an':
924 elif self.dirstate.state(f) in 'an':
925 self.ui.warn(_("%s already tracked!\n") % f)
925 self.ui.warn(_("%s already tracked!\n") % f)
926 else:
926 else:
927 self.dirstate.update([f], "a")
927 self.dirstate.update([f], "a")
928
928
929 def forget(self, list, wlock=None):
929 def forget(self, list, wlock=None):
930 if not wlock:
930 if not wlock:
931 wlock = self.wlock()
931 wlock = self.wlock()
932 for f in list:
932 for f in list:
933 if self.dirstate.state(f) not in 'ai':
933 if self.dirstate.state(f) not in 'ai':
934 self.ui.warn(_("%s not added!\n") % f)
934 self.ui.warn(_("%s not added!\n") % f)
935 else:
935 else:
936 self.dirstate.forget([f])
936 self.dirstate.forget([f])
937
937
938 def remove(self, list, unlink=False, wlock=None):
938 def remove(self, list, unlink=False, wlock=None):
939 if unlink:
939 if unlink:
940 for f in list:
940 for f in list:
941 try:
941 try:
942 util.unlink(self.wjoin(f))
942 util.unlink(self.wjoin(f))
943 except OSError, inst:
943 except OSError, inst:
944 if inst.errno != errno.ENOENT:
944 if inst.errno != errno.ENOENT:
945 raise
945 raise
946 if not wlock:
946 if not wlock:
947 wlock = self.wlock()
947 wlock = self.wlock()
948 for f in list:
948 for f in list:
949 p = self.wjoin(f)
949 p = self.wjoin(f)
950 if os.path.exists(p):
950 if os.path.exists(p):
951 self.ui.warn(_("%s still exists!\n") % f)
951 self.ui.warn(_("%s still exists!\n") % f)
952 elif self.dirstate.state(f) == 'a':
952 elif self.dirstate.state(f) == 'a':
953 self.dirstate.forget([f])
953 self.dirstate.forget([f])
954 elif f not in self.dirstate:
954 elif f not in self.dirstate:
955 self.ui.warn(_("%s not tracked!\n") % f)
955 self.ui.warn(_("%s not tracked!\n") % f)
956 else:
956 else:
957 self.dirstate.update([f], "r")
957 self.dirstate.update([f], "r")
958
958
959 def undelete(self, list, wlock=None):
959 def undelete(self, list, wlock=None):
960 p = self.dirstate.parents()[0]
960 p = self.dirstate.parents()[0]
961 mn = self.changelog.read(p)[0]
961 mn = self.changelog.read(p)[0]
962 m = self.manifest.read(mn)
962 m = self.manifest.read(mn)
963 if not wlock:
963 if not wlock:
964 wlock = self.wlock()
964 wlock = self.wlock()
965 for f in list:
965 for f in list:
966 if self.dirstate.state(f) not in "r":
966 if self.dirstate.state(f) not in "r":
967 self.ui.warn("%s not removed!\n" % f)
967 self.ui.warn("%s not removed!\n" % f)
968 else:
968 else:
969 t = self.file(f).read(m[f])
969 t = self.file(f).read(m[f])
970 self.wwrite(f, t)
970 self.wwrite(f, t)
971 util.set_exec(self.wjoin(f), m.execf(f))
971 util.set_exec(self.wjoin(f), m.execf(f))
972 self.dirstate.update([f], "n")
972 self.dirstate.update([f], "n")
973
973
974 def copy(self, source, dest, wlock=None):
974 def copy(self, source, dest, wlock=None):
975 p = self.wjoin(dest)
975 p = self.wjoin(dest)
976 if not os.path.exists(p):
976 if not os.path.exists(p):
977 self.ui.warn(_("%s does not exist!\n") % dest)
977 self.ui.warn(_("%s does not exist!\n") % dest)
978 elif not os.path.isfile(p):
978 elif not os.path.isfile(p):
979 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
979 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
980 else:
980 else:
981 if not wlock:
981 if not wlock:
982 wlock = self.wlock()
982 wlock = self.wlock()
983 if self.dirstate.state(dest) == '?':
983 if self.dirstate.state(dest) == '?':
984 self.dirstate.update([dest], "a")
984 self.dirstate.update([dest], "a")
985 self.dirstate.copy(source, dest)
985 self.dirstate.copy(source, dest)
986
986
987 def heads(self, start=None):
987 def heads(self, start=None):
988 heads = self.changelog.heads(start)
988 heads = self.changelog.heads(start)
989 # sort the output in rev descending order
989 # sort the output in rev descending order
990 heads = [(-self.changelog.rev(h), h) for h in heads]
990 heads = [(-self.changelog.rev(h), h) for h in heads]
991 heads.sort()
991 heads.sort()
992 return [n for (r, n) in heads]
992 return [n for (r, n) in heads]
993
993
994 def branches(self, nodes):
994 def branches(self, nodes):
995 if not nodes:
995 if not nodes:
996 nodes = [self.changelog.tip()]
996 nodes = [self.changelog.tip()]
997 b = []
997 b = []
998 for n in nodes:
998 for n in nodes:
999 t = n
999 t = n
1000 while 1:
1000 while 1:
1001 p = self.changelog.parents(n)
1001 p = self.changelog.parents(n)
1002 if p[1] != nullid or p[0] == nullid:
1002 if p[1] != nullid or p[0] == nullid:
1003 b.append((t, n, p[0], p[1]))
1003 b.append((t, n, p[0], p[1]))
1004 break
1004 break
1005 n = p[0]
1005 n = p[0]
1006 return b
1006 return b
1007
1007
1008 def between(self, pairs):
1008 def between(self, pairs):
1009 r = []
1009 r = []
1010
1010
1011 for top, bottom in pairs:
1011 for top, bottom in pairs:
1012 n, l, i = top, [], 0
1012 n, l, i = top, [], 0
1013 f = 1
1013 f = 1
1014
1014
1015 while n != bottom:
1015 while n != bottom:
1016 p = self.changelog.parents(n)[0]
1016 p = self.changelog.parents(n)[0]
1017 if i == f:
1017 if i == f:
1018 l.append(n)
1018 l.append(n)
1019 f = f * 2
1019 f = f * 2
1020 n = p
1020 n = p
1021 i += 1
1021 i += 1
1022
1022
1023 r.append(l)
1023 r.append(l)
1024
1024
1025 return r
1025 return r
1026
1026
    def findincoming(self, remote, base=None, heads=None, force=False):
        """Return list of roots of the subsets of missing nodes from remote

        If base dict is specified, assume that these nodes and their parents
        exist on the remote side and that no child of a node of base exists
        in both remote and self.
        Furthermore base will be updated to include the nodes that exists
        in self and remote but no children exists in self and remote.
        If a list of heads is specified, return only nodes which are heads
        or ancestors of these heads.

        All the ancestors of base are in self and in remote.
        All the descendants of the list returned are missing in self.
        (and so we know that the rest of the nodes are missing in remote, see
        outgoing)
        """
        # nodemap doubles as a "do we have this node?" membership test
        m = self.changelog.nodemap
        search = []
        fetch = {}
        seen = {}
        seenbranch = {}
        if base == None:
            base = {}

        if not heads:
            heads = remote.heads()

        if self.changelog.tip() == nullid:
            # empty local repo: everything the remote has is incoming
            base[nullid] = 1
            if heads != [nullid]:
                return [nullid]
            return []

        # assume we're closer to the tip than the root
        # and start by examining the heads
        self.ui.status(_("searching for changes\n"))

        unknown = []
        for h in heads:
            if h not in m:
                unknown.append(h)
            else:
                base[h] = 1

        if not unknown:
            # we already have every remote head: nothing incoming
            return []

        req = dict.fromkeys(unknown)
        reqcnt = 0

        # search through remote branches
        # a 'branch' here is a linear segment of history, with four parts:
        # head, root, first parent, second parent
        # (a branch always has two parents (or none) by definition)
        unknown = remote.branches(unknown)
        while unknown:
            r = []
            while unknown:
                n = unknown.pop(0)
                if n[0] in seen:
                    continue

                self.ui.debug(_("examining %s:%s\n")
                              % (short(n[0]), short(n[1])))
                if n[0] == nullid: # found the end of the branch
                    pass
                elif n in seenbranch:
                    self.ui.debug(_("branch already found\n"))
                    continue
                elif n[1] and n[1] in m: # do we know the base?
                    self.ui.debug(_("found incomplete branch %s:%s\n")
                                  % (short(n[0]), short(n[1])))
                    search.append(n) # schedule branch range for scanning
                    seenbranch[n] = 1
                else:
                    if n[1] not in seen and n[1] not in fetch:
                        if n[2] in m and n[3] in m:
                            # branch root whose parents we both have:
                            # the root itself is the earliest missing node
                            self.ui.debug(_("found new changeset %s\n") %
                                          short(n[1]))
                            fetch[n[1]] = 1 # earliest unknown
                        for p in n[2:4]:
                            if p in m:
                                base[p] = 1 # latest known

                    # queue unknown parents for the next batched request
                    for p in n[2:4]:
                        if p not in req and p not in m:
                            r.append(p)
                            req[p] = 1
                seen[n[0]] = 1

            if r:
                reqcnt += 1
                self.ui.debug(_("request %d: %s\n") %
                              (reqcnt, " ".join(map(short, r))))
                # batch branch queries ten nodes at a time
                for p in xrange(0, len(r), 10):
                    for b in remote.branches(r[p:p+10]):
                        self.ui.debug(_("received %s:%s\n") %
                                      (short(b[0]), short(b[1])))
                        unknown.append(b)

        # do binary search on the branches we found
        while search:
            n = search.pop(0)
            reqcnt += 1
            # skip list of nodes between n[0] and n[1] at distances 1,2,4,...
            l = remote.between([(n[0], n[1])])[0]
            l.append(n[1])
            p = n[0]
            f = 1
            for i in l:
                self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
                if i in m:
                    if f <= 2:
                        # gap of width <= 2: p is the first missing node
                        self.ui.debug(_("found new branch changeset %s\n") %
                                      short(p))
                        fetch[p] = 1
                        base[i] = 1
                    else:
                        # narrow the search to the (p, i) sub-range
                        self.ui.debug(_("narrowed branch search to %s:%s\n")
                                      % (short(p), short(i)))
                        search.append((p, i))
                    break
                p, f = i, f * 2

        # sanity check our fetch list
        for f in fetch.keys():
            if f in m:
                raise repo.RepoError(_("already have changeset ") + short(f[:4]))

        if base.keys() == [nullid]:
            # no common ancestry at all with the remote
            if force:
                self.ui.warn(_("warning: repository is unrelated\n"))
            else:
                raise util.Abort(_("repository is unrelated"))

        self.ui.debug(_("found new changesets starting at ") +
                      " ".join([short(f) for f in fetch]) + "\n")

        self.ui.debug(_("%d total queries\n") % reqcnt)

        return fetch.keys()
1167
1167
    def findoutgoing(self, remote, base=None, heads=None, force=False):
        """Return list of nodes that are roots of subsets not in remote

        If base dict is specified, assume that these nodes and their parents
        exist on the remote side.
        If a list of heads is specified, return only nodes which are heads
        or ancestors of these heads, and return a second element which
        contains all remote heads which get new children.
        """
        if base == None:
            # no common-set provided: run discovery to populate base
            base = {}
            self.findincoming(remote, base, heads, force=force)

        self.ui.debug(_("common changesets up to ")
                      + " ".join(map(short, base.keys())) + "\n")

        # start with every local node, then prune what the remote has
        remain = dict.fromkeys(self.changelog.nodemap)

        # prune everything remote has from the tree
        del remain[nullid]
        remove = base.keys()
        while remove:
            n = remove.pop(0)
            if n in remain:
                del remain[n]
                for p in self.changelog.parents(n):
                    remove.append(p)

        # find every node whose parents have been pruned
        subset = []
        # find every remote head that will get new children
        updated_heads = {}
        for n in remain:
            p1, p2 = self.changelog.parents(n)
            # a node with no remaining parent is a root of the outgoing set
            if p1 not in remain and p2 not in remain:
                subset.append(n)
            if heads:
                if p1 in heads:
                    updated_heads[p1] = True
                if p2 in heads:
                    updated_heads[p2] = True

        # this is the set of all roots we have to push
        if heads:
            return subset, updated_heads.keys()
        else:
            return subset
1215
1215
1216 def pull(self, remote, heads=None, force=False, lock=None):
1216 def pull(self, remote, heads=None, force=False, lock=None):
1217 mylock = False
1217 mylock = False
1218 if not lock:
1218 if not lock:
1219 lock = self.lock()
1219 lock = self.lock()
1220 mylock = True
1220 mylock = True
1221
1221
1222 try:
1222 try:
1223 fetch = self.findincoming(remote, force=force)
1223 fetch = self.findincoming(remote, force=force)
1224 if fetch == [nullid]:
1224 if fetch == [nullid]:
1225 self.ui.status(_("requesting all changes\n"))
1225 self.ui.status(_("requesting all changes\n"))
1226
1226
1227 if not fetch:
1227 if not fetch:
1228 self.ui.status(_("no changes found\n"))
1228 self.ui.status(_("no changes found\n"))
1229 return 0
1229 return 0
1230
1230
1231 if heads is None:
1231 if heads is None:
1232 cg = remote.changegroup(fetch, 'pull')
1232 cg = remote.changegroup(fetch, 'pull')
1233 else:
1233 else:
1234 if 'changegroupsubset' not in remote.capabilities:
1234 if 'changegroupsubset' not in remote.capabilities:
1235 raise util.Abort(_("Partial pull cannot be done because other repository doesn't support changegroupsubset."))
1235 raise util.Abort(_("Partial pull cannot be done because other repository doesn't support changegroupsubset."))
1236 cg = remote.changegroupsubset(fetch, heads, 'pull')
1236 cg = remote.changegroupsubset(fetch, heads, 'pull')
1237 return self.addchangegroup(cg, 'pull', remote.url())
1237 return self.addchangegroup(cg, 'pull', remote.url())
1238 finally:
1238 finally:
1239 if mylock:
1239 if mylock:
1240 lock.release()
1240 lock.release()
1241
1241
1242 def push(self, remote, force=False, revs=None):
1242 def push(self, remote, force=False, revs=None):
1243 # there are two ways to push to remote repo:
1243 # there are two ways to push to remote repo:
1244 #
1244 #
1245 # addchangegroup assumes local user can lock remote
1245 # addchangegroup assumes local user can lock remote
1246 # repo (local filesystem, old ssh servers).
1246 # repo (local filesystem, old ssh servers).
1247 #
1247 #
1248 # unbundle assumes local user cannot lock remote repo (new ssh
1248 # unbundle assumes local user cannot lock remote repo (new ssh
1249 # servers, http servers).
1249 # servers, http servers).
1250
1250
1251 if remote.capable('unbundle'):
1251 if remote.capable('unbundle'):
1252 return self.push_unbundle(remote, force, revs)
1252 return self.push_unbundle(remote, force, revs)
1253 return self.push_addchangegroup(remote, force, revs)
1253 return self.push_addchangegroup(remote, force, revs)
1254
1254
    def prepush(self, remote, force, revs):
        """Compute the changegroup to push to remote.

        Returns (changegroup, remote_heads) on success, or (None, 1) when
        there is nothing to push or the push would create new remote heads
        without force.
        """
        base = {}
        remote_heads = remote.heads()
        # discovery: fills base with the common set; inc is non-empty when
        # the remote has changes we lack
        inc = self.findincoming(remote, base, remote_heads, force=force)

        update, updated_heads = self.findoutgoing(remote, base, remote_heads)
        if revs is not None:
            msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
        else:
            bases, heads = update, self.changelog.heads()

        if not bases:
            self.ui.status(_("no changes found\n"))
            return None, 1
        elif not force:
            # check if we're creating new remote heads
            # to be a remote head after push, node must be either
            # - unknown locally
            # - a local outgoing head descended from update
            # - a remote head that's known locally and not
            #   ancestral to an outgoing head

            warn = 0

            if remote_heads == [nullid]:
                # empty remote: any heads are fine
                warn = 0
            elif not revs and len(heads) > len(remote_heads):
                warn = 1
            else:
                # simulate the post-push remote head set
                newheads = list(heads)
                for r in remote_heads:
                    if r in self.changelog.nodemap:
                        desc = self.changelog.heads(r, heads)
                        l = [h for h in heads if h in desc]
                        if not l:
                            # r stays a head: no pushed head descends from it
                            newheads.append(r)
                    else:
                        # unknown locally: it remains a remote head
                        newheads.append(r)
                if len(newheads) > len(remote_heads):
                    warn = 1

            if warn:
                self.ui.warn(_("abort: push creates new remote branches!\n"))
                self.ui.status(_("(did you forget to merge?"
                                 " use push -f to force)\n"))
                return None, 1
            elif inc:
                self.ui.warn(_("note: unsynced remote changes!\n"))


        if revs is None:
            cg = self.changegroup(update, 'push')
        else:
            cg = self.changegroupsubset(update, revs, 'push')
        return cg, remote_heads
1310
1310
1311 def push_addchangegroup(self, remote, force, revs):
1311 def push_addchangegroup(self, remote, force, revs):
1312 lock = remote.lock()
1312 lock = remote.lock()
1313
1313
1314 ret = self.prepush(remote, force, revs)
1314 ret = self.prepush(remote, force, revs)
1315 if ret[0] is not None:
1315 if ret[0] is not None:
1316 cg, remote_heads = ret
1316 cg, remote_heads = ret
1317 return remote.addchangegroup(cg, 'push', self.url())
1317 return remote.addchangegroup(cg, 'push', self.url())
1318 return ret[1]
1318 return ret[1]
1319
1319
1320 def push_unbundle(self, remote, force, revs):
1320 def push_unbundle(self, remote, force, revs):
1321 # local repo finds heads on server, finds out what revs it
1321 # local repo finds heads on server, finds out what revs it
1322 # must push. once revs transferred, if server finds it has
1322 # must push. once revs transferred, if server finds it has
1323 # different heads (someone else won commit/push race), server
1323 # different heads (someone else won commit/push race), server
1324 # aborts.
1324 # aborts.
1325
1325
1326 ret = self.prepush(remote, force, revs)
1326 ret = self.prepush(remote, force, revs)
1327 if ret[0] is not None:
1327 if ret[0] is not None:
1328 cg, remote_heads = ret
1328 cg, remote_heads = ret
1329 if force: remote_heads = ['force']
1329 if force: remote_heads = ['force']
1330 return remote.unbundle(cg, remote_heads, 'push')
1330 return remote.unbundle(cg, remote_heads, 'push')
1331 return ret[1]
1331 return ret[1]
1332
1332
1333 def changegroupinfo(self, nodes):
1333 def changegroupinfo(self, nodes):
1334 self.ui.note(_("%d changesets found\n") % len(nodes))
1334 self.ui.note(_("%d changesets found\n") % len(nodes))
1335 if self.ui.debugflag:
1335 if self.ui.debugflag:
1336 self.ui.debug(_("List of changesets:\n"))
1336 self.ui.debug(_("List of changesets:\n"))
1337 for node in nodes:
1337 for node in nodes:
1338 self.ui.debug("%s\n" % hex(node))
1338 self.ui.debug("%s\n" % hex(node))
1339
1339
1340 def changegroupsubset(self, bases, heads, source):
1340 def changegroupsubset(self, bases, heads, source):
1341 """This function generates a changegroup consisting of all the nodes
1341 """This function generates a changegroup consisting of all the nodes
1342 that are descendents of any of the bases, and ancestors of any of
1342 that are descendents of any of the bases, and ancestors of any of
1343 the heads.
1343 the heads.
1344
1344
1345 It is fairly complex as determining which filenodes and which
1345 It is fairly complex as determining which filenodes and which
1346 manifest nodes need to be included for the changeset to be complete
1346 manifest nodes need to be included for the changeset to be complete
1347 is non-trivial.
1347 is non-trivial.
1348
1348
1349 Another wrinkle is doing the reverse, figuring out which changeset in
1349 Another wrinkle is doing the reverse, figuring out which changeset in
1350 the changegroup a particular filenode or manifestnode belongs to."""
1350 the changegroup a particular filenode or manifestnode belongs to."""
1351
1351
1352 self.hook('preoutgoing', throw=True, source=source)
1352 self.hook('preoutgoing', throw=True, source=source)
1353
1353
1354 # Set up some initial variables
1354 # Set up some initial variables
1355 # Make it easy to refer to self.changelog
1355 # Make it easy to refer to self.changelog
1356 cl = self.changelog
1356 cl = self.changelog
1357 # msng is short for missing - compute the list of changesets in this
1357 # msng is short for missing - compute the list of changesets in this
1358 # changegroup.
1358 # changegroup.
1359 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1359 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1360 self.changegroupinfo(msng_cl_lst)
1360 self.changegroupinfo(msng_cl_lst)
1361 # Some bases may turn out to be superfluous, and some heads may be
1361 # Some bases may turn out to be superfluous, and some heads may be
1362 # too. nodesbetween will return the minimal set of bases and heads
1362 # too. nodesbetween will return the minimal set of bases and heads
1363 # necessary to re-create the changegroup.
1363 # necessary to re-create the changegroup.
1364
1364
1365 # Known heads are the list of heads that it is assumed the recipient
1365 # Known heads are the list of heads that it is assumed the recipient
1366 # of this changegroup will know about.
1366 # of this changegroup will know about.
1367 knownheads = {}
1367 knownheads = {}
1368 # We assume that all parents of bases are known heads.
1368 # We assume that all parents of bases are known heads.
1369 for n in bases:
1369 for n in bases:
1370 for p in cl.parents(n):
1370 for p in cl.parents(n):
1371 if p != nullid:
1371 if p != nullid:
1372 knownheads[p] = 1
1372 knownheads[p] = 1
1373 knownheads = knownheads.keys()
1373 knownheads = knownheads.keys()
1374 if knownheads:
1374 if knownheads:
1375 # Now that we know what heads are known, we can compute which
1375 # Now that we know what heads are known, we can compute which
1376 # changesets are known. The recipient must know about all
1376 # changesets are known. The recipient must know about all
1377 # changesets required to reach the known heads from the null
1377 # changesets required to reach the known heads from the null
1378 # changeset.
1378 # changeset.
1379 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1379 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1380 junk = None
1380 junk = None
1381 # Transform the list into an ersatz set.
1381 # Transform the list into an ersatz set.
1382 has_cl_set = dict.fromkeys(has_cl_set)
1382 has_cl_set = dict.fromkeys(has_cl_set)
1383 else:
1383 else:
1384 # If there were no known heads, the recipient cannot be assumed to
1384 # If there were no known heads, the recipient cannot be assumed to
1385 # know about any changesets.
1385 # know about any changesets.
1386 has_cl_set = {}
1386 has_cl_set = {}
1387
1387
1388 # Make it easy to refer to self.manifest
1388 # Make it easy to refer to self.manifest
1389 mnfst = self.manifest
1389 mnfst = self.manifest
1390 # We don't know which manifests are missing yet
1390 # We don't know which manifests are missing yet
1391 msng_mnfst_set = {}
1391 msng_mnfst_set = {}
1392 # Nor do we know which filenodes are missing.
1392 # Nor do we know which filenodes are missing.
1393 msng_filenode_set = {}
1393 msng_filenode_set = {}
1394
1394
1395 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1395 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1396 junk = None
1396 junk = None
1397
1397
1398 # A changeset always belongs to itself, so the changenode lookup
1398 # A changeset always belongs to itself, so the changenode lookup
1399 # function for a changenode is identity.
1399 # function for a changenode is identity.
1400 def identity(x):
1400 def identity(x):
1401 return x
1401 return x
1402
1402
1403 # A function generating function. Sets up an environment for the
1403 # A function generating function. Sets up an environment for the
1404 # inner function.
1404 # inner function.
1405 def cmp_by_rev_func(revlog):
1405 def cmp_by_rev_func(revlog):
1406 # Compare two nodes by their revision number in the environment's
1406 # Compare two nodes by their revision number in the environment's
1407 # revision history. Since the revision number both represents the
1407 # revision history. Since the revision number both represents the
1408 # most efficient order to read the nodes in, and represents a
1408 # most efficient order to read the nodes in, and represents a
1409 # topological sorting of the nodes, this function is often useful.
1409 # topological sorting of the nodes, this function is often useful.
1410 def cmp_by_rev(a, b):
1410 def cmp_by_rev(a, b):
1411 return cmp(revlog.rev(a), revlog.rev(b))
1411 return cmp(revlog.rev(a), revlog.rev(b))
1412 return cmp_by_rev
1412 return cmp_by_rev
1413
1413
1414 # If we determine that a particular file or manifest node must be a
1414 # If we determine that a particular file or manifest node must be a
1415 # node that the recipient of the changegroup will already have, we can
1415 # node that the recipient of the changegroup will already have, we can
1416 # also assume the recipient will have all the parents. This function
1416 # also assume the recipient will have all the parents. This function
1417 # prunes them from the set of missing nodes.
1417 # prunes them from the set of missing nodes.
1418 def prune_parents(revlog, hasset, msngset):
1418 def prune_parents(revlog, hasset, msngset):
1419 haslst = hasset.keys()
1419 haslst = hasset.keys()
1420 haslst.sort(cmp_by_rev_func(revlog))
1420 haslst.sort(cmp_by_rev_func(revlog))
1421 for node in haslst:
1421 for node in haslst:
1422 parentlst = [p for p in revlog.parents(node) if p != nullid]
1422 parentlst = [p for p in revlog.parents(node) if p != nullid]
1423 while parentlst:
1423 while parentlst:
1424 n = parentlst.pop()
1424 n = parentlst.pop()
1425 if n not in hasset:
1425 if n not in hasset:
1426 hasset[n] = 1
1426 hasset[n] = 1
1427 p = [p for p in revlog.parents(n) if p != nullid]
1427 p = [p for p in revlog.parents(n) if p != nullid]
1428 parentlst.extend(p)
1428 parentlst.extend(p)
1429 for n in hasset:
1429 for n in hasset:
1430 msngset.pop(n, None)
1430 msngset.pop(n, None)
1431
1431
1432 # This is a function generating function used to set up an environment
1432 # This is a function generating function used to set up an environment
1433 # for the inner function to execute in.
1433 # for the inner function to execute in.
1434 def manifest_and_file_collector(changedfileset):
1434 def manifest_and_file_collector(changedfileset):
1435 # This is an information gathering function that gathers
1435 # This is an information gathering function that gathers
1436 # information from each changeset node that goes out as part of
1436 # information from each changeset node that goes out as part of
1437 # the changegroup. The information gathered is a list of which
1437 # the changegroup. The information gathered is a list of which
1438 # manifest nodes are potentially required (the recipient may
1438 # manifest nodes are potentially required (the recipient may
1439 # already have them) and total list of all files which were
1439 # already have them) and total list of all files which were
1440 # changed in any changeset in the changegroup.
1440 # changed in any changeset in the changegroup.
1441 #
1441 #
1442 # We also remember the first changenode we saw any manifest
1442 # We also remember the first changenode we saw any manifest
1443 # referenced by so we can later determine which changenode 'owns'
1443 # referenced by so we can later determine which changenode 'owns'
1444 # the manifest.
1444 # the manifest.
1445 def collect_manifests_and_files(clnode):
1445 def collect_manifests_and_files(clnode):
1446 c = cl.read(clnode)
1446 c = cl.read(clnode)
1447 for f in c[3]:
1447 for f in c[3]:
1448 # This is to make sure we only have one instance of each
1448 # This is to make sure we only have one instance of each
1449 # filename string for each filename.
1449 # filename string for each filename.
1450 changedfileset.setdefault(f, f)
1450 changedfileset.setdefault(f, f)
1451 msng_mnfst_set.setdefault(c[0], clnode)
1451 msng_mnfst_set.setdefault(c[0], clnode)
1452 return collect_manifests_and_files
1452 return collect_manifests_and_files
1453
1453
1454 # Figure out which manifest nodes (of the ones we think might be part
1454 # Figure out which manifest nodes (of the ones we think might be part
1455 # of the changegroup) the recipient must know about and remove them
1455 # of the changegroup) the recipient must know about and remove them
1456 # from the changegroup.
1456 # from the changegroup.
1457 def prune_manifests():
1457 def prune_manifests():
1458 has_mnfst_set = {}
1458 has_mnfst_set = {}
1459 for n in msng_mnfst_set:
1459 for n in msng_mnfst_set:
1460 # If a 'missing' manifest thinks it belongs to a changenode
1460 # If a 'missing' manifest thinks it belongs to a changenode
1461 # the recipient is assumed to have, obviously the recipient
1461 # the recipient is assumed to have, obviously the recipient
1462 # must have that manifest.
1462 # must have that manifest.
1463 linknode = cl.node(mnfst.linkrev(n))
1463 linknode = cl.node(mnfst.linkrev(n))
1464 if linknode in has_cl_set:
1464 if linknode in has_cl_set:
1465 has_mnfst_set[n] = 1
1465 has_mnfst_set[n] = 1
1466 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1466 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1467
1467
1468 # Use the information collected in collect_manifests_and_files to say
1468 # Use the information collected in collect_manifests_and_files to say
1469 # which changenode any manifestnode belongs to.
1469 # which changenode any manifestnode belongs to.
1470 def lookup_manifest_link(mnfstnode):
1470 def lookup_manifest_link(mnfstnode):
1471 return msng_mnfst_set[mnfstnode]
1471 return msng_mnfst_set[mnfstnode]
1472
1472
1473 # A function generating function that sets up the initial environment
1473 # A function generating function that sets up the initial environment
1474 # the inner function.
1474 # the inner function.
1475 def filenode_collector(changedfiles):
1475 def filenode_collector(changedfiles):
1476 next_rev = [0]
1476 next_rev = [0]
1477 # This gathers information from each manifestnode included in the
1477 # This gathers information from each manifestnode included in the
1478 # changegroup about which filenodes the manifest node references
1478 # changegroup about which filenodes the manifest node references
1479 # so we can include those in the changegroup too.
1479 # so we can include those in the changegroup too.
1480 #
1480 #
1481 # It also remembers which changenode each filenode belongs to. It
1481 # It also remembers which changenode each filenode belongs to. It
1482 # does this by assuming the a filenode belongs to the changenode
1482 # does this by assuming the a filenode belongs to the changenode
1483 # the first manifest that references it belongs to.
1483 # the first manifest that references it belongs to.
1484 def collect_msng_filenodes(mnfstnode):
1484 def collect_msng_filenodes(mnfstnode):
1485 r = mnfst.rev(mnfstnode)
1485 r = mnfst.rev(mnfstnode)
1486 if r == next_rev[0]:
1486 if r == next_rev[0]:
1487 # If the last rev we looked at was the one just previous,
1487 # If the last rev we looked at was the one just previous,
1488 # we only need to see a diff.
1488 # we only need to see a diff.
1489 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1489 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1490 # For each line in the delta
1490 # For each line in the delta
1491 for dline in delta.splitlines():
1491 for dline in delta.splitlines():
1492 # get the filename and filenode for that line
1492 # get the filename and filenode for that line
1493 f, fnode = dline.split('\0')
1493 f, fnode = dline.split('\0')
1494 fnode = bin(fnode[:40])
1494 fnode = bin(fnode[:40])
1495 f = changedfiles.get(f, None)
1495 f = changedfiles.get(f, None)
1496 # And if the file is in the list of files we care
1496 # And if the file is in the list of files we care
1497 # about.
1497 # about.
1498 if f is not None:
1498 if f is not None:
1499 # Get the changenode this manifest belongs to
1499 # Get the changenode this manifest belongs to
1500 clnode = msng_mnfst_set[mnfstnode]
1500 clnode = msng_mnfst_set[mnfstnode]
1501 # Create the set of filenodes for the file if
1501 # Create the set of filenodes for the file if
1502 # there isn't one already.
1502 # there isn't one already.
1503 ndset = msng_filenode_set.setdefault(f, {})
1503 ndset = msng_filenode_set.setdefault(f, {})
1504 # And set the filenode's changelog node to the
1504 # And set the filenode's changelog node to the
1505 # manifest's if it hasn't been set already.
1505 # manifest's if it hasn't been set already.
1506 ndset.setdefault(fnode, clnode)
1506 ndset.setdefault(fnode, clnode)
1507 else:
1507 else:
1508 # Otherwise we need a full manifest.
1508 # Otherwise we need a full manifest.
1509 m = mnfst.read(mnfstnode)
1509 m = mnfst.read(mnfstnode)
1510 # For every file in we care about.
1510 # For every file in we care about.
1511 for f in changedfiles:
1511 for f in changedfiles:
1512 fnode = m.get(f, None)
1512 fnode = m.get(f, None)
1513 # If it's in the manifest
1513 # If it's in the manifest
1514 if fnode is not None:
1514 if fnode is not None:
1515 # See comments above.
1515 # See comments above.
1516 clnode = msng_mnfst_set[mnfstnode]
1516 clnode = msng_mnfst_set[mnfstnode]
1517 ndset = msng_filenode_set.setdefault(f, {})
1517 ndset = msng_filenode_set.setdefault(f, {})
1518 ndset.setdefault(fnode, clnode)
1518 ndset.setdefault(fnode, clnode)
1519 # Remember the revision we hope to see next.
1519 # Remember the revision we hope to see next.
1520 next_rev[0] = r + 1
1520 next_rev[0] = r + 1
1521 return collect_msng_filenodes
1521 return collect_msng_filenodes
1522
1522
1523 # We have a list of filenodes we think we need for a file, lets remove
1523 # We have a list of filenodes we think we need for a file, lets remove
1524 # all those we now the recipient must have.
1524 # all those we now the recipient must have.
1525 def prune_filenodes(f, filerevlog):
1525 def prune_filenodes(f, filerevlog):
1526 msngset = msng_filenode_set[f]
1526 msngset = msng_filenode_set[f]
1527 hasset = {}
1527 hasset = {}
1528 # If a 'missing' filenode thinks it belongs to a changenode we
1528 # If a 'missing' filenode thinks it belongs to a changenode we
1529 # assume the recipient must have, then the recipient must have
1529 # assume the recipient must have, then the recipient must have
1530 # that filenode.
1530 # that filenode.
1531 for n in msngset:
1531 for n in msngset:
1532 clnode = cl.node(filerevlog.linkrev(n))
1532 clnode = cl.node(filerevlog.linkrev(n))
1533 if clnode in has_cl_set:
1533 if clnode in has_cl_set:
1534 hasset[n] = 1
1534 hasset[n] = 1
1535 prune_parents(filerevlog, hasset, msngset)
1535 prune_parents(filerevlog, hasset, msngset)
1536
1536
1537 # A function generator function that sets up the a context for the
1537 # A function generator function that sets up the a context for the
1538 # inner function.
1538 # inner function.
1539 def lookup_filenode_link_func(fname):
1539 def lookup_filenode_link_func(fname):
1540 msngset = msng_filenode_set[fname]
1540 msngset = msng_filenode_set[fname]
1541 # Lookup the changenode the filenode belongs to.
1541 # Lookup the changenode the filenode belongs to.
1542 def lookup_filenode_link(fnode):
1542 def lookup_filenode_link(fnode):
1543 return msngset[fnode]
1543 return msngset[fnode]
1544 return lookup_filenode_link
1544 return lookup_filenode_link
1545
1545
1546 # Now that we have all theses utility functions to help out and
1546 # Now that we have all theses utility functions to help out and
1547 # logically divide up the task, generate the group.
1547 # logically divide up the task, generate the group.
1548 def gengroup():
1548 def gengroup():
1549 # The set of changed files starts empty.
1549 # The set of changed files starts empty.
1550 changedfiles = {}
1550 changedfiles = {}
1551 # Create a changenode group generator that will call our functions
1551 # Create a changenode group generator that will call our functions
1552 # back to lookup the owning changenode and collect information.
1552 # back to lookup the owning changenode and collect information.
1553 group = cl.group(msng_cl_lst, identity,
1553 group = cl.group(msng_cl_lst, identity,
1554 manifest_and_file_collector(changedfiles))
1554 manifest_and_file_collector(changedfiles))
1555 for chnk in group:
1555 for chnk in group:
1556 yield chnk
1556 yield chnk
1557
1557
1558 # The list of manifests has been collected by the generator
1558 # The list of manifests has been collected by the generator
1559 # calling our functions back.
1559 # calling our functions back.
1560 prune_manifests()
1560 prune_manifests()
1561 msng_mnfst_lst = msng_mnfst_set.keys()
1561 msng_mnfst_lst = msng_mnfst_set.keys()
1562 # Sort the manifestnodes by revision number.
1562 # Sort the manifestnodes by revision number.
1563 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1563 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1564 # Create a generator for the manifestnodes that calls our lookup
1564 # Create a generator for the manifestnodes that calls our lookup
1565 # and data collection functions back.
1565 # and data collection functions back.
1566 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1566 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1567 filenode_collector(changedfiles))
1567 filenode_collector(changedfiles))
1568 for chnk in group:
1568 for chnk in group:
1569 yield chnk
1569 yield chnk
1570
1570
1571 # These are no longer needed, dereference and toss the memory for
1571 # These are no longer needed, dereference and toss the memory for
1572 # them.
1572 # them.
1573 msng_mnfst_lst = None
1573 msng_mnfst_lst = None
1574 msng_mnfst_set.clear()
1574 msng_mnfst_set.clear()
1575
1575
1576 changedfiles = changedfiles.keys()
1576 changedfiles = changedfiles.keys()
1577 changedfiles.sort()
1577 changedfiles.sort()
1578 # Go through all our files in order sorted by name.
1578 # Go through all our files in order sorted by name.
1579 for fname in changedfiles:
1579 for fname in changedfiles:
1580 filerevlog = self.file(fname)
1580 filerevlog = self.file(fname)
1581 # Toss out the filenodes that the recipient isn't really
1581 # Toss out the filenodes that the recipient isn't really
1582 # missing.
1582 # missing.
1583 if msng_filenode_set.has_key(fname):
1583 if msng_filenode_set.has_key(fname):
1584 prune_filenodes(fname, filerevlog)
1584 prune_filenodes(fname, filerevlog)
1585 msng_filenode_lst = msng_filenode_set[fname].keys()
1585 msng_filenode_lst = msng_filenode_set[fname].keys()
1586 else:
1586 else:
1587 msng_filenode_lst = []
1587 msng_filenode_lst = []
1588 # If any filenodes are left, generate the group for them,
1588 # If any filenodes are left, generate the group for them,
1589 # otherwise don't bother.
1589 # otherwise don't bother.
1590 if len(msng_filenode_lst) > 0:
1590 if len(msng_filenode_lst) > 0:
1591 yield changegroup.genchunk(fname)
1591 yield changegroup.genchunk(fname)
1592 # Sort the filenodes by their revision #
1592 # Sort the filenodes by their revision #
1593 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1593 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1594 # Create a group generator and only pass in a changenode
1594 # Create a group generator and only pass in a changenode
1595 # lookup function as we need to collect no information
1595 # lookup function as we need to collect no information
1596 # from filenodes.
1596 # from filenodes.
1597 group = filerevlog.group(msng_filenode_lst,
1597 group = filerevlog.group(msng_filenode_lst,
1598 lookup_filenode_link_func(fname))
1598 lookup_filenode_link_func(fname))
1599 for chnk in group:
1599 for chnk in group:
1600 yield chnk
1600 yield chnk
1601 if msng_filenode_set.has_key(fname):
1601 if msng_filenode_set.has_key(fname):
1602 # Don't need this anymore, toss it to free memory.
1602 # Don't need this anymore, toss it to free memory.
1603 del msng_filenode_set[fname]
1603 del msng_filenode_set[fname]
1604 # Signal that no more groups are left.
1604 # Signal that no more groups are left.
1605 yield changegroup.closechunk()
1605 yield changegroup.closechunk()
1606
1606
1607 if msng_cl_lst:
1607 if msng_cl_lst:
1608 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1608 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1609
1609
1610 return util.chunkbuffer(gengroup())
1610 return util.chunkbuffer(gengroup())
1611
1611
1612 def changegroup(self, basenodes, source):
1612 def changegroup(self, basenodes, source):
1613 """Generate a changegroup of all nodes that we have that a recipient
1613 """Generate a changegroup of all nodes that we have that a recipient
1614 doesn't.
1614 doesn't.
1615
1615
1616 This is much easier than the previous function as we can assume that
1616 This is much easier than the previous function as we can assume that
1617 the recipient has any changenode we aren't sending them."""
1617 the recipient has any changenode we aren't sending them."""
1618
1618
1619 self.hook('preoutgoing', throw=True, source=source)
1619 self.hook('preoutgoing', throw=True, source=source)
1620
1620
1621 cl = self.changelog
1621 cl = self.changelog
1622 nodes = cl.nodesbetween(basenodes, None)[0]
1622 nodes = cl.nodesbetween(basenodes, None)[0]
1623 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1623 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1624 self.changegroupinfo(nodes)
1624 self.changegroupinfo(nodes)
1625
1625
1626 def identity(x):
1626 def identity(x):
1627 return x
1627 return x
1628
1628
1629 def gennodelst(revlog):
1629 def gennodelst(revlog):
1630 for r in xrange(0, revlog.count()):
1630 for r in xrange(0, revlog.count()):
1631 n = revlog.node(r)
1631 n = revlog.node(r)
1632 if revlog.linkrev(n) in revset:
1632 if revlog.linkrev(n) in revset:
1633 yield n
1633 yield n
1634
1634
1635 def changed_file_collector(changedfileset):
1635 def changed_file_collector(changedfileset):
1636 def collect_changed_files(clnode):
1636 def collect_changed_files(clnode):
1637 c = cl.read(clnode)
1637 c = cl.read(clnode)
1638 for fname in c[3]:
1638 for fname in c[3]:
1639 changedfileset[fname] = 1
1639 changedfileset[fname] = 1
1640 return collect_changed_files
1640 return collect_changed_files
1641
1641
1642 def lookuprevlink_func(revlog):
1642 def lookuprevlink_func(revlog):
1643 def lookuprevlink(n):
1643 def lookuprevlink(n):
1644 return cl.node(revlog.linkrev(n))
1644 return cl.node(revlog.linkrev(n))
1645 return lookuprevlink
1645 return lookuprevlink
1646
1646
1647 def gengroup():
1647 def gengroup():
1648 # construct a list of all changed files
1648 # construct a list of all changed files
1649 changedfiles = {}
1649 changedfiles = {}
1650
1650
1651 for chnk in cl.group(nodes, identity,
1651 for chnk in cl.group(nodes, identity,
1652 changed_file_collector(changedfiles)):
1652 changed_file_collector(changedfiles)):
1653 yield chnk
1653 yield chnk
1654 changedfiles = changedfiles.keys()
1654 changedfiles = changedfiles.keys()
1655 changedfiles.sort()
1655 changedfiles.sort()
1656
1656
1657 mnfst = self.manifest
1657 mnfst = self.manifest
1658 nodeiter = gennodelst(mnfst)
1658 nodeiter = gennodelst(mnfst)
1659 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1659 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1660 yield chnk
1660 yield chnk
1661
1661
1662 for fname in changedfiles:
1662 for fname in changedfiles:
1663 filerevlog = self.file(fname)
1663 filerevlog = self.file(fname)
1664 nodeiter = gennodelst(filerevlog)
1664 nodeiter = gennodelst(filerevlog)
1665 nodeiter = list(nodeiter)
1665 nodeiter = list(nodeiter)
1666 if nodeiter:
1666 if nodeiter:
1667 yield changegroup.genchunk(fname)
1667 yield changegroup.genchunk(fname)
1668 lookup = lookuprevlink_func(filerevlog)
1668 lookup = lookuprevlink_func(filerevlog)
1669 for chnk in filerevlog.group(nodeiter, lookup):
1669 for chnk in filerevlog.group(nodeiter, lookup):
1670 yield chnk
1670 yield chnk
1671
1671
1672 yield changegroup.closechunk()
1672 yield changegroup.closechunk()
1673
1673
1674 if nodes:
1674 if nodes:
1675 self.hook('outgoing', node=hex(nodes[0]), source=source)
1675 self.hook('outgoing', node=hex(nodes[0]), source=source)
1676
1676
1677 return util.chunkbuffer(gengroup())
1677 return util.chunkbuffer(gengroup())
1678
1678
1679 def addchangegroup(self, source, srctype, url):
1679 def addchangegroup(self, source, srctype, url):
1680 """add changegroup to repo.
1680 """add changegroup to repo.
1681
1681
1682 return values:
1682 return values:
1683 - nothing changed or no source: 0
1683 - nothing changed or no source: 0
1684 - more heads than before: 1+added heads (2..n)
1684 - more heads than before: 1+added heads (2..n)
1685 - less heads than before: -1-removed heads (-2..-n)
1685 - less heads than before: -1-removed heads (-2..-n)
1686 - number of heads stays the same: 1
1686 - number of heads stays the same: 1
1687 """
1687 """
1688 def csmap(x):
1688 def csmap(x):
1689 self.ui.debug(_("add changeset %s\n") % short(x))
1689 self.ui.debug(_("add changeset %s\n") % short(x))
1690 return cl.count()
1690 return cl.count()
1691
1691
1692 def revmap(x):
1692 def revmap(x):
1693 return cl.rev(x)
1693 return cl.rev(x)
1694
1694
1695 if not source:
1695 if not source:
1696 return 0
1696 return 0
1697
1697
1698 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1698 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1699
1699
1700 changesets = files = revisions = 0
1700 changesets = files = revisions = 0
1701
1701
1702 tr = self.transaction()
1702 tr = self.transaction()
1703
1703
1704 # write changelog data to temp files so concurrent readers will not see
1704 # write changelog data to temp files so concurrent readers will not see
1705 # inconsistent view
1705 # inconsistent view
1706 cl = None
1706 cl = None
1707 try:
1707 try:
1708 cl = appendfile.appendchangelog(self.sopener,
1708 cl = appendfile.appendchangelog(self.sopener,
1709 self.changelog.version)
1709 self.changelog.version)
1710
1710
1711 oldheads = len(cl.heads())
1711 oldheads = len(cl.heads())
1712
1712
1713 # pull off the changeset group
1713 # pull off the changeset group
1714 self.ui.status(_("adding changesets\n"))
1714 self.ui.status(_("adding changesets\n"))
1715 cor = cl.count() - 1
1715 cor = cl.count() - 1
1716 chunkiter = changegroup.chunkiter(source)
1716 chunkiter = changegroup.chunkiter(source)
1717 if cl.addgroup(chunkiter, csmap, tr, 1) is None:
1717 if cl.addgroup(chunkiter, csmap, tr, 1) is None:
1718 raise util.Abort(_("received changelog group is empty"))
1718 raise util.Abort(_("received changelog group is empty"))
1719 cnr = cl.count() - 1
1719 cnr = cl.count() - 1
1720 changesets = cnr - cor
1720 changesets = cnr - cor
1721
1721
1722 # pull off the manifest group
1722 # pull off the manifest group
1723 self.ui.status(_("adding manifests\n"))
1723 self.ui.status(_("adding manifests\n"))
1724 chunkiter = changegroup.chunkiter(source)
1724 chunkiter = changegroup.chunkiter(source)
1725 # no need to check for empty manifest group here:
1725 # no need to check for empty manifest group here:
1726 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1726 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1727 # no new manifest will be created and the manifest group will
1727 # no new manifest will be created and the manifest group will
1728 # be empty during the pull
1728 # be empty during the pull
1729 self.manifest.addgroup(chunkiter, revmap, tr)
1729 self.manifest.addgroup(chunkiter, revmap, tr)
1730
1730
1731 # process the files
1731 # process the files
1732 self.ui.status(_("adding file changes\n"))
1732 self.ui.status(_("adding file changes\n"))
1733 while 1:
1733 while 1:
1734 f = changegroup.getchunk(source)
1734 f = changegroup.getchunk(source)
1735 if not f:
1735 if not f:
1736 break
1736 break
1737 self.ui.debug(_("adding %s revisions\n") % f)
1737 self.ui.debug(_("adding %s revisions\n") % f)
1738 fl = self.file(f)
1738 fl = self.file(f)
1739 o = fl.count()
1739 o = fl.count()
1740 chunkiter = changegroup.chunkiter(source)
1740 chunkiter = changegroup.chunkiter(source)
1741 if fl.addgroup(chunkiter, revmap, tr) is None:
1741 if fl.addgroup(chunkiter, revmap, tr) is None:
1742 raise util.Abort(_("received file revlog group is empty"))
1742 raise util.Abort(_("received file revlog group is empty"))
1743 revisions += fl.count() - o
1743 revisions += fl.count() - o
1744 files += 1
1744 files += 1
1745
1745
1746 cl.writedata()
1746 cl.writedata()
1747 finally:
1747 finally:
1748 if cl:
1748 if cl:
1749 cl.cleanup()
1749 cl.cleanup()
1750
1750
1751 # make changelog see real files again
1751 # make changelog see real files again
1752 self.changelog = changelog.changelog(self.sopener,
1752 self.changelog = changelog.changelog(self.sopener,
1753 self.changelog.version)
1753 self.changelog.version)
1754 self.changelog.checkinlinesize(tr)
1754 self.changelog.checkinlinesize(tr)
1755
1755
1756 newheads = len(self.changelog.heads())
1756 newheads = len(self.changelog.heads())
1757 heads = ""
1757 heads = ""
1758 if oldheads and newheads != oldheads:
1758 if oldheads and newheads != oldheads:
1759 heads = _(" (%+d heads)") % (newheads - oldheads)
1759 heads = _(" (%+d heads)") % (newheads - oldheads)
1760
1760
1761 self.ui.status(_("added %d changesets"
1761 self.ui.status(_("added %d changesets"
1762 " with %d changes to %d files%s\n")
1762 " with %d changes to %d files%s\n")
1763 % (changesets, revisions, files, heads))
1763 % (changesets, revisions, files, heads))
1764
1764
1765 if changesets > 0:
1765 if changesets > 0:
1766 self.hook('pretxnchangegroup', throw=True,
1766 self.hook('pretxnchangegroup', throw=True,
1767 node=hex(self.changelog.node(cor+1)), source=srctype,
1767 node=hex(self.changelog.node(cor+1)), source=srctype,
1768 url=url)
1768 url=url)
1769
1769
1770 tr.close()
1770 tr.close()
1771
1771
1772 if changesets > 0:
1772 if changesets > 0:
1773 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1773 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1774 source=srctype, url=url)
1774 source=srctype, url=url)
1775
1775
1776 for i in xrange(cor + 1, cnr + 1):
1776 for i in xrange(cor + 1, cnr + 1):
1777 self.hook("incoming", node=hex(self.changelog.node(i)),
1777 self.hook("incoming", node=hex(self.changelog.node(i)),
1778 source=srctype, url=url)
1778 source=srctype, url=url)
1779
1779
1780 # never return 0 here:
1780 # never return 0 here:
1781 if newheads < oldheads:
1781 if newheads < oldheads:
1782 return newheads - oldheads - 1
1782 return newheads - oldheads - 1
1783 else:
1783 else:
1784 return newheads - oldheads + 1
1784 return newheads - oldheads + 1
1785
1785
1786
1786
1787 def stream_in(self, remote):
1787 def stream_in(self, remote):
1788 fp = remote.stream_out()
1788 fp = remote.stream_out()
1789 l = fp.readline()
1789 l = fp.readline()
1790 try:
1790 try:
1791 resp = int(l)
1791 resp = int(l)
1792 except ValueError:
1792 except ValueError:
1793 raise util.UnexpectedOutput(
1793 raise util.UnexpectedOutput(
1794 _('Unexpected response from remote server:'), l)
1794 _('Unexpected response from remote server:'), l)
1795 if resp == 1:
1795 if resp == 1:
1796 raise util.Abort(_('operation forbidden by server'))
1796 raise util.Abort(_('operation forbidden by server'))
1797 elif resp == 2:
1797 elif resp == 2:
1798 raise util.Abort(_('locking the remote repository failed'))
1798 raise util.Abort(_('locking the remote repository failed'))
1799 elif resp != 0:
1799 elif resp != 0:
1800 raise util.Abort(_('the server sent an unknown error code'))
1800 raise util.Abort(_('the server sent an unknown error code'))
1801 self.ui.status(_('streaming all changes\n'))
1801 self.ui.status(_('streaming all changes\n'))
1802 l = fp.readline()
1802 l = fp.readline()
1803 try:
1803 try:
1804 total_files, total_bytes = map(int, l.split(' ', 1))
1804 total_files, total_bytes = map(int, l.split(' ', 1))
1805 except ValueError, TypeError:
1805 except ValueError, TypeError:
1806 raise util.UnexpectedOutput(
1806 raise util.UnexpectedOutput(
1807 _('Unexpected response from remote server:'), l)
1807 _('Unexpected response from remote server:'), l)
1808 self.ui.status(_('%d files to transfer, %s of data\n') %
1808 self.ui.status(_('%d files to transfer, %s of data\n') %
1809 (total_files, util.bytecount(total_bytes)))
1809 (total_files, util.bytecount(total_bytes)))
1810 start = time.time()
1810 start = time.time()
1811 for i in xrange(total_files):
1811 for i in xrange(total_files):
1812 # XXX doesn't support '\n' or '\r' in filenames
1812 # XXX doesn't support '\n' or '\r' in filenames
1813 l = fp.readline()
1813 l = fp.readline()
1814 try:
1814 try:
1815 name, size = l.split('\0', 1)
1815 name, size = l.split('\0', 1)
1816 size = int(size)
1816 size = int(size)
1817 except ValueError, TypeError:
1817 except ValueError, TypeError:
1818 raise util.UnexpectedOutput(
1818 raise util.UnexpectedOutput(
1819 _('Unexpected response from remote server:'), l)
1819 _('Unexpected response from remote server:'), l)
1820 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
1820 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
1821 ofp = self.sopener(name, 'w')
1821 ofp = self.sopener(name, 'w')
1822 for chunk in util.filechunkiter(fp, limit=size):
1822 for chunk in util.filechunkiter(fp, limit=size):
1823 ofp.write(chunk)
1823 ofp.write(chunk)
1824 ofp.close()
1824 ofp.close()
1825 elapsed = time.time() - start
1825 elapsed = time.time() - start
1826 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1826 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1827 (util.bytecount(total_bytes), elapsed,
1827 (util.bytecount(total_bytes), elapsed,
1828 util.bytecount(total_bytes / elapsed)))
1828 util.bytecount(total_bytes / elapsed)))
1829 self.reload()
1829 self.reload()
1830 return len(self.heads()) + 1
1830 return len(self.heads()) + 1
1831
1831
1832 def clone(self, remote, heads=[], stream=False):
1832 def clone(self, remote, heads=[], stream=False):
1833 '''clone remote repository.
1833 '''clone remote repository.
1834
1834
1835 keyword arguments:
1835 keyword arguments:
1836 heads: list of revs to clone (forces use of pull)
1836 heads: list of revs to clone (forces use of pull)
1837 stream: use streaming clone if possible'''
1837 stream: use streaming clone if possible'''
1838
1838
1839 # now, all clients that can request uncompressed clones can
1839 # now, all clients that can request uncompressed clones can
1840 # read repo formats supported by all servers that can serve
1840 # read repo formats supported by all servers that can serve
1841 # them.
1841 # them.
1842
1842
1843 # if revlog format changes, client will have to check version
1843 # if revlog format changes, client will have to check version
1844 # and format flags on "stream" capability, and use
1844 # and format flags on "stream" capability, and use
1845 # uncompressed only if compatible.
1845 # uncompressed only if compatible.
1846
1846
1847 if stream and not heads and remote.capable('stream'):
1847 if stream and not heads and remote.capable('stream'):
1848 return self.stream_in(remote)
1848 return self.stream_in(remote)
1849 return self.pull(remote, heads)
1849 return self.pull(remote, heads)
1850
1850
1851 # used to avoid circular references so destructors work
1851 # used to avoid circular references so destructors work
1852 def aftertrans(files):
1852 def aftertrans(files):
1853 renamefiles = [tuple(t) for t in files]
1853 renamefiles = [tuple(t) for t in files]
1854 def a():
1854 def a():
1855 for src, dest in renamefiles:
1855 for src, dest in renamefiles:
1856 util.rename(src, dest)
1856 util.rename(src, dest)
1857 return a
1857 return a
1858
1858
1859 def instance(ui, path, create):
1859 def instance(ui, path, create):
1860 return localrepository(ui, util.drop_scheme('file', path), create)
1860 return localrepository(ui, util.drop_scheme('file', path), create)
1861
1861
1862 def islocal(path):
1862 def islocal(path):
1863 return True
1863 return True
@@ -1,27 +1,24 b''
1 # repo.py - repository base classes for mercurial
1 # repo.py - repository base classes for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 #
5 #
6 # This software may be used and distributed according to the terms
6 # This software may be used and distributed according to the terms
7 # of the GNU General Public License, incorporated herein by reference.
7 # of the GNU General Public License, incorporated herein by reference.
8
8
class RepoError(Exception):
    """Base class for repository-level errors."""

class LookupError(RepoError):
    """A requested revision or node could not be found."""
14
class repository(object):
    def capable(self, name):
        '''tell whether repo supports named capability.
        return False if not supported.
        if boolean capability, return True.
        if string capability, return string.'''
        prefix = name + '='
        for entry in self.capabilities:
            if entry == name:
                # bare capability name: boolean yes
                return True
            if entry.startswith(prefix):
                # "name=value" capability: hand back the value part
                return entry[len(prefix):]
        return False
@@ -1,1291 +1,1292 b''
1 """
1 """
2 revlog.py - storage back-end for mercurial
2 revlog.py - storage back-end for mercurial
3
3
4 This provides efficient delta storage with O(1) retrieve and append
4 This provides efficient delta storage with O(1) retrieve and append
5 and O(changes) merge between branches
5 and O(changes) merge between branches
6
6
7 Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
7 Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
8
8
9 This software may be used and distributed according to the terms
9 This software may be used and distributed according to the terms
10 of the GNU General Public License, incorporated herein by reference.
10 of the GNU General Public License, incorporated herein by reference.
11 """
11 """
12
12
13 from node import *
13 from node import *
14 from i18n import _
14 from i18n import _
15 import binascii, changegroup, errno, ancestor, mdiff, os
15 import binascii, changegroup, errno, ancestor, mdiff, os
16 import sha, struct, util, zlib
16 import sha, struct, util, zlib
17
17
# revlog version strings
REVLOGV0 = 0
REVLOGNG = 1

# revlog flags
# flag bits live in the upper 16 bits of the version header word;
# this one marks revlogs whose data is interleaved with the index
REVLOGNGINLINEDATA = (1 << 16)
REVLOG_DEFAULT_FLAGS = REVLOGNGINLINEDATA

REVLOG_DEFAULT_FORMAT = REVLOGNG
REVLOG_DEFAULT_VERSION = REVLOG_DEFAULT_FORMAT | REVLOG_DEFAULT_FLAGS
28
28
def flagstr(flag):
    """Map a symbolic revlog flag name to its bit value."""
    if flag != "inline":
        raise RevlogError(_("unknown revlog flag %s") % flag)
    return REVLOGNGINLINEDATA
33
33
def hash(text, p1, p2):
    """generate a hash from the given text and its parent hashes

    This hash combines both the current file contents and its history
    in a manner that makes it easy to distinguish nodes with the same
    content in the revision graph.
    """
    # hash the parents in sorted order so the digest does not depend
    # on which parent is p1 and which is p2
    parents = [p1, p2]
    parents.sort()
    digester = sha.new(parents[0])
    digester.update(parents[1])
    digester.update(text)
    return digester.digest()
47
47
def compress(text):
    """generate a possibly-compressed representation of text

    Returns a (prefix, data) pair: prefix '' means data is stored
    as-is (empty, NUL-leading, or a zlib stream), 'u' means data is
    stored uncompressed behind the marker.
    """
    if not text:
        return ("", text)
    starts_nul = text[0] == '\0'
    if len(text) < 44:
        # too small for zlib to ever win
        if starts_nul:
            return ("", text)
        return ('u', text)
    packed = zlib.compress(text)
    if len(packed) > len(text):
        # compression made it bigger; keep the original bytes
        if starts_nul:
            return ("", text)
        return ('u', text)
    return ("", packed)
59
59
def decompress(bin):
    """decompress the given input

    The first byte selects the scheme written by compress():
    NUL = stored raw, 'x' = zlib stream, 'u' = uncompressed payload
    behind a one-byte marker.
    """
    if not bin:
        return bin
    marker = bin[0]
    if marker == '\0':
        return bin
    elif marker == 'x':
        return zlib.decompress(bin)
    elif marker == 'u':
        return bin[1:]
    raise RevlogError(_("unknown compression type %r") % marker)
68
68
# v0 index entry: offset, size, base, linkrev, then three 20-byte hashes
indexformatv0 = ">4l20s20s20s"
v0shaoffset = 56    # byte offset of the nodeid within a packed v0 entry
# index ng:
# 6 bytes offset
# 2 bytes flags
# 4 bytes compressed length
# 4 bytes uncompressed length
# 4 bytes: base rev
# 4 bytes link rev
# 4 bytes parent 1 rev
# 4 bytes parent 2 rev
# 32 bytes: nodeid
indexformatng = ">Qiiiiii20s12x"
ngshaoffset = 32    # byte offset of the nodeid within a packed ng entry
versionformat = ">I"    # 4-byte header word: flag bits | format number
84
84
class lazyparser(object):
    """
    this class avoids the need to parse the entirety of large indices
    """

    # lazyparser is not safe to use on windows if win32 extensions not
    # available. it keeps file handle open, which makes it not possible
    # to break hardlinks on local cloned repos.
    safe_to_use = os.name != 'nt' or (not util.is_win_9x() and
                                      hasattr(util, 'win32api'))

    def __init__(self, dataf, size, indexformat, shaoffset):
        # dataf: open file object on the index; size: its byte length
        self.dataf = dataf
        self.format = indexformat
        self.s = struct.calcsize(indexformat)  # bytes per packed entry
        self.indexformat = indexformat
        self.datasize = size
        self.l = size/self.s                   # total number of entries
        self.index = [None] * self.l           # None = entry not loaded yet
        self.map = {nullid: nullrev}           # nodeid -> rev, filled lazily
        self.allmap = 0            # nonzero once the whole node map is in
        self.all = 0               # nonzero once every entry is in
        self.mapfind_count = 0     # how many findnode() calls so far
        self.shaoffset = shaoffset # offset of the nodeid inside an entry

    def loadmap(self):
        """
        during a commit, we need to make sure the rev being added is
        not a duplicate. This requires loading the entire index,
        which is fairly slow. loadmap can load up just the node map,
        which takes much less time.
        """
        if self.allmap: return
        end = self.datasize
        self.allmap = 1
        cur = 0
        count = 0
        blocksize = self.s * 256
        self.dataf.seek(0)
        while cur < end:
            data = self.dataf.read(blocksize)
            off = 0
            for x in xrange(256):
                # extract only the nodeid from each packed entry
                n = data[off + self.shaoffset:off + self.shaoffset + 20]
                self.map[n] = count
                count += 1
                if count >= self.l:
                    break
                off += self.s
            cur += blocksize

    def loadblock(self, blockstart, blocksize, data=None):
        # populate self.index/self.map for the byte range
        # [blockstart, blockstart + blocksize); entries stay as packed
        # strings until lazyindex unpacks them on demand
        if self.all: return
        if data is None:
            self.dataf.seek(blockstart)
            if blockstart + blocksize > self.datasize:
                # the revlog may have grown since we've started running,
                # but we don't have space in self.index for more entries.
                # limit blocksize so that we don't get too much data.
                blocksize = max(self.datasize - blockstart, 0)
            data = self.dataf.read(blocksize)
        lend = len(data) / self.s
        i = blockstart / self.s
        off = 0
        for x in xrange(lend):
            if self.index[i + x] == None:
                b = data[off : off + self.s]
                self.index[i + x] = b
                n = b[self.shaoffset:self.shaoffset + 20]
                self.map[n] = i + x
            off += self.s

    def findnode(self, node):
        """search backwards through the index file for a specific node"""
        if self.allmap: return None

        # hg log will cause many many searches for the manifest
        # nodes. After we get called a few times, just load the whole
        # thing.
        if self.mapfind_count > 8:
            self.loadmap()
            if node in self.map:
                return node
            return None
        self.mapfind_count += 1
        # skip past entries that are already loaded (recent ones live at
        # the end of the file) and scan backwards from there
        last = self.l - 1
        while self.index[last] != None:
            if last == 0:
                self.all = 1
                self.allmap = 1
                return None
            last -= 1
        end = (last + 1) * self.s
        blocksize = self.s * 256
        while end >= 0:
            start = max(end - blocksize, 0)
            self.dataf.seek(start)
            data = self.dataf.read(end - start)
            findend = end - start
            while True:
                # we're searching backwards, so we have to make sure
                # we don't find a changeset where this node is a parent
                off = data.rfind(node, 0, findend)
                findend = off
                if off >= 0:
                    i = off / self.s
                    off = i * self.s
                    n = data[off + self.shaoffset:off + self.shaoffset + 20]
                    if n == node:
                        self.map[n] = i + start / self.s
                        return node
                else:
                    break
            end -= blocksize
        return None

    def loadindex(self, i=None, end=None):
        # load index entries on demand: the whole index (i is None),
        # the entry range [i, end), or a 64-entry block around i
        if self.all: return
        all = False
        if i == None:
            blockstart = 0
            blocksize = (512 / self.s) * self.s
            end = self.datasize
            all = True
        else:
            if end:
                blockstart = i * self.s
                end = end * self.s
                blocksize = end - blockstart
            else:
                blockstart = (i & ~(32)) * self.s
                blocksize = self.s * 64
                end = blockstart + blocksize
        while blockstart < end:
            self.loadblock(blockstart, blocksize)
            blockstart += blocksize
        if all: self.all = True
222
222
class lazyindex(object):
    """a lazy version of the index array

    Wraps a lazyparser and materializes packed index entries only when
    they are actually subscripted.
    """
    def __init__(self, parser):
        self.p = parser

    def __len__(self):
        return len(self.p.index)

    def load(self, pos):
        # normalize negative subscripts before asking the parser
        if pos < 0:
            pos += len(self.p.index)
        self.p.loadindex(pos)
        return self.p.index[pos]

    def __getitem__(self, pos):
        entry = self.p.index[pos]
        if not entry:
            entry = self.load(pos)
        if isinstance(entry, str):
            # still in packed on-disk form; unpack to a tuple
            entry = struct.unpack(self.p.indexformat, entry)
        return entry

    def __setitem__(self, pos, item):
        self.p.index[pos] = item

    def __delitem__(self, pos):
        del self.p.index[pos]

    def append(self, e):
        self.p.index.append(e)
245
245
class lazymap(object):
    """a lazy version of the node map

    Presents the parser's nodeid -> rev mapping as a dict, pulling
    entries in from disk only when a lookup misses.
    """
    def __init__(self, parser):
        self.p = parser

    def load(self, key):
        # ask the parser to locate the node on disk; on success it
        # populates self.p.map as a side effect
        if self.p.findnode(key) == None:
            raise KeyError(key)

    def __contains__(self, key):
        if key in self.p.map:
            return True
        # not seen yet: force the full node map in and check again
        self.p.loadmap()
        return key in self.p.map

    def __iter__(self):
        yield nullid
        for i in xrange(self.p.l):
            entry = self.p.index[i]
            if not entry:
                self.p.loadindex(i)
                entry = self.p.index[i]
            if isinstance(entry, str):
                entry = struct.unpack(self.p.indexformat, entry)
            # the nodeid is the last field in every entry format
            yield entry[-1]

    def __getitem__(self, key):
        try:
            return self.p.map[key]
        except KeyError:
            try:
                self.load(key)
                return self.p.map[key]
            except KeyError:
                raise KeyError("node " + hex(key))

    def __setitem__(self, key, val):
        self.p.map[key] = val

    def __delitem__(self, key):
        del self.p.map[key]
282
282
class RevlogError(Exception):
    """Base class for errors raised by revlog operations."""

class LookupError(RevlogError):
    """A node was not found in the revlog index."""
284
285
285 class revlog(object):
286 class revlog(object):
286 """
287 """
287 the underlying revision storage object
288 the underlying revision storage object
288
289
289 A revlog consists of two parts, an index and the revision data.
290 A revlog consists of two parts, an index and the revision data.
290
291
291 The index is a file with a fixed record size containing
292 The index is a file with a fixed record size containing
    information on each revision, including its nodeid (hash), the
293 nodeids of its parents, the position and offset of its data within
294 nodeids of its parents, the position and offset of its data within
294 the data file, and the revision it's based on. Finally, each entry
295 the data file, and the revision it's based on. Finally, each entry
295 contains a linkrev entry that can serve as a pointer to external
296 contains a linkrev entry that can serve as a pointer to external
296 data.
297 data.
297
298
298 The revision data itself is a linear collection of data chunks.
299 The revision data itself is a linear collection of data chunks.
299 Each chunk represents a revision and is usually represented as a
300 Each chunk represents a revision and is usually represented as a
300 delta against the previous chunk. To bound lookup time, runs of
301 delta against the previous chunk. To bound lookup time, runs of
301 deltas are limited to about 2 times the length of the original
302 deltas are limited to about 2 times the length of the original
302 version data. This makes retrieval of a version proportional to
303 version data. This makes retrieval of a version proportional to
303 its size, or O(1) relative to the number of revisions.
304 its size, or O(1) relative to the number of revisions.
304
305
305 Both pieces of the revlog are written to in an append-only
306 Both pieces of the revlog are written to in an append-only
306 fashion, which means we never need to rewrite a file to insert or
307 fashion, which means we never need to rewrite a file to insert or
307 remove data, and can use some simple techniques to avoid the need
308 remove data, and can use some simple techniques to avoid the need
308 for locking while reading.
309 for locking while reading.
309 """
310 """
    def __init__(self, opener, indexfile, datafile,
                 defversion=REVLOG_DEFAULT_VERSION):
        """
        create a revlog object

        opener is a function that abstracts the file opening operation
        and can be used to implement COW semantics or the like.
        """
        self.indexfile = indexfile
        self.datafile = datafile
        self.opener = opener

        # stat of the index file when it was last parsed; load() uses it
        # to skip reparsing when the file is unchanged on disk
        self.indexstat = None
        # most recently reconstructed revision; [1] is the rev, [2] the text
        self.cache = None
        # (offset, data) of the last raw chunk read from the data file
        self.chunkcache = None
        self.defversion = defversion
        self.load()
327
328
    def load(self):
        # (re)parse the index file, doing nothing if it is unchanged
        # since the last parse
        v = self.defversion
        try:
            f = self.opener(self.indexfile)
            i = f.read(4)  # header word: flag bits | format number
            f.seek(0)
        except IOError, inst:
            if inst.errno != errno.ENOENT:
                raise
            i = ""  # no index file yet: empty revlog
        else:
            try:
                st = util.fstat(f)
            except AttributeError, inst:
                st = None
            else:
                oldst = self.indexstat
                # identical stat means the index has not changed on
                # disk; keep the structures we already parsed
                if (oldst and st.st_dev == oldst.st_dev
                    and st.st_ino == oldst.st_ino
                    and st.st_mtime == oldst.st_mtime
                    and st.st_ctime == oldst.st_ctime):
                    return
                self.indexstat = st
            if len(i) > 0:
                v = struct.unpack(versionformat, i)[0]
        # split the header into flag bits (high 16) and format (low 16)
        flags = v & ~0xFFFF
        fmt = v & 0xFFFF
        if fmt == REVLOGV0:
            if flags:
                raise RevlogError(_("index %s unknown flags %#04x for format v0")
                                  % (self.indexfile, flags >> 16))
        elif fmt == REVLOGNG:
            if flags & ~REVLOGNGINLINEDATA:
                raise RevlogError(_("index %s unknown flags %#04x for revlogng")
                                  % (self.indexfile, flags >> 16))
        else:
            raise RevlogError(_("index %s unknown format %d")
                              % (self.indexfile, fmt))
        self.version = v
        if v == REVLOGV0:
            self.indexformat = indexformatv0
            shaoffset = v0shaoffset
        else:
            self.indexformat = indexformatng
            shaoffset = ngshaoffset

        if i:
            if (lazyparser.safe_to_use and not self.inlinedata() and
                st and st.st_size > 10000):
                # big index, let's parse it on demand
                parser = lazyparser(f, st.st_size, self.indexformat, shaoffset)
                self.index = lazyindex(parser)
                self.nodemap = lazymap(parser)
            else:
                self.parseindex(f, st)
            if self.version != REVLOGV0:
                # entry 0's offset field holds the version header on
                # disk; replace it in memory with a clean offset of 0,
                # preserving the type bits
                e = list(self.index[0])
                type = self.ngtype(e[0])
                e[0] = self.offset_type(0, type)
                self.index[0] = e
        else:
            self.nodemap = {nullid: nullrev}
            self.index = []
391
392
392
393
    def parseindex(self, fp, st):
        # eagerly parse the entire index file into self.index/self.nodemap
        s = struct.calcsize(self.indexformat)
        self.index = []
        self.nodemap =  {nullid: nullrev}
        inline = self.inlinedata()
        n = 0
        leftover = None
        while True:
            if st:
                data = fp.read(65536)
            else:
                # hack for httprangereader, it doesn't do partial reads well
                data = fp.read()
            if not data:
                break
            if n == 0 and self.inlinedata():
                # cache the first chunk
                self.chunkcache = (0, data)
            if leftover:
                # an entry straddled the previous read boundary
                data = leftover + data
                leftover = None
            off = 0
            l = len(data)
            while off < l:
                if l - off < s:
                    leftover = data[off:]
                    break
                cur = data[off:off + s]
                off += s
                e = struct.unpack(self.indexformat, cur)
                self.index.append(e)
                self.nodemap[e[-1]] = n
                n += 1
                if inline:
                    # inline revlogs interleave the revision data (e[1]
                    # bytes) after each index entry; skip over it
                    off += e[1]
                    if off > l:
                        # some things don't seek well, just read it
                        fp.read(off - l)
                        if not st:
                            break
433
434
434
435
    def ngoffset(self, q):
        # extract the data-file offset from a packed ng offset field;
        # the low 16 bits hold per-revision flags, none of which are
        # supported here
        if q & 0xFFFF:
            raise RevlogError(_('%s: incompatible revision flag %x') %
                              (self.indexfile, q))
        return long(q >> 16)
440
441
441 def ngtype(self, q):
442 def ngtype(self, q):
442 return int(q & 0xFFFF)
443 return int(q & 0xFFFF)
443
444
444 def offset_type(self, offset, type):
445 def offset_type(self, offset, type):
445 return long(long(offset) << 16 | type)
446 return long(long(offset) << 16 | type)
446
447
447 def loadindex(self, start, end):
448 def loadindex(self, start, end):
448 """load a block of indexes all at once from the lazy parser"""
449 """load a block of indexes all at once from the lazy parser"""
449 if isinstance(self.index, lazyindex):
450 if isinstance(self.index, lazyindex):
450 self.index.p.loadindex(start, end)
451 self.index.p.loadindex(start, end)
451
452
452 def loadindexmap(self):
453 def loadindexmap(self):
453 """loads both the map and the index from the lazy parser"""
454 """loads both the map and the index from the lazy parser"""
454 if isinstance(self.index, lazyindex):
455 if isinstance(self.index, lazyindex):
455 p = self.index.p
456 p = self.index.p
456 p.loadindex()
457 p.loadindex()
457 self.nodemap = p.map
458 self.nodemap = p.map
458
459
459 def loadmap(self):
460 def loadmap(self):
460 """loads the map from the lazy parser"""
461 """loads the map from the lazy parser"""
461 if isinstance(self.nodemap, lazymap):
462 if isinstance(self.nodemap, lazymap):
462 self.nodemap.p.loadmap()
463 self.nodemap.p.loadmap()
463 self.nodemap = self.nodemap.p.map
464 self.nodemap = self.nodemap.p.map
464
465
465 def inlinedata(self): return self.version & REVLOGNGINLINEDATA
466 def inlinedata(self): return self.version & REVLOGNGINLINEDATA
466 def tip(self): return self.node(len(self.index) - 1)
467 def tip(self): return self.node(len(self.index) - 1)
467 def count(self): return len(self.index)
468 def count(self): return len(self.index)
468 def node(self, rev):
469 def node(self, rev):
469 return rev == nullrev and nullid or self.index[rev][-1]
470 return rev == nullrev and nullid or self.index[rev][-1]
470 def rev(self, node):
471 def rev(self, node):
471 try:
472 try:
472 return self.nodemap[node]
473 return self.nodemap[node]
473 except KeyError:
474 except KeyError:
474 raise RevlogError(_('%s: no node %s') % (self.indexfile, hex(node)))
475 raise LookupError(_('%s: no node %s') % (self.indexfile, hex(node)))
475 def linkrev(self, node):
476 def linkrev(self, node):
476 return (node == nullid) and nullrev or self.index[self.rev(node)][-4]
477 return (node == nullid) and nullrev or self.index[self.rev(node)][-4]
477 def parents(self, node):
478 def parents(self, node):
478 if node == nullid: return (nullid, nullid)
479 if node == nullid: return (nullid, nullid)
479 r = self.rev(node)
480 r = self.rev(node)
480 d = self.index[r][-3:-1]
481 d = self.index[r][-3:-1]
481 if self.version == REVLOGV0:
482 if self.version == REVLOGV0:
482 return d
483 return d
483 return (self.node(d[0]), self.node(d[1]))
484 return (self.node(d[0]), self.node(d[1]))
484 def parentrevs(self, rev):
485 def parentrevs(self, rev):
485 if rev == nullrev:
486 if rev == nullrev:
486 return (nullrev, nullrev)
487 return (nullrev, nullrev)
487 d = self.index[rev][-3:-1]
488 d = self.index[rev][-3:-1]
488 if self.version == REVLOGV0:
489 if self.version == REVLOGV0:
489 return (self.rev(d[0]), self.rev(d[1]))
490 return (self.rev(d[0]), self.rev(d[1]))
490 return d
491 return d
491 def start(self, rev):
492 def start(self, rev):
492 if rev == nullrev:
493 if rev == nullrev:
493 return 0
494 return 0
494 if self.version != REVLOGV0:
495 if self.version != REVLOGV0:
495 return self.ngoffset(self.index[rev][0])
496 return self.ngoffset(self.index[rev][0])
496 return self.index[rev][0]
497 return self.index[rev][0]
497
498
498 def end(self, rev): return self.start(rev) + self.length(rev)
499 def end(self, rev): return self.start(rev) + self.length(rev)
499
500
    def size(self, rev):
        """return the length of the uncompressed text for a given revision"""
        if rev == nullrev:
            return 0
        l = -1
        if self.version != REVLOGV0:
            # ng records the uncompressed length in the index; a value
            # below 0 means it must be computed from the text itself
            l = self.index[rev][2]
            if l >= 0:
                return l

        t = self.revision(self.node(rev))
        return len(t)

        # alternate implementation, The advantage to this code is it
        # will be faster for a single revision. But, the results are not
        # cached, so finding the size of every revision will be slower.
        """
        if self.cache and self.cache[1] == rev:
            return len(self.cache[2])

        base = self.base(rev)
        if self.cache and self.cache[1] >= base and self.cache[1] < rev:
            base = self.cache[1]
            text = self.cache[2]
        else:
            text = self.revision(self.node(base))

        l = len(text)
        for x in xrange(base + 1, rev + 1):
            l = mdiff.patchedsize(l, self.chunk(x))
        return l
        """
532
533
def length(self, rev):
    """Return the on-disk (compressed) length of revision ``rev``.

    ``nullrev`` has no data and reports length 0; otherwise the length
    is read from index slot 1.
    """
    return 0 if rev == nullrev else self.index[rev][1]
def base(self, rev):
    """Return the base revision of ``rev``'s delta chain.

    ``nullrev`` is its own base; otherwise the base is read from the
    index entry (fifth-from-last slot, which works for both index
    layouts).
    """
    if rev == nullrev:
        return nullrev
    return self.index[rev][-5]
def reachable(self, node, stop=None):
    """return a hash of all nodes ancestral to a given node, including
    the node itself, stopping when stop is matched"""
    seen = {node: 1}
    pending = [node]
    # Parents below stop's revision number are pruned from the walk;
    # with no stop, revision 0 is the floor.
    stopn = self.rev(stop) if stop else 0
    while pending:
        current = pending.pop(0)
        # Never expand past the stop node or the null node.
        if current == stop or current == nullid:
            continue
        for parent in self.parents(current):
            if self.rev(parent) < stopn:
                continue
            if parent not in seen:
                seen[parent] = 1
                pending.append(parent)
    return seen
def nodesbetween(self, roots=None, heads=None):
    """Return a tuple containing three elements. Elements 1 and 2 contain
    a final list bases and heads after all the unreachable ones have been
    pruned.  Element 0 contains a topologically sorted list of all

    nodes that satisfy these constraints:
    1. All nodes must be descended from a node in roots (the nodes on
    roots are considered descended from themselves).
    2. All nodes must also be ancestors of a node in heads (the nodes in
    heads are considered to be their own ancestors).

    If roots is unspecified, nullid is assumed as the only root.
    If heads is unspecified, it is taken to be the output of the
    heads method (i.e. a list of all nodes in the repository that
    have no children)."""
    nonodes = ([], [], [])
    if roots is not None:
        roots = list(roots)
        if not roots:
            return nonodes
        lowestrev = min([self.rev(n) for n in roots])
    else:
        roots = [nullid] # Everybody's a descendent of nullid
        lowestrev = nullrev
    if (lowestrev == nullrev) and (heads is None):
        # We want _all_ the nodes!
        return ([self.node(r) for r in xrange(0, self.count())],
                [nullid], list(self.heads()))
    if heads is None:
        # All nodes are ancestors, so the latest ancestor is the last
        # node.
        highestrev = self.count() - 1
        # Set ancestors to None to signal that every node is an ancestor.
        ancestors = None
        # Set heads to an empty dictionary for later discovery of heads
        heads = {}
    else:
        heads = list(heads)
        if not heads:
            return nonodes
        ancestors = {}
        # Turn heads into a dictionary so we can remove 'fake' heads.
        # Also, later we will be using it to filter out the heads we can't
        # find from roots.
        heads = dict.fromkeys(heads, 0)
        # Start at the top and keep marking parents until we're done.
        # NOTE: keys() is used as a mutable worklist (popped/extended
        # below), which relies on it returning a list (Python 2).
        nodestotag = heads.keys()
        # Remember where the top was so we can use it as a limit later.
        highestrev = max([self.rev(n) for n in nodestotag])
        while nodestotag:
            # grab a node to tag
            n = nodestotag.pop()
            # Never tag nullid
            if n == nullid:
                continue
            # A node's revision number represents its place in a
            # topologically sorted list of nodes.
            r = self.rev(n)
            if r >= lowestrev:
                if n not in ancestors:
                    # If we are possibly a descendent of one of the roots
                    # and we haven't already been marked as an ancestor
                    ancestors[n] = 1 # Mark as ancestor
                    # Add non-nullid parents to list of nodes to tag.
                    nodestotag.extend([p for p in self.parents(n) if
                                       p != nullid])
                elif n in heads: # We've seen it before, is it a fake head?
                    # So it is, real heads should not be the ancestors of
                    # any other heads.
                    heads.pop(n)
        if not ancestors:
            return nonodes
    # Now that we have our set of ancestors, we want to remove any
    # roots that are not ancestors.

    # If one of the roots was nullid, everything is included anyway.
    if lowestrev > nullrev:
        # But, since we weren't, let's recompute the lowest rev to not
        # include roots that aren't ancestors.

        # Filter out roots that aren't ancestors of heads
        roots = [n for n in roots if n in ancestors]
        # Recompute the lowest revision
        if roots:
            lowestrev = min([self.rev(n) for n in roots])
        else:
            # No more roots?  Return empty list
            return nonodes
    else:
        # We are descending from nullid, and don't need to care about
        # any other roots.
        lowestrev = nullrev
        roots = [nullid]
    # Transform our roots list into a 'set' (i.e. a dictionary where the
    # values don't matter.
    descendents = dict.fromkeys(roots, 1)
    # Also, keep the original roots so we can filter out roots that aren't
    # 'real' roots (i.e. are descended from other roots).
    roots = descendents.copy()
    # Our topologically sorted list of output nodes.
    orderedout = []
    # Don't start at nullid since we don't want nullid in our output list,
    # and if nullid shows up in descedents, empty parents will look like
    # they're descendents.
    for r in xrange(max(lowestrev, 0), highestrev + 1):
        n = self.node(r)
        isdescendent = False
        if lowestrev == nullrev:  # Everybody is a descendent of nullid
            isdescendent = True
        elif n in descendents:
            # n is already a descendent
            isdescendent = True
            # This check only needs to be done here because all the roots
            # will start being marked is descendents before the loop.
            if n in roots:
                # If n was a root, check if it's a 'real' root.
                p = tuple(self.parents(n))
                # If any of its parents are descendents, it's not a root.
                if (p[0] in descendents) or (p[1] in descendents):
                    roots.pop(n)
        else:
            p = tuple(self.parents(n))
            # A node is a descendent if either of its parents are
            # descendents.  (We seeded the dependents list with the roots
            # up there, remember?)
            if (p[0] in descendents) or (p[1] in descendents):
                descendents[n] = 1
                isdescendent = True
        if isdescendent and ((ancestors is None) or (n in ancestors)):
            # Only include nodes that are both descendents and ancestors.
            orderedout.append(n)
            if (ancestors is not None) and (n in heads):
                # We're trying to figure out which heads are reachable
                # from roots.
                # Mark this head as having been reached
                heads[n] = 1
            elif ancestors is None:
                # Otherwise, we're trying to discover the heads.
                # Assume this is a head because if it isn't, the next step
                # will eventually remove it.
                heads[n] = 1
                # But, obviously its parents aren't.
                for p in self.parents(n):
                    heads.pop(p, None)
    # Keep only heads actually reached from the roots (value set to 1
    # in the loop above).
    heads = [n for n in heads.iterkeys() if heads[n] != 0]
    roots = roots.keys()
    assert orderedout
    assert roots
    assert heads
    return (orderedout, roots, heads)
def heads(self, start=None, stop=None):
    """return the list of all nodes that have no children

    if start is specified, only heads that are descendants of
    start will be returned
    if stop is specified, it will consider all the revs from stop
    as if they had no children
    """
    if start is None:
        start = nullid
    if stop is None:
        stop = []
    # Revisions in stop terminate the descent: they are treated as
    # childless, so they stay heads and nothing past them is marked.
    stoprevs = dict.fromkeys([self.rev(n) for n in stop])
    startrev = self.rev(start)
    # start itself is reachable and provisionally a head.
    reachable = {startrev: 1}
    heads = {startrev: 1}

    # Single forward sweep: a rev is reachable iff one of its parents
    # is; a parent with a reachable child is no longer a head.
    parentrevs = self.parentrevs
    for r in xrange(startrev + 1, self.count()):
        for p in parentrevs(r):
            if p in reachable:
                if r not in stoprevs:
                    reachable[r] = 1
                    heads[r] = 1
                if p in heads and p not in stoprevs:
                    del heads[p]

    return [self.node(r) for r in heads]
def children(self, node):
    """find the children of a given node"""
    target = self.rev(node)
    kids = []
    # Children can only appear after their parent in revision order,
    # so scan every later revision for a matching parent link.
    for candidate in range(target + 1, self.count()):
        for parent in self.parentrevs(candidate):
            if parent == target:
                kids.append(self.node(candidate))
    return kids
def _match(self, id):
    """Try to resolve ``id`` to a binary node by exact interpretations.

    Tries, in order: integer revision, 20-byte binary node, decimal
    revision string (negative counts from the end), 40-char hex node.
    Returns the binary node on success, None if no exact form matched
    (partial-hex matching is handled separately by _partialmatch).
    """
    if isinstance(id, (long, int)):
        # rev
        return self.node(id)
    if len(id) == 20:
        # possibly a binary node
        # odds of a binary node being all hex in ASCII are 1 in 10**25
        try:
            node = id
            r = self.rev(node) # quick search the index
            return node
        except LookupError:
            pass # may be partial hex id
    try:
        # str(rev)
        rev = int(id)
        if str(rev) != id: raise ValueError
        if rev < 0: rev = self.count() + rev
        if rev < 0 or rev >= self.count(): raise ValueError
        return self.node(rev)
    except (ValueError, OverflowError):
        pass
    if len(id) == 40:
        try:
            # a full hex nodeid?
            node = bin(id)
            r = self.rev(node)
            return node
        except TypeError:
            pass
789 def _partialmatch(self, id):
790 def _partialmatch(self, id):
790 if len(id) < 40:
791 if len(id) < 40:
791 try:
792 try:
792 # hex(node)[:...]
793 # hex(node)[:...]
793 bin_id = bin(id[:len(id) & ~1]) # grab an even number of digits
794 bin_id = bin(id[:len(id) & ~1]) # grab an even number of digits
794 node = None
795 node = None
795 for n in self.nodemap:
796 for n in self.nodemap:
796 if n.startswith(bin_id) and hex(n).startswith(id):
797 if n.startswith(bin_id) and hex(n).startswith(id):
797 if node is not None:
798 if node is not None:
798 raise RevlogError(_("Ambiguous identifier"))
799 raise LookupError(_("Ambiguous identifier"))
799 node = n
800 node = n
800 if node is not None:
801 if node is not None:
801 return node
802 return node
802 except TypeError:
803 except TypeError:
803 pass
804 pass
804
805
def lookup(self, id):
    """locate a node based on:
        - revision number or str(revision number)
        - nodeid or subset of hex nodeid
    """
    # Exact interpretations (rev number, binary node, full hex) first,
    # then fall back to unique-prefix matching.
    node = self._match(id)
    if node is not None:
        return node
    node = self._partialmatch(id)
    if node:
        return node

    raise LookupError(_("No match found"))
def cmp(self, node, text):
    """compare text with a given file revision"""
    parent1, parent2 = self.parents(node)
    # A revision's node is the hash of its text and parents, so the
    # comparison never needs to reconstruct the stored text.
    return hash(text, parent1, parent2) != node
def makenode(self, node, text):
    """calculate a file nodeid for text, descended or possibly
    unchanged from node"""
    # If the text is unchanged relative to node, reuse node itself;
    # otherwise hash the text as a child of node.
    if not self.cmp(node, text):
        return node
    return hash(text, node, nullid)
def diff(self, a, b):
    """return a delta between two revisions

    Thin wrapper over mdiff.textdiff; a and b are full revision texts.
    """
    return mdiff.textdiff(a, b)
def patches(self, t, pl):
    """apply a list of patches to a string

    Thin wrapper over mdiff.patches: applies every delta in pl to
    base text t and returns the resulting text.
    """
    return mdiff.patches(t, pl)
def chunk(self, rev, df=None, cachelen=4096):
    """Return the decompressed delta chunk for ``rev``.

    Reads through self.chunkcache, a (offset, data) window over the
    data file (or the index file for inline revlogs); df is an
    optional already-open file object to read from.
    """
    start, length = self.start(rev), self.length(rev)
    inline = self.inlinedata()
    if inline:
        # Inline revlogs interleave index entries and data, so skip
        # one index-entry-sized header per revision up to this one.
        start += (rev + 1) * struct.calcsize(self.indexformat)
    end = start + length
    def loadcache(df):
        # Refill the cache window starting at this chunk's offset;
        # read at least cachelen bytes so neighbouring chunks hit.
        cache_length = max(cachelen, length) # 4k
        if not df:
            if inline:
                df = self.opener(self.indexfile)
            else:
                df = self.opener(self.datafile)
        df.seek(start)
        self.chunkcache = (start, df.read(cache_length))

    if not self.chunkcache:
        loadcache(df)

    cache_start = self.chunkcache[0]
    cache_end = cache_start + len(self.chunkcache[1])
    if start >= cache_start and end <= cache_end:
        # it is cached
        offset = start - cache_start
    else:
        loadcache(df)
        offset = 0

    #def checkchunk():
    #    df = self.opener(self.datafile)
    #    df.seek(start)
    #    return df.read(length)
    #assert s == checkchunk()
    return decompress(self.chunkcache[1][offset:offset + length])
def delta(self, node):
    """return or calculate a delta between a node and its predecessor"""
    rev = self.rev(node)
    # The predecessor in storage order is simply rev - 1.
    return self.revdiff(rev - 1, rev)
def revdiff(self, rev1, rev2):
    """return or calculate a delta between two revisions"""
    # Adjacent revisions on the same delta chain: the stored chunk for
    # rev2 already IS the delta against rev1, so return it directly.
    if self.base(rev1) == self.base(rev2) and rev1 + 1 == rev2:
        return self.chunk(rev2)
    # Otherwise reconstruct both texts and diff them.
    return self.diff(self.revision(self.node(rev1)),
                     self.revision(self.node(rev2)))
def revision(self, node):
    """return an uncompressed revision of a given node

    Reconstructs the text by applying the delta chain on top of its
    base revision, reusing self.cache (node, rev, text) when it holds
    a usable intermediate, and verifies the result against the node
    hash before caching and returning it.
    """
    if node == nullid: return ""
    if self.cache and self.cache[0] == node: return self.cache[2]

    # look up what we need to read
    text = None
    rev = self.rev(node)
    base = self.base(rev)

    if self.inlinedata():
        # we probably have the whole chunk cached
        df = None
    else:
        df = self.opener(self.datafile)

    # do we have useful data cached?
    if self.cache and self.cache[1] >= base and self.cache[1] < rev:
        # Start the delta walk from the cached revision instead of the
        # chain's base.
        base = self.cache[1]
        text = self.cache[2]
        self.loadindex(base, rev + 1)
    else:
        self.loadindex(base, rev + 1)
        text = self.chunk(base, df=df)

    bins = []
    for r in xrange(base + 1, rev + 1):
        bins.append(self.chunk(r, df=df))

    text = self.patches(text, bins)

    # Integrity: the node must equal the hash of the text and parents.
    p1, p2 = self.parents(node)
    if node != hash(text, p1, p2):
        raise RevlogError(_("integrity check failed on %s:%d")
                          % (self.datafile, rev))

    self.cache = (node, rev, text)
    return text
def checkinlinesize(self, tr, fp=None):
    """Migrate an inline revlog to separate index/data files once the
    index file grows past 128KB.

    tr is the active transaction; fp is an optional open handle on the
    index file.  No-op for non-inline revlogs or small files.
    """
    if not self.inlinedata():
        return
    if not fp:
        fp = self.opener(self.indexfile, 'r')
    fp.seek(0, 2)
    size = fp.tell()
    if size < 131072:
        return
    trinfo = tr.find(self.indexfile)
    if trinfo == None:
        raise RevlogError(_("%s not found in the transaction")
                          % self.indexfile)

    trindex = trinfo[2]
    dataoff = self.start(trindex)

    tr.add(self.datafile, dataoff)
    df = self.opener(self.datafile, 'w')
    calc = struct.calcsize(self.indexformat)
    # Copy each revision's data (skipping the interleaved index
    # entries) out to the new standalone data file.
    for r in xrange(self.count()):
        start = self.start(r) + (r + 1) * calc
        length = self.length(r)
        fp.seek(start)
        d = fp.read(length)
        df.write(d)
    fp.close()
    df.close()
    # Rewrite the index without the inline flag; atomictemp keeps the
    # old index valid until rename below.
    fp = self.opener(self.indexfile, 'w', atomictemp=True)
    self.version &= ~(REVLOGNGINLINEDATA)
    if self.count():
        x = self.index[0]
        e = struct.pack(self.indexformat, *x)[4:]
        l = struct.pack(versionformat, self.version)
        fp.write(l)
        fp.write(e)

    for i in xrange(1, self.count()):
        x = self.index[i]
        e = struct.pack(self.indexformat, *x)
        fp.write(e)

    # if we don't call rename, the temp file will never replace the
    # real index
    fp.rename()

    tr.replace(self.indexfile, trindex * calc)
    self.chunkcache = None
def addrevision(self, text, transaction, link, p1=None, p2=None, d=None):
    """add a revision to the log

    text - the revision data to add
    transaction - the transaction object used for rollback
    link - the linkrev data to add
    p1, p2 - the parent nodeids of the revision
    d - an optional precomputed delta
    """
    # Inline revlogs keep data inside the index file, so no separate
    # data handle is needed.
    dfh = None if self.inlinedata() else self.opener(self.datafile, "a")
    ifh = self.opener(self.indexfile, "a+")
    return self._addrevision(text, transaction, link, p1, p2, d, ifh, dfh)
def _addrevision(self, text, transaction, link, p1, p2, d, ifh, dfh):
    """Internal worker for addrevision: append one revision.

    ifh/dfh are open index/data file handles (dfh is None for inline
    revlogs).  Returns the new node, or the existing one if this exact
    (text, p1, p2) revision is already stored.
    """
    if text is None: text = ""
    if p1 is None: p1 = self.tip()
    if p2 is None: p2 = nullid

    node = hash(text, p1, p2)

    # Duplicate revision: nothing to write.
    if node in self.nodemap:
        return node

    n = self.count()
    t = n - 1

    if n:
        base = self.base(t)
        start = self.start(base)
        end = self.end(t)
        if not d:
            prev = self.revision(self.tip())
            d = self.diff(prev, text)
        data = compress(d)
        l = len(data[1]) + len(data[0])
        dist = end - start + l

    # full versions are inserted when the needed deltas
    # become comparable to the uncompressed text
    if not n or dist > len(text) * 2:
        data = compress(text)
        l = len(data[1]) + len(data[0])
        base = n
    else:
        base = self.base(t)

    offset = 0
    if t >= 0:
        offset = self.end(t)

    # Build the index entry; layout differs between REVLOGV0 and NG.
    if self.version == REVLOGV0:
        e = (offset, l, base, link, p1, p2, node)
    else:
        e = (self.offset_type(offset, 0), l, len(text),
             base, link, self.rev(p1), self.rev(p2), node)

    self.index.append(e)
    self.nodemap[node] = n
    entry = struct.pack(self.indexformat, *e)

    if not self.inlinedata():
        transaction.add(self.datafile, offset)
        transaction.add(self.indexfile, n * len(entry))
        if data[0]:
            dfh.write(data[0])
        dfh.write(data[1])
        dfh.flush()
    else:
        ifh.seek(0, 2)
        transaction.add(self.indexfile, ifh.tell(), self.count() - 1)

    # The very first NG entry carries the version number in its first
    # 4 bytes, so write the version and trim the packed entry.
    if len(self.index) == 1 and self.version != REVLOGV0:
        l = struct.pack(versionformat, self.version)
        ifh.write(l)
        entry = entry[4:]

    ifh.write(entry)

    if self.inlinedata():
        ifh.write(data[0])
        ifh.write(data[1])
        # May migrate the revlog out of inline mode if it grew too big.
        self.checkinlinesize(transaction, ifh)

    self.cache = (node, n, text)
    return node
1068 def ancestor(self, a, b):
1069 def ancestor(self, a, b):
1069 """calculate the least common ancestor of nodes a and b"""
1070 """calculate the least common ancestor of nodes a and b"""
1070
1071
1071 def parents(rev):
1072 def parents(rev):
1072 return [p for p in self.parentrevs(rev) if p != nullrev]
1073 return [p for p in self.parentrevs(rev) if p != nullrev]
1073
1074
1074 c = ancestor.ancestor(self.rev(a), self.rev(b), parents)
1075 c = ancestor.ancestor(self.rev(a), self.rev(b), parents)
1075 if c is None:
1076 if c is None:
1076 return nullid
1077 return nullid
1077
1078
1078 return self.node(c)
1079 return self.node(c)
1079
1080
1080 def group(self, nodelist, lookup, infocollect=None):
1081 def group(self, nodelist, lookup, infocollect=None):
1081 """calculate a delta group
1082 """calculate a delta group
1082
1083
1083 Given a list of changeset revs, return a set of deltas and
1084 Given a list of changeset revs, return a set of deltas and
1084 metadata corresponding to nodes. the first delta is
1085 metadata corresponding to nodes. the first delta is
1085 parent(nodes[0]) -> nodes[0] the receiver is guaranteed to
1086 parent(nodes[0]) -> nodes[0] the receiver is guaranteed to
1086 have this parent as it has all history before these
1087 have this parent as it has all history before these
1087 changesets. parent is parent[0]
1088 changesets. parent is parent[0]
1088 """
1089 """
1089 revs = [self.rev(n) for n in nodelist]
1090 revs = [self.rev(n) for n in nodelist]
1090
1091
1091 # if we don't have any revisions touched by these changesets, bail
1092 # if we don't have any revisions touched by these changesets, bail
1092 if not revs:
1093 if not revs:
1093 yield changegroup.closechunk()
1094 yield changegroup.closechunk()
1094 return
1095 return
1095
1096
1096 # add the parent of the first rev
1097 # add the parent of the first rev
1097 p = self.parents(self.node(revs[0]))[0]
1098 p = self.parents(self.node(revs[0]))[0]
1098 revs.insert(0, self.rev(p))
1099 revs.insert(0, self.rev(p))
1099
1100
1100 # build deltas
1101 # build deltas
1101 for d in xrange(0, len(revs) - 1):
1102 for d in xrange(0, len(revs) - 1):
1102 a, b = revs[d], revs[d + 1]
1103 a, b = revs[d], revs[d + 1]
1103 nb = self.node(b)
1104 nb = self.node(b)
1104
1105
1105 if infocollect is not None:
1106 if infocollect is not None:
1106 infocollect(nb)
1107 infocollect(nb)
1107
1108
1108 d = self.revdiff(a, b)
1109 d = self.revdiff(a, b)
1109 p = self.parents(nb)
1110 p = self.parents(nb)
1110 meta = nb + p[0] + p[1] + lookup(nb)
1111 meta = nb + p[0] + p[1] + lookup(nb)
1111 yield changegroup.genchunk("%s%s" % (meta, d))
1112 yield changegroup.genchunk("%s%s" % (meta, d))
1112
1113
1113 yield changegroup.closechunk()
1114 yield changegroup.closechunk()
1114
1115
    def addgroup(self, revs, linkmapper, transaction, unique=0):
        """
        add a delta group

        given a set of deltas, add them to the revision log. the
        first delta is against its parent, which should be in our
        log, the rest are against the previous delta.

        revs: iterable of chunks, each 80 bytes of header (node, p1,
        p2, changeset node) followed by the delta payload.
        linkmapper: maps a changeset node to the linkrev to store.
        transaction: journal used to record file offsets for rollback.
        unique: historical flag; duplicate nodes are currently skipped
        rather than rejected (see the commented-out raise below).

        Returns the last node added, or None if revs was empty.
        Raises LookupError for an unknown parent or delta base, and
        RevlogError if a reconstructed full text hashes to the wrong
        node.
        """

        #track the base of the current delta log
        r = self.count()
        t = r - 1
        node = None

        base = prev = nullrev
        start = end = textlen = 0
        if r:
            end = self.end(t)

        # register current end-of-file offsets so a failed transaction
        # can truncate back to them
        ifh = self.opener(self.indexfile, "a+")
        ifh.seek(0, 2)
        transaction.add(self.indexfile, ifh.tell(), self.count())
        if self.inlinedata():
            # inline revlogs keep data interleaved in the index file
            dfh = None
        else:
            transaction.add(self.datafile, end)
            dfh = self.opener(self.datafile, "a")

        # loop through our set of deltas
        chain = None
        for chunk in revs:
            node, p1, p2, cs = struct.unpack("20s20s20s20s", chunk[:80])
            link = linkmapper(cs)
            if node in self.nodemap:
                # this can happen if two branches make the same change
                # if unique:
                #     raise RevlogError(_("already have %s") % hex(node[:4]))
                chain = node
                continue
            delta = chunk[80:]

            # both parents must already be known to this revlog
            for p in (p1, p2):
                if not p in self.nodemap:
                    raise LookupError(_("unknown parent %s") % short(p))

            if not chain:
                # retrieve the parent revision of the delta chain
                chain = p1
                if not chain in self.nodemap:
                    raise LookupError(_("unknown base %s") % short(chain[:4]))

            # full versions are inserted when the needed deltas become
            # comparable to the uncompressed text or when the previous
            # version is not the one we have a delta against. We use
            # the size of the previous full rev as a proxy for the
            # current size.

            if chain == prev:
                # delta chains on: compress it and track the patched size
                tempd = compress(delta)
                cdelta = tempd[0] + tempd[1]
                textlen = mdiff.patchedsize(textlen, delta)

            if chain != prev or (end - start + len(cdelta)) > textlen * 2:
                # store a full revision: reconstruct the text by patching
                # the chain base, then let _addrevision pick the storage.
                # flush our writes here so we can read it in revision
                if dfh:
                    dfh.flush()
                ifh.flush()
                text = self.revision(chain)
                text = self.patches(text, [delta])
                chk = self._addrevision(text, transaction, link, p1, p2, None,
                                        ifh, dfh)
                if not dfh and not self.inlinedata():
                    # addrevision switched from inline to conventional
                    # reopen the index
                    dfh = self.opener(self.datafile, "a")
                    ifh = self.opener(self.indexfile, "a")
                if chk != node:
                    # node hash of the rebuilt text disagrees with the
                    # node announced in the chunk header
                    raise RevlogError(_("consistency error adding group"))
                textlen = len(text)
            else:
                # store the compressed delta directly
                if self.version == REVLOGV0:
                    e = (end, len(cdelta), base, link, p1, p2, node)
                else:
                    e = (self.offset_type(end, 0), len(cdelta), textlen, base,
                         link, self.rev(p1), self.rev(p2), node)
                self.index.append(e)
                self.nodemap[node] = r
                if self.inlinedata():
                    ifh.write(struct.pack(self.indexformat, *e))
                    ifh.write(cdelta)
                    self.checkinlinesize(transaction, ifh)
                    if not self.inlinedata():
                        # checkinlinesize migrated us to split files;
                        # reopen both handles
                        dfh = self.opener(self.datafile, "a")
                        ifh = self.opener(self.indexfile, "a")
                else:
                    dfh.write(cdelta)
                    ifh.write(struct.pack(self.indexformat, *e))

            # advance bookkeeping for the next chunk
            t, r, chain, prev = r, r + 1, node, node
            base = self.base(t)
            start = self.start(base)
            end = self.end(t)

        return node
1219
1220
1220 def strip(self, rev, minlink):
1221 def strip(self, rev, minlink):
1221 if self.count() == 0 or rev >= self.count():
1222 if self.count() == 0 or rev >= self.count():
1222 return
1223 return
1223
1224
1224 if isinstance(self.index, lazyindex):
1225 if isinstance(self.index, lazyindex):
1225 self.loadindexmap()
1226 self.loadindexmap()
1226
1227
1227 # When stripping away a revision, we need to make sure it
1228 # When stripping away a revision, we need to make sure it
1228 # does not actually belong to an older changeset.
1229 # does not actually belong to an older changeset.
1229 # The minlink parameter defines the oldest revision
1230 # The minlink parameter defines the oldest revision
1230 # we're allowed to strip away.
1231 # we're allowed to strip away.
1231 while minlink > self.index[rev][-4]:
1232 while minlink > self.index[rev][-4]:
1232 rev += 1
1233 rev += 1
1233 if rev >= self.count():
1234 if rev >= self.count():
1234 return
1235 return
1235
1236
1236 # first truncate the files on disk
1237 # first truncate the files on disk
1237 end = self.start(rev)
1238 end = self.start(rev)
1238 if not self.inlinedata():
1239 if not self.inlinedata():
1239 df = self.opener(self.datafile, "a")
1240 df = self.opener(self.datafile, "a")
1240 df.truncate(end)
1241 df.truncate(end)
1241 end = rev * struct.calcsize(self.indexformat)
1242 end = rev * struct.calcsize(self.indexformat)
1242 else:
1243 else:
1243 end += rev * struct.calcsize(self.indexformat)
1244 end += rev * struct.calcsize(self.indexformat)
1244
1245
1245 indexf = self.opener(self.indexfile, "a")
1246 indexf = self.opener(self.indexfile, "a")
1246 indexf.truncate(end)
1247 indexf.truncate(end)
1247
1248
1248 # then reset internal state in memory to forget those revisions
1249 # then reset internal state in memory to forget those revisions
1249 self.cache = None
1250 self.cache = None
1250 self.chunkcache = None
1251 self.chunkcache = None
1251 for x in xrange(rev, self.count()):
1252 for x in xrange(rev, self.count()):
1252 del self.nodemap[self.node(x)]
1253 del self.nodemap[self.node(x)]
1253
1254
1254 del self.index[rev:]
1255 del self.index[rev:]
1255
1256
    def checksize(self):
        """Compare on-disk file sizes against what the index describes.

        Returns a (dd, di) pair: dd is the number of surplus (positive)
        or missing (negative) bytes in the data file, di the same for
        the index file. Both are 0 when everything matches; a missing
        file is treated as no discrepancy (ENOENT is swallowed).
        """
        # expected data size is the end offset of the last revision
        expected = 0
        if self.count():
            expected = self.end(self.count() - 1)

        try:
            f = self.opener(self.datafile)
            f.seek(0, 2)
            actual = f.tell()
            dd = actual - expected
        except IOError, inst:
            if inst.errno != errno.ENOENT:
                raise
            dd = 0

        try:
            f = self.opener(self.indexfile)
            f.seek(0, 2)
            actual = f.tell()
            s = struct.calcsize(self.indexformat)
            # di: bytes left over after the last whole index entry
            i = actual / s
            di = actual - (i * s)
            if self.inlinedata():
                # inline revlogs interleave revision data with index
                # entries, so account for the data bytes separately
                databytes = 0
                for r in xrange(self.count()):
                    databytes += self.length(r)
                dd = 0
                di = actual - self.count() * s - databytes
        except IOError, inst:
            if inst.errno != errno.ENOENT:
                raise
            di = 0

        return (dd, di)
1290
1291
1291
1292
General Comments 0
You need to be logged in to leave comments. Login now