##// END OF EJS Templates
Add revlog.LookupError exception, and use it instead of RevlogError....
Brendan Cully -
r3930:01d98d68 default
parent child Browse files
Show More
@@ -1,255 +1,255 b''
1 1 """
2 2 bundlerepo.py - repository class for viewing uncompressed bundles
3 3
4 4 This provides a read-only repository interface to bundles as if
5 5 they were part of the actual repository.
6 6
7 7 Copyright 2006 Benoit Boissinot <benoit.boissinot@ens-lyon.org>
8 8
9 9 This software may be used and distributed according to the terms
10 10 of the GNU General Public License, incorporated herein by reference.
11 11 """
12 12
13 13 from node import *
14 14 from i18n import _
15 15 import changegroup, util, os, struct, bz2, tempfile
16 16
17 17 import localrepo, changelog, manifest, filelog, revlog
18 18
class bundlerevlog(revlog.revlog):
    """A revlog overlaid with revisions taken from an uncompressed bundle.

    Revisions already present in the on-disk revlog behave normally;
    revisions that exist only in the bundle are read directly from the
    bundle file using byte offsets recorded at construction time.
    """
    def __init__(self, opener, indexfile, datafile, bundlefile,
                 linkmapper=None):
        # How it works:
        # to retrieve a revision, we need to know the offset of
        # the revision in the bundlefile (an opened file).
        #
        # We store this offset in the index (start), to differentiate a
        # rev in the bundle and from a rev in the revlog, we check
        # len(index[r]). If the tuple is bigger than 7, it is a bundle
        # (it is bigger since we store the node to which the delta is)
        #
        revlog.revlog.__init__(self, opener, indexfile, datafile)
        self.bundlefile = bundlefile
        # rev -> node that the bundle delta for that rev applies against
        self.basemap = {}
        def chunkpositer():
            # yield each changegroup chunk together with its start
            # offset inside the bundle file
            for chunk in changegroup.chunkiter(bundlefile):
                pos = bundlefile.tell()
                yield chunk, pos - len(chunk)
        n = self.count()
        prev = None
        for chunk, start in chunkpositer():
            size = len(chunk)
            if size < 80:
                raise util.Abort("invalid changegroup")
            # skip the 80-byte header: node, p1, p2, changeset
            start += 80
            size -= 80
            node, p1, p2, cs = struct.unpack("20s20s20s20s", chunk[:80])
            if node in self.nodemap:
                prev = node
                continue
            for p in (p1, p2):
                if not p in self.nodemap:
                    # BUG FIX: report the parent that is actually missing
                    # (p), not unconditionally p1
                    raise revlog.LookupError(_("unknown parent %s")
                                             % short(p))
            if linkmapper is None:
                link = n
            else:
                link = linkmapper(cs)

            if not prev:
                prev = p1
            # start, size, base is not used, link, p1, p2, delta ref
            if self.version == revlog.REVLOGV0:
                e = (start, size, None, link, p1, p2, node)
            else:
                e = (self.offset_type(start, 0), size, -1, None, link,
                     self.rev(p1), self.rev(p2), node)
            self.basemap[n] = prev
            self.index.append(e)
            self.nodemap[node] = n
            prev = node
            n += 1

    def bundle(self, rev):
        """is rev from the bundle"""
        if rev < 0:
            return False
        return rev in self.basemap
    def bundlebase(self, rev):
        """return the node this bundle revision's delta applies against"""
        return self.basemap[rev]
    def chunk(self, rev, df=None, cachelen=4096):
        # Warning: in case of bundle, the diff is against bundlebase,
        # not against rev - 1
        # XXX: could use some caching
        if not self.bundle(rev):
            return revlog.revlog.chunk(self, rev, df, cachelen)
        self.bundlefile.seek(self.start(rev))
        return self.bundlefile.read(self.length(rev))

    def revdiff(self, rev1, rev2):
        """return or calculate a delta between two revisions"""
        if self.bundle(rev1) and self.bundle(rev2):
            # hot path for bundle
            revb = self.rev(self.bundlebase(rev2))
            if revb == rev1:
                return self.chunk(rev2)
        elif not self.bundle(rev1) and not self.bundle(rev2):
            # BUG FIX: delegate to the base-class revdiff; the previous
            # code called revlog.revlog.chunk(self, rev1, rev2), passing
            # rev2 where chunk() expects the df file object
            return revlog.revlog.revdiff(self, rev1, rev2)

        # mixed bundle/revlog revisions: fall back to a full text diff
        return self.diff(self.revision(self.node(rev1)),
                         self.revision(self.node(rev2)))

    def revision(self, node):
        """return an uncompressed revision of a given node"""
        if node == nullid: return ""

        text = None
        chain = []
        iter_node = node
        rev = self.rev(iter_node)
        # reconstruct the revision if it is from a changegroup:
        # walk back through bundle deltas until we hit either the
        # revision cache or an on-disk revision
        while self.bundle(rev):
            if self.cache and self.cache[0] == iter_node:
                text = self.cache[2]
                break
            chain.append(rev)
            iter_node = self.bundlebase(rev)
            rev = self.rev(iter_node)
        if text is None:
            text = revlog.revlog.revision(self, iter_node)

        # apply the collected deltas, innermost base first
        while chain:
            delta = self.chunk(chain.pop())
            text = self.patches(text, [delta])

        p1, p2 = self.parents(node)
        if node != revlog.hash(text, p1, p2):
            raise revlog.RevlogError(_("integrity check failed on %s:%d")
                                     % (self.datafile, self.rev(node)))

        self.cache = (node, self.rev(node), text)
        return text

    # the bundle overlay is strictly read-only
    def addrevision(self, text, transaction, link, p1=None, p2=None, d=None):
        raise NotImplementedError
    def addgroup(self, revs, linkmapper, transaction, unique=0):
        raise NotImplementedError
    def strip(self, rev, minlink):
        raise NotImplementedError
    def checksize(self):
        raise NotImplementedError
139 139
class bundlechangelog(bundlerevlog, changelog.changelog):
    """Changelog view over a bundle overlay."""
    def __init__(self, opener, bundlefile):
        # changelog.__init__ must run first: it sets indexfile/datafile,
        # which bundlerevlog.__init__ then needs
        changelog.changelog.__init__(self, opener)
        bundlerevlog.__init__(self, opener, self.indexfile, self.datafile,
                              bundlefile)
145 145
class bundlemanifest(bundlerevlog, manifest.manifest):
    """Manifest view over a bundle overlay."""
    def __init__(self, opener, bundlefile, linkmapper):
        # manifest.__init__ must run first: it sets indexfile/datafile,
        # which bundlerevlog.__init__ then needs
        manifest.manifest.__init__(self, opener)
        bundlerevlog.__init__(self, opener, self.indexfile, self.datafile,
                              bundlefile, linkmapper)
151 151
class bundlefilelog(bundlerevlog, filelog.filelog):
    """Filelog view over a bundle overlay."""
    def __init__(self, opener, path, bundlefile, linkmapper):
        # filelog.__init__ must run first: it sets indexfile/datafile,
        # which bundlerevlog.__init__ then needs
        filelog.filelog.__init__(self, opener, path)
        bundlerevlog.__init__(self, opener, self.indexfile, self.datafile,
                              bundlefile, linkmapper)
157 157
class bundlerepository(localrepo.localrepository):
    """Read-only repository that overlays a bundle on a local repository.

    A compressed (HG10BZ) bundle is first decompressed into a temporary
    uncompressed (HG10UN) copy; file positions inside that copy are then
    indexed so changelog/manifest/filelog lookups can seek directly.
    """
    def __init__(self, ui, path, bundlename):
        localrepo.localrepository.__init__(self, ui, path)

        self._url = 'bundle:' + bundlename
        if path: self._url += '+' + path

        self.tempfile = None
        self.bundlefile = open(bundlename, "rb")
        # the first 6 bytes identify bundle type and compression
        header = self.bundlefile.read(6)
        if not header.startswith("HG"):
            raise util.Abort(_("%s: not a Mercurial bundle file") % bundlename)
        elif not header.startswith("HG10"):
            raise util.Abort(_("%s: unknown bundle version") % bundlename)
        elif header == "HG10BZ":
            # bzip2-compressed bundle: decompress into a temporary
            # uncompressed copy we can seek in
            fdtemp, temp = tempfile.mkstemp(prefix="hg-bundle-",
                                            suffix=".hg10un", dir=self.path)
            self.tempfile = temp
            fptemp = os.fdopen(fdtemp, 'wb')
            def generator(f):
                zd = bz2.BZ2Decompressor()
                # re-feed the "BZ" magic the 6-byte header read consumed
                zd.decompress("BZ")
                for chunk in f:
                    yield zd.decompress(chunk)
            gen = generator(util.filechunkiter(self.bundlefile, 4096))

            try:
                fptemp.write("HG10UN")
                for chunk in gen:
                    fptemp.write(chunk)
            finally:
                fptemp.close()
                self.bundlefile.close()

            self.bundlefile = open(self.tempfile, "rb")
            # seek right after the header
            self.bundlefile.seek(6)
        elif header == "HG10UN":
            # nothing to do
            pass
        else:
            raise util.Abort(_("%s: unknown bundle compression type")
                             % bundlename)
        # these consume the changelog and manifest sections of the
        # bundle, leaving the file positioned at the file sections
        self.changelog = bundlechangelog(self.sopener, self.bundlefile)
        self.manifest = bundlemanifest(self.sopener, self.bundlefile,
                                       self.changelog.rev)
        # dict with the mapping 'filename' -> position in the bundle
        self.bundlefilespos = {}
        while 1:
            f = changegroup.getchunk(self.bundlefile)
            if not f:
                break
            self.bundlefilespos[f] = self.bundlefile.tell()
            # skip over this file's chunks to reach the next filename
            for c in changegroup.chunkiter(self.bundlefile):
                pass

    def url(self):
        return self._url

    def dev(self):
        # bundles have no backing device
        return -1

    def file(self, f):
        if f[0] == '/':
            f = f[1:]
        if f in self.bundlefilespos:
            # file has revisions in the bundle: overlay them
            self.bundlefile.seek(self.bundlefilespos[f])
            return bundlefilelog(self.sopener, f, self.bundlefile,
                                 self.changelog.rev)
        else:
            return filelog.filelog(self.sopener, f)

    def close(self):
        """Close assigned bundle file immediately."""
        self.bundlefile.close()

    def __del__(self):
        # getattr guards: __init__ may have aborted before these exist
        bundlefile = getattr(self, 'bundlefile', None)
        if bundlefile and not bundlefile.closed:
            bundlefile.close()
        tempfile = getattr(self, 'tempfile', None)
        if tempfile is not None:
            os.unlink(tempfile)
241 241
def instance(ui, path, create):
    """Open a read-only bundle repository for the given path.

    path may be "bundlename", "bundle:bundlename" or
    "bundle:repopath+bundlename".
    """
    if create:
        raise util.Abort(_('cannot create new bundle repository'))
    path = util.drop_scheme('file', path)
    if path.startswith('bundle:'):
        path = util.drop_scheme('bundle', path)
        if "+" in path:
            # "repopath+bundlename"
            repopath, bundlename = path.split("+", 1)
        else:
            repopath, bundlename = "", path
    else:
        repopath, bundlename = '', path
    return bundlerepository(ui, repopath, bundlename)
@@ -1,3284 +1,3285 b''
1 1 # commands.py - command processing for mercurial
2 2 #
3 3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 import demandimport; demandimport.enable()
9 9 from node import *
10 10 from i18n import _
11 11 import bisect, os, re, sys, signal, imp, urllib, pdb, shlex, stat
12 12 import fancyopts, ui, hg, util, lock, revlog, bundlerepo
13 13 import difflib, patch, time, help, mdiff, tempfile
14 14 import traceback, errno, version, atexit
15 15 import archival, changegroup, cmdutil, hgweb.server, sshserver
16 16
class UnknownCommand(Exception):
    """Raised when the requested command is not in the command table."""
class AmbiguousCommand(Exception):
    """Raised when a command shortcut matches more than one command."""
21 21
def bail_if_changed(repo):
    """Abort if the working directory has uncommitted changes."""
    # status()[:4] -> modified, added, removed, deleted file lists
    for changed in repo.status()[:4]:
        if changed:
            raise util.Abort(_("outstanding uncommitted changes"))
26 26
def logmessage(opts):
    """ get the log message according to -m and -l option """
    # -m/--message supplies the text directly; -l/--logfile reads it
    # from a file ('-' means stdin); they are mutually exclusive
    message = opts['message']
    logfile = opts['logfile']

    if message and logfile:
        raise util.Abort(_('options --message and --logfile are mutually '
                           'exclusive'))
    if not message and logfile:
        try:
            if logfile == '-':
                message = sys.stdin.read()
            else:
                message = open(logfile).read()
        except IOError, inst:
            raise util.Abort(_("can't read commit message '%s': %s") %
                             (logfile, inst.strerror))
    # may be empty if neither option was given; callers handle that
    return message
45 45
def setremoteconfig(ui, opts):
    "copy remote options to ui tree"
    # only non-empty values override the ui configuration
    ssh = opts.get('ssh')
    if ssh:
        ui.setconfig("ui", "ssh", ssh)
    remotecmd = opts.get('remotecmd')
    if remotecmd:
        ui.setconfig("ui", "remotecmd", remotecmd)
52 52
53 53 # Commands start here, listed alphabetically
54 54
def add(ui, repo, *pats, **opts):
    """add the specified files on the next commit

    Schedule files to be version controlled and added to the repository.

    The files will be added to the repository at the next commit. To
    undo an add before that, see hg revert.

    If no names are given, add all files in the repository.
    """

    names = []
    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
        # exactly-named files are always added; pattern matches only
        # when they are currently untracked ('?')
        if not exact and repo.dirstate.state(abs) != '?':
            continue
        # exact matches are reported only in verbose mode
        if not exact or ui.verbose:
            ui.status(_('adding %s\n') % rel)
        names.append(abs)
    if not opts.get('dry_run'):
        repo.add(names)
77 77
def addremove(ui, repo, *pats, **opts):
    """add all new files, delete all missing files

    Add all new files and remove all missing files from the repository.

    New files are ignored if they match any of the patterns in .hgignore. As
    with add, these changes take effect at the next commit.

    Use the -s option to detect renamed files. With a parameter > 0,
    this compares every removed file with every added file and records
    those similar enough as renames. This option takes a percentage
    between 0 (disabled) and 100 (files must be identical) as its
    parameter. Detecting renamed files this way can be expensive.
    """
    # similarity arrives as a percentage string (or absent)
    sim = float(opts.get('similarity') or 0)
    if not (0 <= sim <= 100):
        raise util.Abort(_('similarity must be between 0 and 100'))
    # cmdutil expects a 0..1 ratio
    return cmdutil.addremove(repo, pats, opts, similarity=sim / 100.0)
96 96
def annotate(ui, repo, *pats, **opts):
    """show changeset information per file line

    List changes in files, showing the revision id responsible for each line

    This command is useful to discover who did a change or when a change took
    place.

    Without the -a option, annotate will avoid processing files it
    detects as binary. With -a, annotate will generate an annotation
    anyway, probably with undesirable results.
    """
    # memoize date formatting: the same changectx appears on many lines
    getdate = util.cachefunc(lambda x: util.datestr(x.date()))

    if not pats:
        raise util.Abort(_('at least one file name or pattern required'))

    # (option name, formatter) pairs: one output column per enabled option
    opmap = [['user', lambda x: ui.shortuser(x.user())],
             ['number', lambda x: str(x.rev())],
             ['changeset', lambda x: short(x.node())],
             ['date', getdate], ['follow', lambda x: x.path()]]
    # default to revision numbers when no column was requested
    if (not opts['user'] and not opts['changeset'] and not opts['date']
        and not opts['follow']):
        opts['number'] = 1

    ctx = repo.changectx(opts['rev'])

    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
                                             node=ctx.node()):
        fctx = ctx.filectx(abs)
        if not opts['text'] and util.binary(fctx.data()):
            ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
            continue

        lines = fctx.annotate(follow=opts.get('follow'))
        pieces = []

        for o, f in opmap:
            if opts[o]:
                l = [f(n) for n, dummy in lines]
                if l:
                    # right-align each column to its widest entry
                    m = max(map(len, l))
                    pieces.append(["%*s" % (m, x) for x in l])

        # zip(*pieces) transposes columns back into per-line tuples
        if pieces:
            for p, l in zip(zip(*pieces), lines):
                ui.write("%s: %s" % (" ".join(p), l[1]))
144 144
def archive(ui, repo, dest, **opts):
    '''create unversioned archive of a repository revision

    By default, the revision used is the parent of the working
    directory; use "-r" to specify a different revision.

    To specify the type of archive to create, use "-t". Valid
    types are:

    "files" (default): a directory full of files
    "tar": tar archive, uncompressed
    "tbz2": tar archive, compressed using bzip2
    "tgz": tar archive, compressed using gzip
    "uzip": zip archive, uncompressed
    "zip": zip archive, compressed using deflate

    The exact name of the destination archive or directory is given
    using a format string; see "hg help export" for details.

    Each member added to an archive file has a directory prefix
    prepended. Use "-p" to specify a format string for the prefix.
    The default is the basename of the archive, with suffixes removed.
    '''

    node = repo.changectx(opts['rev']).node()
    dest = cmdutil.make_filename(repo, dest, node)
    if os.path.realpath(dest) == repo.root:
        raise util.Abort(_('repository root cannot be destination'))
    # only the match function from matchpats is needed here
    matchfn = cmdutil.matchpats(repo, [], opts)[1]
    kind = opts.get('type') or 'files'
    prefix = opts['prefix']
    if dest == '-':
        if kind == 'files':
            raise util.Abort(_('cannot archive plain files to stdout'))
        dest = sys.stdout
        if not prefix:
            # default prefix for streamed archives
            prefix = os.path.basename(repo.root) + '-%h'
    prefix = cmdutil.make_filename(repo, prefix, node)
    archival.archive(repo, dest, node, kind, not opts['no_decode'],
                     matchfn, prefix)
184 184
def backout(ui, repo, rev, **opts):
    '''reverse effect of earlier changeset

    Commit the backed out changes as a new changeset. The new
    changeset is a child of the backed out changeset.

    If you back out a changeset other than the tip, a new head is
    created. This head is the parent of the working directory. If
    you back out an old changeset, your working directory will appear
    old after the backout. You should merge the backout changeset
    with another head.

    The --merge option remembers the parent of the working directory
    before starting the backout, then merges the new head with that
    changeset afterwards. This saves you from doing the merge by
    hand. The result of this merge is not committed, as for a normal
    merge.'''

    # refuse to run with uncommitted changes or a pending merge
    bail_if_changed(repo)
    op1, op2 = repo.dirstate.parents()
    if op2 != nullid:
        raise util.Abort(_('outstanding uncommitted merge'))
    node = repo.lookup(rev)
    p1, p2 = repo.changelog.parents(node)
    if p1 == nullid:
        raise util.Abort(_('cannot back out a change with no parents'))
    if p2 != nullid:
        # backing out a merge: the user must pick which parent to keep
        if not opts['parent']:
            raise util.Abort(_('cannot back out a merge changeset without '
                               '--parent'))
        p = repo.lookup(opts['parent'])
        if p not in (p1, p2):
            raise util.Abort(_('%s is not a parent of %s') %
                             (short(p), short(node)))
        parent = p
    else:
        if opts['parent']:
            raise util.Abort(_('cannot use --parent on non-merge changeset'))
        parent = p1
    # update to the target, then revert everything back to its parent
    hg.clean(repo, node, show_stats=False)
    revert_opts = opts.copy()
    revert_opts['date'] = None
    revert_opts['all'] = True
    revert_opts['rev'] = hex(parent)
    revert(ui, repo, **revert_opts)
    commit_opts = opts.copy()
    commit_opts['addremove'] = False
    if not commit_opts['message'] and not commit_opts['logfile']:
        commit_opts['message'] = _("Backed out changeset %s") % (hex(node))
        # let the user review/edit the generated message
        commit_opts['force_editor'] = True
    commit(ui, repo, **commit_opts)
    def nice(node):
        return '%d:%s' % (repo.changelog.rev(node), short(node))
    ui.status(_('changeset %s backs out changeset %s\n') %
              (nice(repo.changelog.tip()), nice(node)))
    if op1 != node:
        # the backout created a new head relative to where we started
        if opts['merge']:
            ui.status(_('merging with changeset %s\n') % nice(op1))
            hg.merge(repo, hex(op1))
        else:
            ui.status(_('the backout changeset is a new head - '
                        'do not forget to merge\n'))
            ui.status(_('(use "backout --merge" '
                        'if you want to auto-merge)\n'))
249 249
def branch(ui, repo, label=None):
    """set or show the current branch name

    With <name>, set the current branch name. Otherwise, show the
    current branch name.
    """

    if label is None:
        # no argument: report the working directory's branch, if any
        b = util.tolocal(repo.workingctx().branch())
        if b:
            ui.write("%s\n" % b)
    else:
        # record the new branch name for the next commit
        repo.opener("branch", "w").write(util.fromlocal(label) + '\n')
263 263
def branches(ui, repo):
    """list repository named branches

    List the repository's named branches.
    """
    b = repo.branchtags()
    # sort most-recent first by negating the revision number
    l = [(-repo.changelog.rev(n), n, t) for t, n in b.items()]
    l.sort()
    # hexfunc is loop-invariant; hoisted out of the loop (it used to be
    # recomputed on every iteration)
    hexfunc = ui.debugflag and hex or short
    for r, n, t in l:
        if ui.quiet:
            ui.write("%s\n" % t)
        else:
            # pad the (possibly multi-byte) branch name to 30 columns
            t = util.localsub(t, 30)
            t += " " * (30 - util.locallen(t))
            ui.write("%s %s:%s\n" % (t, -r, hexfunc(n)))
280 280
def bundle(ui, repo, fname, dest=None, **opts):
    """create a changegroup file

    Generate a compressed changegroup file collecting changesets not
    found in the other repository.

    If no destination repository is specified the destination is assumed
    to have all the nodes specified by one or more --base parameters.

    The bundle file can then be transferred using conventional means and
    applied to another repository with the unbundle or pull command.
    This is useful when direct push and pull are not available or when
    exporting an entire repository is undesirable.

    Applying bundles preserves all changeset contents including
    permissions, copy/rename information, and revision history.
    """
    revs = opts.get('rev') or None
    if revs:
        revs = [repo.lookup(rev) for rev in revs]
    base = opts.get('base')
    if base:
        if dest:
            raise util.Abort(_("--base is incompatible with specifiying "
                               "a destination"))
        base = [repo.lookup(rev) for rev in base]
        # create the right base
        # XXX: nodesbetween / changegroup* should be "fixed" instead
        o = []
        # everything reachable from a --base node is assumed present
        # on the receiving side
        has = {nullid: None}
        for n in base:
            has.update(repo.changelog.reachable(n))
        if revs:
            visit = list(revs)
        else:
            visit = repo.changelog.heads()
        seen = {}
        # walk ancestors until we cross into 'has'; roots of the walk
        # (nodes whose parents are all in 'has') form the outgoing set
        while visit:
            n = visit.pop(0)
            parents = [p for p in repo.changelog.parents(n) if p not in has]
            if len(parents) == 0:
                o.insert(0, n)
            else:
                for p in parents:
                    if p not in seen:
                        seen[p] = 1
                        visit.append(p)
    else:
        # no --base: compare against the destination repository
        setremoteconfig(ui, opts)
        dest = ui.expandpath(dest or 'default-push', dest or 'default')
        other = hg.repository(ui, dest)
        o = repo.findoutgoing(other, force=opts['force'])

    if revs:
        cg = repo.changegroupsubset(o, revs, 'bundle')
    else:
        cg = repo.changegroup(o, 'bundle')
    changegroup.writebundle(cg, fname, "HG10BZ")
339 339
def cat(ui, repo, file1, *pats, **opts):
    """output the current or given revision of files

    Print the specified files as they were at the given revision.
    If no revision is given, the parent of the working directory is used,
    or tip if no revision is checked out.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are the same as
    for the export command, with the following additions:

    %s basename of file being printed
    %d dirname of file being printed, or '.' if in repo root
    %p root-relative path name of file being printed
    """
    ctx = repo.changectx(opts['rev'])
    walker = cmdutil.walk(repo, (file1,) + pats, opts, ctx.node())
    for src, abs, rel, exact in walker:
        # open the (possibly formatted) output target, then dump the
        # file's contents at the requested revision
        fp = cmdutil.make_file(repo, opts['output'], ctx.node(),
                               pathname=abs)
        fp.write(ctx.filectx(abs).data())
360 360
def clone(ui, source, dest=None, **opts):
    """make a copy of an existing repository

    Create a copy of an existing repository in a new directory.

    If no destination directory name is specified, it defaults to the
    basename of the source.

    The location of the source is added to the new repository's
    .hg/hgrc file, as the default to be used for future pulls.

    For efficiency, hardlinks are used for cloning whenever the source
    and destination are on the same filesystem (note this applies only
    to the repository data, not to the checked out files). Some
    filesystems, such as AFS, implement hardlinking incorrectly, but
    do not report errors. In these cases, use the --pull option to
    avoid hardlinking.

    You can safely clone repositories and checked out files using full
    hardlinks with

    $ cp -al REPO REPOCLONE

    which is the fastest way to clone. However, the operation is not
    atomic (making sure REPO is not modified during the operation is
    up to you) and you have to make sure your editor breaks hardlinks
    (Emacs and most Linux Kernel tools do so).

    If you use the -r option to clone up to a specific revision, no
    subsequent revisions will be present in the cloned repository.
    This option implies --pull, even on local repositories.

    See pull for valid source format details.

    It is possible to specify an ssh:// URL as the destination, but no
    .hg/hgrc and working directory will be created on the remote side.
    Look at the help text for the pull command for important details
    about ssh:// URLs.
    """
    # honor --ssh/--remotecmd before contacting the source
    setremoteconfig(ui, opts)
    src = ui.expandpath(source)
    hg.clone(ui, src, dest,
             pull=opts['pull'],
             stream=opts['uncompressed'],
             rev=opts['rev'],
             update=not opts['noupdate'])
406 406
def commit(ui, repo, *pats, **opts):
    """commit the specified files or all outstanding changes

    Commit changes to the given files into the repository.

    If a list of files is omitted, all changes reported by "hg status"
    will be committed.

    If no commit message is specified, the editor configured in your hgrc
    or in the EDITOR environment variable is started to enter a message.
    """
    message = logmessage(opts)

    if opts['addremove']:
        cmdutil.addremove(repo, pats, opts)
    fns, match, anypats = cmdutil.matchpats(repo, pats, opts)
    if pats:
        status = repo.status(files=fns, match=match)
        modified, added, removed, deleted, unknown = status[:5]
        files = modified + added + removed
        slist = None
        # validate every explicitly named file that status did not
        # report as changed
        for f in fns:
            if f not in files:
                rf = repo.wjoin(f)
                if f in unknown:
                    raise util.Abort(_("file %s not tracked!") % rf)
                try:
                    mode = os.lstat(rf)[stat.ST_MODE]
                except OSError:
                    raise util.Abort(_("file %s not found!") % rf)
                if stat.S_ISDIR(mode):
                    name = f + '/'
                    # lazily build a sorted file list, then bisect to
                    # check whether anything changed under the directory
                    if slist is None:
                        slist = list(files)
                        slist.sort()
                    i = bisect.bisect(slist, name)
                    if i >= len(slist) or not slist[i].startswith(name):
                        raise util.Abort(_("no match under directory %s!")
                                         % rf)
                elif not stat.S_ISREG(mode):
                    raise util.Abort(_("can't commit %s: "
                                       "unsupported file type!") % rf)
    else:
        # empty list means "commit everything" for localrepo.commit
        files = []
    try:
        repo.commit(files, message, opts['user'], opts['date'], match,
                    force_editor=opts.get('force_editor'))
    except ValueError, inst:
        raise util.Abort(str(inst))
456 456
457 457 def docopy(ui, repo, pats, opts, wlock):
458 458 # called with the repo lock held
459 459 #
460 460 # hgsep => pathname that uses "/" to separate directories
461 461 # ossep => pathname that uses os.sep to separate directories
462 462 cwd = repo.getcwd()
463 463 errors = 0
464 464 copied = []
465 465 targets = {}
466 466
467 467 # abs: hgsep
468 468 # rel: ossep
469 469 # return: hgsep
470 470 def okaytocopy(abs, rel, exact):
471 471 reasons = {'?': _('is not managed'),
472 472 'a': _('has been marked for add'),
473 473 'r': _('has been marked for remove')}
474 474 state = repo.dirstate.state(abs)
475 475 reason = reasons.get(state)
476 476 if reason:
477 477 if state == 'a':
478 478 origsrc = repo.dirstate.copied(abs)
479 479 if origsrc is not None:
480 480 return origsrc
481 481 if exact:
482 482 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
483 483 else:
484 484 return abs
485 485
486 486 # origsrc: hgsep
487 487 # abssrc: hgsep
488 488 # relsrc: ossep
489 489 # target: ossep
490 490 def copy(origsrc, abssrc, relsrc, target, exact):
491 491 abstarget = util.canonpath(repo.root, cwd, target)
492 492 reltarget = util.pathto(cwd, abstarget)
493 493 prevsrc = targets.get(abstarget)
494 494 if prevsrc is not None:
495 495 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
496 496 (reltarget, util.localpath(abssrc),
497 497 util.localpath(prevsrc)))
498 498 return
499 499 if (not opts['after'] and os.path.exists(reltarget) or
500 500 opts['after'] and repo.dirstate.state(abstarget) not in '?r'):
501 501 if not opts['force']:
502 502 ui.warn(_('%s: not overwriting - file exists\n') %
503 503 reltarget)
504 504 return
505 505 if not opts['after'] and not opts.get('dry_run'):
506 506 os.unlink(reltarget)
507 507 if opts['after']:
508 508 if not os.path.exists(reltarget):
509 509 return
510 510 else:
511 511 targetdir = os.path.dirname(reltarget) or '.'
512 512 if not os.path.isdir(targetdir) and not opts.get('dry_run'):
513 513 os.makedirs(targetdir)
514 514 try:
515 515 restore = repo.dirstate.state(abstarget) == 'r'
516 516 if restore and not opts.get('dry_run'):
517 517 repo.undelete([abstarget], wlock)
518 518 try:
519 519 if not opts.get('dry_run'):
520 520 util.copyfile(relsrc, reltarget)
521 521 restore = False
522 522 finally:
523 523 if restore:
524 524 repo.remove([abstarget], wlock)
525 525 except IOError, inst:
526 526 if inst.errno == errno.ENOENT:
527 527 ui.warn(_('%s: deleted in working copy\n') % relsrc)
528 528 else:
529 529 ui.warn(_('%s: cannot copy - %s\n') %
530 530 (relsrc, inst.strerror))
531 531 errors += 1
532 532 return
533 533 if ui.verbose or not exact:
534 534 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
535 535 targets[abstarget] = abssrc
536 536 if abstarget != origsrc and not opts.get('dry_run'):
537 537 repo.copy(origsrc, abstarget, wlock)
538 538 copied.append((abssrc, relsrc, exact))
539 539
540 540 # pat: ossep
541 541 # dest ossep
542 542 # srcs: list of (hgsep, hgsep, ossep, bool)
543 543 # return: function that takes hgsep and returns ossep
544 544 def targetpathfn(pat, dest, srcs):
545 545 if os.path.isdir(pat):
546 546 abspfx = util.canonpath(repo.root, cwd, pat)
547 547 abspfx = util.localpath(abspfx)
548 548 if destdirexists:
549 549 striplen = len(os.path.split(abspfx)[0])
550 550 else:
551 551 striplen = len(abspfx)
552 552 if striplen:
553 553 striplen += len(os.sep)
554 554 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
555 555 elif destdirexists:
556 556 res = lambda p: os.path.join(dest,
557 557 os.path.basename(util.localpath(p)))
558 558 else:
559 559 res = lambda p: dest
560 560 return res
561 561
562 562 # pat: ossep
563 563 # dest ossep
564 564 # srcs: list of (hgsep, hgsep, ossep, bool)
565 565 # return: function that takes hgsep and returns ossep
    def targetpathafterfn(pat, dest, srcs):
        """Return a source-to-target mapping function for --after copies.

        Unlike targetpathfn, the source files may no longer exist on disk,
        so the directory/file distinction is inferred from the pattern and
        from what already exists under dest.  Closure over repo, cwd and
        destdirexists from the enclosing docopy.
        """
        if util.patkind(pat, None)[0]:
            # a mercurial pattern
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            abspfx = util.canonpath(repo.root, cwd, pat)
            if len(abspfx) < len(srcs[0][0]):
                # A directory. Either the target path contains the last
                # component of the source path or it does not.
                def evalpath(striplen):
                    # count how many sources already exist under dest when
                    # stripped at striplen -- used to pick the likelier layout
                    score = 0
                    for s in srcs:
                        t = os.path.join(dest, util.localpath(s[0])[striplen:])
                        if os.path.exists(t):
                            score += 1
                    return score

                abspfx = util.localpath(abspfx)
                striplen = len(abspfx)
                if striplen:
                    striplen += len(os.sep)
                if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
                    # dest already holds the source dir's last component:
                    # compare both candidate strip points and keep the one
                    # matching more existing files
                    score = evalpath(striplen)
                    striplen1 = len(os.path.split(abspfx)[0])
                    if striplen1:
                        striplen1 += len(os.sep)
                    if evalpath(striplen1) > score:
                        striplen = striplen1
                res = lambda p: os.path.join(dest,
                                             util.localpath(p)[striplen:])
            else:
                # a file
                if destdirexists:
                    res = lambda p: os.path.join(dest,
                                        os.path.basename(util.localpath(p)))
                else:
                    res = lambda p: dest
        return res
605 605
606 606
607 607 pats = list(pats)
608 608 if not pats:
609 609 raise util.Abort(_('no source or destination specified'))
610 610 if len(pats) == 1:
611 611 raise util.Abort(_('no destination specified'))
612 612 dest = pats.pop()
613 613 destdirexists = os.path.isdir(dest)
614 614 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
615 615 raise util.Abort(_('with multiple sources, destination must be an '
616 616 'existing directory'))
617 617 if opts['after']:
618 618 tfn = targetpathafterfn
619 619 else:
620 620 tfn = targetpathfn
621 621 copylist = []
622 622 for pat in pats:
623 623 srcs = []
624 624 for tag, abssrc, relsrc, exact in cmdutil.walk(repo, [pat], opts):
625 625 origsrc = okaytocopy(abssrc, relsrc, exact)
626 626 if origsrc:
627 627 srcs.append((origsrc, abssrc, relsrc, exact))
628 628 if not srcs:
629 629 continue
630 630 copylist.append((tfn(pat, dest, srcs), srcs))
631 631 if not copylist:
632 632 raise util.Abort(_('no files to copy'))
633 633
634 634 for targetpath, srcs in copylist:
635 635 for origsrc, abssrc, relsrc, exact in srcs:
636 636 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
637 637
638 638 if errors:
639 639 ui.warn(_('(consider using --after)\n'))
640 640 return errors, copied
641 641
def copy(ui, repo, *pats, **opts):
    """mark files as copied for the next commit

    Mark dest as having copies of source files. If dest is a
    directory, copies are put in that directory. If dest is a file,
    there can only be one source.

    By default, this command copies the contents of files as they
    stand in the working directory. If invoked with --after, the
    operation is recorded, but no copying is performed.

    This command takes effect in the next commit. To undo a copy
    before that, see hg revert.
    """
    # hold the working-dir lock for the whole operation; docopy does the work
    wlock = repo.wlock(0)
    # docopy returns (error count, copied files); only the count matters here
    errs, copied = docopy(ui, repo, pats, opts, wlock)
    return errs
659 659
def debugancestor(ui, index, rev1, rev2):
    """find the ancestor revision of two revisions in a given index"""
    # open the index read-only; no data file is needed for ancestry walks
    rlog = revlog.revlog(util.opener(os.getcwd(), audit=False), index, "", 0)
    anc = rlog.ancestor(rlog.lookup(rev1), rlog.lookup(rev2))
    ui.write("%d:%s\n" % (rlog.rev(anc), hex(anc)))
665 665
def debugcomplete(ui, cmd='', **opts):
    """returns the completion list associated with the given command"""
    if opts['options']:
        # complete option names rather than command names
        tables = [globalopts]
        if cmd:
            aliases, entry = findcmd(ui, cmd)
            tables.append(entry[1])
        flags = []
        for tbl in tables:
            for shortopt, longopt, default, desc in tbl:
                if shortopt:
                    flags.append('-%s' % shortopt)
                flags.append('--%s' % longopt)
        ui.write("%s\n" % "\n".join(flags))
        return

    # plain command-name completion
    names = findpossible(ui, cmd).keys()
    names.sort()
    ui.write("%s\n" % "\n".join(names))
686 686
def debugrebuildstate(ui, repo, rev=None):
    """rebuild the dirstate as it would look like for the given revision"""
    # default to the tip when no revision is given
    if rev:
        node = repo.lookup(rev)
    else:
        node = repo.changelog.tip()
    # resolve changeset -> manifest -> tracked files
    manifestnode = repo.changelog.read(node)[0]
    files = repo.manifest.read(manifestnode)
    wlock = repo.wlock()
    repo.dirstate.rebuild(node, files)
698 698
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    repo.dirstate.read()
    dc = repo.dirstate.map
    # manifests of both working-directory parents
    m1n = repo.changelog.read(parent1)[0]
    m2n = repo.changelog.read(parent2)[0]
    m1 = repo.manifest.read(m1n)
    m2 = repo.manifest.read(m2n)
    errors = 0
    # forward check: every dirstate entry must be consistent with the manifests
    for f in dc:
        state = repo.dirstate.state(f)
        if state in "nr" and f not in m1:
            # normal/removed files must exist in the first manifest
            ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        if state in "a" and f in m1:
            # added files must not already be tracked
            ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        if state in "m" and f not in m1 and f not in m2:
            # merged files must come from at least one parent
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (f, state))
            errors += 1
    # reverse check: every file in manifest1 must be known to the dirstate
    for f in m1:
        state = repo.dirstate.state(f)
        if state not in "nrm":
            ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        error = _(".hg/dirstate inconsistent with current parent's manifest")
        raise util.Abort(error)
731 731
def showconfig(ui, repo, *values, **opts):
    """show combined config settings from all hgrc files

    With no args, print names and values of all config items.

    With one arg of the form section.name, print just the value of
    that config item.

    With multiple args, print names and values of all config items
    with matching section names."""

    untrusted = bool(opts.get('untrusted'))
    if values:
        # at most one fully-qualified section.name argument is allowed
        dotted = [v for v in values if '.' in v]
        if len(dotted) > 1:
            raise util.Abort(_('only one config item permitted'))
    for section, name, value in ui.walkconfig(untrusted=untrusted):
        sectname = section + '.' + name
        if not values:
            # no filters: dump everything
            ui.write('%s=%s\n' % (sectname, value))
            continue
        for v in values:
            if v == section:
                # section filter: print name=value pairs
                ui.write('%s=%s\n' % (sectname, value))
            elif v == sectname:
                # exact item: print the bare value
                ui.write(value, '\n')
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care.
    """
    # a missing second parent defaults to the null revision
    second = rev2 or hex(nullid)
    repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(second))
769 769
770 770 def debugstate(ui, repo):
771 771 """show the contents of the current dirstate"""
772 772 repo.dirstate.read()
773 773 dc = repo.dirstate.map
774 774 keys = dc.keys()
775 775 keys.sort()
776 776 for file_ in keys:
777 777 ui.write("%c %3o %10d %s %s\n"
778 778 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
779 779 time.strftime("%x %X",
780 780 time.localtime(dc[file_][3])), file_))
781 781 for f in repo.dirstate.copies():
782 782 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
783 783
def debugdata(ui, file_, rev):
    """dump the contents of a data file revision"""
    # derive the index file name from the data file name (.d -> .i)
    r = revlog.revlog(util.opener(os.getcwd(), audit=False),
                      file_[:-2] + ".i", file_, 0)
    try:
        ui.write(r.revision(r.lookup(rev)))
    except KeyError:
        # unknown revision identifiers surface as KeyError from lookup here
        raise util.Abort(_('invalid revision identifier %s') % rev)
792 792
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    # --extended enables the extra date formats understood by util
    if opts["extended"]:
        parsed = util.parsedate(date, util.extendeddateformats)
    else:
        parsed = util.parsedate(date)
    ui.write("internal: %s %s\n" % parsed)
    ui.write("standard: %s\n" % util.datestr(parsed))
    if range:
        # check whether the parsed timestamp falls inside the given range
        matchfn = util.matchdate(range)
        ui.write("match: %s\n" % matchfn(parsed[0]))
804 804
def debugindex(ui, file_):
    """dump the contents of an index file"""
    rlog = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
    ui.write(" rev offset length base linkrev" +
             " nodeid p1 p2\n")
    # one line per revision in index order
    for i in xrange(rlog.count()):
        node = rlog.node(i)
        p1, p2 = rlog.parents(node)
        ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
            i, rlog.start(i), rlog.length(i), rlog.base(i),
            rlog.linkrev(node), short(node), short(p1), short(p2)))
816 816
def debugindexdot(ui, file_):
    """dump an index DAG as a .dot file"""
    rlog = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
    ui.write("digraph G {\n")
    for i in xrange(rlog.count()):
        node = rlog.node(i)
        p1, p2 = rlog.parents(node)
        # first parent edge is always emitted; second only if non-null
        ui.write("\t%d -> %d\n" % (rlog.rev(p1), i))
        if p2 != nullid:
            ui.write("\t%d -> %d\n" % (rlog.rev(p2), i))
    ui.write("}\n")
828 828
829 829 def debuginstall(ui):
830 830 '''test Mercurial installation'''
831 831
832 832 def writetemp(contents):
833 833 (fd, name) = tempfile.mkstemp()
834 834 f = os.fdopen(fd, "wb")
835 835 f.write(contents)
836 836 f.close()
837 837 return name
838 838
839 839 problems = 0
840 840
841 841 # encoding
842 842 ui.status(_("Checking encoding (%s)...\n") % util._encoding)
843 843 try:
844 844 util.fromlocal("test")
845 845 except util.Abort, inst:
846 846 ui.write(" %s\n" % inst)
847 847 ui.write(_(" (check that your locale is properly set)\n"))
848 848 problems += 1
849 849
850 850 # compiled modules
851 851 ui.status(_("Checking extensions...\n"))
852 852 try:
853 853 import bdiff, mpatch, base85
854 854 except Exception, inst:
855 855 ui.write(" %s\n" % inst)
856 856 ui.write(_(" One or more extensions could not be found"))
857 857 ui.write(_(" (check that you compiled the extensions)\n"))
858 858 problems += 1
859 859
860 860 # templates
861 861 ui.status(_("Checking templates...\n"))
862 862 try:
863 863 import templater
864 864 t = templater.templater(templater.templatepath("map-cmdline.default"))
865 865 except Exception, inst:
866 866 ui.write(" %s\n" % inst)
867 867 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
868 868 problems += 1
869 869
870 870 # patch
871 871 ui.status(_("Checking patch...\n"))
872 872 path = os.environ.get('PATH', '')
873 873 patcher = util.find_in_path('gpatch', path,
874 874 util.find_in_path('patch', path, None))
875 875 if not patcher:
876 876 ui.write(_(" Can't find patch or gpatch in PATH\n"))
877 877 ui.write(_(" (specify a patch utility in your .hgrc file)\n"))
878 878 problems += 1
879 879 else:
880 880 # actually attempt a patch here
881 881 a = "1\n2\n3\n4\n"
882 882 b = "1\n2\n3\ninsert\n4\n"
883 883 d = mdiff.unidiff(a, None, b, None, "a")
884 884 fa = writetemp(a)
885 885 fd = writetemp(d)
886 886 fp = os.popen('%s %s %s' % (patcher, fa, fd))
887 887 files = []
888 888 output = ""
889 889 for line in fp:
890 890 output += line
891 891 if line.startswith('patching file '):
892 892 pf = util.parse_patch_output(line.rstrip())
893 893 files.append(pf)
894 894 if files != [fa]:
895 895 ui.write(_(" unexpected patch output!"))
896 896 ui.write(_(" (you may have an incompatible version of patch)\n"))
897 897 ui.write(output)
898 898 problems += 1
899 899 a = file(fa).read()
900 900 if a != b:
901 901 ui.write(_(" patch test failed!"))
902 902 ui.write(_(" (you may have an incompatible version of patch)\n"))
903 903 problems += 1
904 904 os.unlink(fa)
905 905 os.unlink(fd)
906 906
907 907 # merge helper
908 908 ui.status(_("Checking merge helper...\n"))
909 909 cmd = (os.environ.get("HGMERGE") or ui.config("ui", "merge")
910 910 or "hgmerge")
911 911 cmdpath = util.find_in_path(cmd, path)
912 912 if not cmdpath:
913 913 cmdpath = util.find_in_path(cmd.split()[0], path)
914 914 if not cmdpath:
915 915 if cmd == 'hgmerge':
916 916 ui.write(_(" No merge helper set and can't find default"
917 917 " hgmerge script in PATH\n"))
918 918 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
919 919 else:
920 920 ui.write(_(" Can't find merge helper '%s' in PATH\n") % cmd)
921 921 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
922 922 problems += 1
923 923 else:
924 924 # actually attempt a patch here
925 925 fa = writetemp("1\n2\n3\n4\n")
926 926 fl = writetemp("1\n2\n3\ninsert\n4\n")
927 927 fr = writetemp("begin\n1\n2\n3\n4\n")
928 928 r = os.system('%s %s %s %s' % (cmd, fl, fa, fr))
929 929 if r:
930 930 ui.write(_(" got unexpected merge error %d!") % r)
931 931 problems += 1
932 932 m = file(fl).read()
933 933 if m != "begin\n1\n2\n3\ninsert\n4\n":
934 934 ui.write(_(" got unexpected merge results!") % r)
935 935 ui.write(_(" (your merge helper may have the"
936 936 " wrong argument order)\n"))
937 937 ui.write(m)
938 938 os.unlink(fa)
939 939 os.unlink(fl)
940 940 os.unlink(fr)
941 941
942 942 # editor
943 943 ui.status(_("Checking commit editor...\n"))
944 944 editor = (os.environ.get("HGEDITOR") or
945 945 ui.config("ui", "editor") or
946 946 os.environ.get("EDITOR", "vi"))
947 947 cmdpath = util.find_in_path(editor, path)
948 948 if not cmdpath:
949 949 cmdpath = util.find_in_path(editor.split()[0], path)
950 950 if not cmdpath:
951 951 if editor == 'vi':
952 952 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
953 953 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
954 954 else:
955 955 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
956 956 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
957 957 problems += 1
958 958
959 959 # check username
960 960 ui.status(_("Checking username...\n"))
961 961 user = os.environ.get("HGUSER")
962 962 if user is None:
963 963 user = ui.config("ui", "username")
964 964 if user is None:
965 965 user = os.environ.get("EMAIL")
966 966 if not user:
967 967 ui.warn(" ")
968 968 ui.username()
969 969 ui.write(_(" (specify a username in your .hgrc file)\n"))
970 970
971 971 if not problems:
972 972 ui.status(_("No problems detected\n"))
973 973 else:
974 974 ui.write(_("%s problems detected,"
975 975 " please check your install!\n") % problems)
976 976
977 977 return problems
978 978
def debugrename(ui, repo, file1, *pats, **opts):
    """dump rename information"""
    # default to tip unless --rev was given
    ctx = repo.changectx(opts.get('rev', 'tip'))
    walk = cmdutil.walk(repo, (file1,) + pats, opts, ctx.node())
    for src, abs, rel, exact in walk:
        renamed = ctx.filectx(abs).renamed()
        if not renamed:
            ui.write(_("%s not renamed\n") % rel)
        else:
            ui.write(_("%s renamed from %s:%s\n")
                     % (rel, renamed[0], hex(renamed[1])))
990 990
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    items = list(cmdutil.walk(repo, pats, opts))
    if not items:
        return
    # column widths sized to the longest absolute and relative paths
    abswidth = max([len(abs) for (src, abs, rel, exact) in items])
    relwidth = max([len(rel) for (src, abs, rel, exact) in items])
    fmt = '%%s %%-%ds %%-%ds %%s' % (abswidth, relwidth)
    for src, abs, rel, exact in items:
        line = fmt % (src, abs, rel, exact and 'exact' or '')
        ui.write("%s\n" % line.rstrip())
1002 1002
def diff(ui, repo, *pats, **opts):
    """diff repository (or selected files)

    Show differences between revisions for the specified files.

    Differences between files are shown using the unified diff format.

    NOTE: diff may generate unexpected results for merges, as it will
    default to comparing against the working directory's first parent
    changeset if no revisions are specified.

    When two revision arguments are given, then changes are shown
    between those revisions. If only one revision is specified then
    that revision is compared to the working directory, and, when no
    revisions are specified, the working directory files are compared
    to its parent.

    Without the -a option, diff will avoid generating diffs of files
    it detects as binary. With -a, diff will generate a diff anyway,
    probably with undesirable results.
    """
    # resolve the pair of nodes to compare from --rev
    node1, node2 = cmdutil.revpair(repo, opts['rev'])
    # expand patterns into file names and a match function
    fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
    diffopts = patch.diffopts(ui, opts)
    patch.diff(repo, node1, node2, fns, match=matchfn, opts=diffopts)
1030 1030
def export(ui, repo, *changesets, **opts):
    """dump the header and diffs for one or more changesets

    Print the changeset header and diffs for one or more revisions.

    The information shown in the changeset header is: author,
    changeset hash, parent(s) and commit comment.

    NOTE: export may generate unexpected diff output for merge changesets,
    as it will compare the merge changeset against its first parent only.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are as follows:

    %% literal "%" character
    %H changeset hash (40 bytes of hexadecimal)
    %N number of patches being generated
    %R changeset revision number
    %b basename of the exporting repository
    %h short-form changeset hash (12 bytes of hexadecimal)
    %n zero-padded sequence number, starting at 1
    %r zero-padded changeset revision number

    Without the -a option, export will avoid generating diffs of files
    it detects as binary. With -a, export will generate a diff anyway,
    probably with undesirable results.

    With the --switch-parent option, the diff will be against the second
    parent. It can be useful to review a merge.
    """
    # at least one revision argument is mandatory
    if not changesets:
        raise util.Abort(_("export requires at least one changeset"))
    revs = cmdutil.revrange(repo, changesets)
    diffopts = patch.diffopts(ui, opts)
    if len(revs) > 1:
        ui.note(_('exporting patches:\n'))
    else:
        ui.note(_('exporting patch:\n'))
    patch.export(repo, revs, template=opts['output'],
                 switch_parent=opts['switch_parent'], opts=diffopts)
1071 1071
def grep(ui, repo, pattern, *pats, **opts):
    """search for a pattern in specified files and revisions

    Search revisions of files for a regular expression.

    This command behaves differently than Unix grep. It only accepts
    Python/Perl regexps. It searches repository history, not the
    working directory. It always prints the revision number in which
    a match appears.

    By default, grep only prints output for the first revision of a
    file in which it finds a match. To get it to print every revision
    that contains a change in match status ("-" for a match that
    becomes a non-match, or "+" for a non-match that becomes a match),
    use the --all flag.
    """
    reflags = 0
    if opts['ignore_case']:
        reflags |= re.I
    regexp = re.compile(pattern, reflags)
    # --print0 uses NUL for both the field and line separators
    sep, eol = ':', '\n'
    if opts['print0']:
        sep = eol = '\0'

    # cache of opened filelogs, keyed by file name
    fcache = {}
    def getfile(fn):
        if fn not in fcache:
            fcache[fn] = repo.file(fn)
        return fcache[fn]

    def matchlines(body):
        # yield (linenum, colstart, colend, line) for each regexp match in body
        begin = 0
        linenum = 0
        while True:
            match = regexp.search(body, begin)
            if not match:
                break
            mstart, mend = match.span()
            linenum += body.count('\n', begin, mstart) + 1
            lstart = body.rfind('\n', begin, mstart) + 1 or begin
            lend = body.find('\n', mend)
            yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
            begin = lend + 1

    class linestate(object):
        # one matched line; equality is by line text only, so difflib can
        # detect match-status changes between revisions
        def __init__(self, line, linenum, colstart, colend):
            self.line = line
            self.linenum = linenum
            self.colstart = colstart
            self.colend = colend

        def __eq__(self, other):
            return self.line == other.line

    # matches: rev -> fn -> [linestate], rebuilt per window
    matches = {}
    # copies: rev -> fn -> copy source, tracked when --follow is set
    copies = {}
    def grepbody(fn, rev, body):
        matches[rev].setdefault(fn, [])
        m = matches[rev][fn]
        for lnum, cstart, cend, line in matchlines(body):
            s = linestate(line, lnum, cstart, cend)
            m.append(s)

    def difflinestates(a, b):
        # yield ('+'/'-', linestate) pairs describing the change from a to b
        sm = difflib.SequenceMatcher(None, a, b)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag == 'insert':
                for i in xrange(blo, bhi):
                    yield ('+', b[i])
            elif tag == 'delete':
                for i in xrange(alo, ahi):
                    yield ('-', a[i])
            elif tag == 'replace':
                for i in xrange(alo, ahi):
                    yield ('-', a[i])
                for i in xrange(blo, bhi):
                    yield ('+', b[i])

    # prev: fn -> last revision displayed for that file
    prev = {}
    def display(fn, rev, states, prevstates):
        counts = {'-': 0, '+': 0}
        filerevmatches = {}
        # orient the diff according to the direction the window is walked in
        if incrementing or not opts['all']:
            a, b, r = prevstates, states, rev
        else:
            a, b, r = states, prevstates, prev.get(fn, -1)
        for change, l in difflinestates(a, b):
            cols = [fn, str(r)]
            if opts['line_number']:
                cols.append(str(l.linenum))
            if opts['all']:
                cols.append(change)
            if opts['user']:
                cols.append(ui.shortuser(get(r)[1]))
            if opts['files_with_matches']:
                # print each (file, rev) pair at most once
                c = (fn, r)
                if c in filerevmatches:
                    continue
                filerevmatches[c] = 1
            else:
                cols.append(l.line)
            ui.write(sep.join(cols), eol)
            counts[change] += 1
        return counts['+'], counts['-']

    # fstate: fn -> matches seen at the previously visited revision
    fstate = {}
    # skip: files (and their copy sources) already reported, for non --all runs
    skip = {}
    get = util.cachefunc(lambda r: repo.changectx(r).changeset())
    changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
    count = 0
    incrementing = False
    follow = opts.get('follow')
    # walkchangerevs drives a window/add/iter state machine
    for st, rev, fns in changeiter:
        if st == 'window':
            incrementing = rev
            matches.clear()
        elif st == 'add':
            # collect matches for every interesting file in this revision
            mf = repo.changectx(rev).manifest()
            matches[rev] = {}
            for fn in fns:
                if fn in skip:
                    continue
                fstate.setdefault(fn, {})
                try:
                    grepbody(fn, rev, getfile(fn).read(mf[fn]))
                    if follow:
                        copied = getfile(fn).renamed(mf[fn])
                        if copied:
                            copies.setdefault(rev, {})[fn] = copied[0]
                except KeyError:
                    # file not present in this revision's manifest
                    pass
        elif st == 'iter':
            # emit output for this revision in sorted file order
            states = matches[rev].items()
            states.sort()
            for fn, m in states:
                copy = copies.get(rev, {}).get(fn)
                if fn in skip:
                    if copy:
                        skip[copy] = True
                    continue
                if incrementing or not opts['all'] or fstate[fn]:
                    pos, neg = display(fn, rev, m, fstate[fn])
                    count += pos + neg
                    if pos and not opts['all']:
                        # without --all, stop after the first matching rev
                        skip[fn] = True
                        if copy:
                            skip[copy] = True
                fstate[fn] = m
                if copy:
                    fstate[copy] = m
                prev[fn] = rev

    if not incrementing:
        # flush the remaining per-file state after a backwards walk
        fstate = fstate.items()
        fstate.sort()
        for fn, state in fstate:
            if fn in skip:
                continue
            if fn not in copies.get(prev[fn], {}):
                display(fn, rev, {}, state)
    # exit status: 0 if anything matched, 1 otherwise
    return (count == 0 and 1) or 0
1233 1233
def heads(ui, repo, **opts):
    """show current repository heads

    Show all repository head changesets.

    Repository "heads" are changesets that don't have children
    changesets. They are where development generally takes place and
    are the usual targets for update and merge operations.
    """
    # --rev restricts heads to descendants of that revision
    rev = opts['rev']
    if rev:
        nodes = repo.heads(repo.lookup(rev))
    else:
        nodes = repo.heads()
    displayer = cmdutil.show_changeset(ui, repo, opts)
    for node in nodes:
        displayer.show(changenode=node)
1250 1250
def help_(ui, name=None, with_version=False):
    """show help for a command, extension, or list of commands

    With no arguments, print a list of commands and short help.

    Given a command name, print help for that command.

    Given an extension name, print help for that extension, and the
    commands it provides."""
    # option tables to render at the end; filled in by the helpers below
    option_lists = []

    def helpcmd(name):
        # show help for a single command
        if with_version:
            version_(ui)
            ui.write('\n')
        aliases, i = findcmd(ui, name)
        # synopsis
        ui.write("%s\n\n" % i[2])

        # description
        doc = i[0].__doc__
        if not doc:
            doc = _("(No help text available)")
        if ui.quiet:
            # quiet mode: first line of the docstring only
            doc = doc.splitlines(0)[0]
        ui.write("%s\n" % doc.rstrip())

        if not ui.quiet:
            # aliases
            if len(aliases) > 1:
                ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))

            # options
            if i[1]:
                option_lists.append(("options", i[1]))

    def helplist(select=None):
        # list commands (optionally filtered by select) with one-line summaries
        h = {}
        cmds = {}
        for c, e in table.items():
            # table keys look like "^name|alias|..."; '^' marks basic commands
            f = c.split("|", 1)[0]
            if select and not select(f):
                continue
            if name == "shortlist" and not f.startswith("^"):
                continue
            f = f.lstrip("^")
            if not ui.debugflag and f.startswith("debug"):
                # hide debug commands unless --debug is in effect
                continue
            doc = e[0].__doc__
            if not doc:
                doc = _("(No help text available)")
            h[f] = doc.splitlines(0)[0].rstrip()
            cmds[f] = c.lstrip("^")

        fns = h.keys()
        fns.sort()
        m = max(map(len, fns))
        for f in fns:
            if ui.verbose:
                commands = cmds[f].replace("|",", ")
                ui.write(" %s:\n %s\n"%(commands, h[f]))
            else:
                ui.write(' %-*s %s\n' % (m, f, h[f]))

    def helptopic(name):
        # show help for a named topic from help.helptable
        v = None
        for i in help.helptable:
            # topic keys look like "alias|...|Header"
            l = i.split('|')
            if name in l:
                v = i
                header = l[-1]
        if not v:
            raise UnknownCommand(name)

        # description
        doc = help.helptable[v]
        if not doc:
            doc = _("(No help text available)")
        if callable(doc):
            # topic text may be generated lazily
            doc = doc()

        ui.write("%s\n" % header)
        ui.write("%s\n" % doc.rstrip())

    def helpext(name):
        # show help for an extension and the commands it adds
        try:
            mod = findext(name)
        except KeyError:
            raise UnknownCommand(name)

        doc = (mod.__doc__ or _('No help text available')).splitlines(0)
        ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
        for d in doc[1:]:
            ui.write(d, '\n')

        ui.status('\n')
        if ui.verbose:
            ui.status(_('list of commands:\n\n'))
        else:
            ui.status(_('list of commands (use "hg help -v %s" '
                        'to show aliases and global options):\n\n') % name)

        modcmds = dict.fromkeys([c.split('|', 1)[0] for c in mod.cmdtable])
        helplist(modcmds.has_key)

    if name and name != 'shortlist':
        # try command, then topic, then extension; remember the last failure
        i = None
        for f in (helpcmd, helptopic, helpext):
            try:
                f(name)
                i = None
                break
            except UnknownCommand, inst:
                i = inst
        if i:
            # nothing handled the name: re-raise the UnknownCommand
            raise i

    else:
        # program name
        if ui.verbose or with_version:
            version_(ui)
        else:
            ui.status(_("Mercurial Distributed SCM\n"))
        ui.status('\n')

        # list of commands
        if name == "shortlist":
            ui.status(_('basic commands (use "hg help" '
                        'for the full list or option "-v" for details):\n\n'))
        elif ui.verbose:
            ui.status(_('list of commands:\n\n'))
        else:
            ui.status(_('list of commands (use "hg help -v" '
                        'to show aliases and global options):\n\n'))

        helplist()

    # global options
    if ui.verbose:
        option_lists.append(("global options", globalopts))

    # list all option lists
    opt_output = []
    for title, options in option_lists:
        opt_output.append(("\n%s:\n" % title, None))
        for shortopt, longopt, default, desc in options:
            if "DEPRECATED" in desc and not ui.verbose: continue
            opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
                                          longopt and " --%s" % longopt),
                              "%s%s" % (desc,
                                        default
                                        and _(" (default: %s)") % default
                                        or "")))

    if opt_output:
        # align descriptions to the widest option column
        opts_len = max([len(line[0]) for line in opt_output if line[1]])
        for first, second in opt_output:
            if second:
                ui.write(" %-*s %s\n" % (opts_len, first, second))
            else:
                ui.write("%s\n" % first)
1412 1412
def identify(ui, repo):
    """print information about the working copy

    Print a short summary of the current state of the repo.

    This summary identifies the repository state using one or two parent
    hash identifiers, followed by a "+" if there are uncommitted changes
    in the working directory, followed by a list of tags for this revision.
    """
    # ignore null parents; an empty list means no checkout at all
    parents = [p for p in repo.dirstate.parents() if p != nullid]
    if not parents:
        ui.write(_("unknown\n"))
        return

    # full 40-char hashes under --debug, short 12-char hashes otherwise
    hexfunc = ui.debugflag and hex or short
    modified, added, removed, deleted = repo.status()[:4]
    # "+" suffix flags any uncommitted change in the working directory
    output = ["%s%s" %
              ('+'.join([hexfunc(parent) for parent in parents]),
               (modified or added or removed or deleted) and "+" or "")]

    if not ui.quiet:

        branch = util.tolocal(repo.workingctx().branch())
        if branch:
            output.append("(%s)" % branch)

        # multiple tags for a single parent separated by '/'
        parenttags = ['/'.join(tags)
                      for tags in map(repo.nodetags, parents) if tags]
        # tags for multiple parents separated by ' + '
        if parenttags:
            output.append(' + '.join(parenttags))

    ui.write("%s\n" % ' '.join(output))
1447 1447
def import_(ui, repo, patch1, *patches, **opts):
    """import an ordered set of patches

    Import a list of patches and commit them individually.

    If there are outstanding changes in the working directory, import
    will abort unless given the -f flag.

    You can import a patch straight from a mail message. Even patches
    as attachments work (body part must be type text/plain or
    text/x-patch to be used). From and Subject headers of email
    message are used as default committer and commit message. All
    text/plain body parts before first diff are added to commit
    message.

    If imported patch was generated by hg export, user and description
    from patch override values from message headers and body. Values
    given on command line with -m and -u override these.

    To read a patch from standard input, use patch name "-".
    """
    patches = (patch1,) + patches

    # refuse to apply over local modifications unless --force was given
    if not opts['force']:
        bail_if_changed(repo)

    d = opts["base"]
    strip = opts["strip"]

    # hold both locks for the whole run so each patch commits atomically
    wlock = repo.wlock()
    lock = repo.lock()

    for p in patches:
        pf = os.path.join(d, p)

        if pf == '-':
            ui.status(_("applying patch from stdin\n"))
            tmpname, message, user, date = patch.extract(ui, sys.stdin)
        else:
            ui.status(_("applying %s\n") % p)
            tmpname, message, user, date = patch.extract(ui, file(pf))

        if tmpname is None:
            raise util.Abort(_('no diffs found'))

        try:
            # commit message precedence: -m/-l, then the patch itself,
            # then fall back to launching the editor (message=None)
            cmdline_message = logmessage(opts)
            if cmdline_message:
                # pickup the cmdline msg
                message = cmdline_message
            elif message:
                # pickup the patch msg
                message = message.strip()
            else:
                # launch the editor
                message = None
            ui.debug(_('message:\n%s\n') % message)

            files = {}
            try:
                fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
                                   files=files)
            finally:
                # record adds/removes in the dirstate even if patching failed
                files = patch.updatedir(ui, repo, files, wlock=wlock)
            repo.commit(files, message, user, date, wlock=wlock, lock=lock)
        finally:
            # always remove the temporary patch file extracted above
            os.unlink(tmpname)
1515 1515
def incoming(ui, repo, source="default", **opts):
    """show new changesets found in source

    Show new changesets found in the specified path/URL or the default
    pull location. These are the changesets that would be pulled if a pull
    was requested.

    For remote repository, using --bundle avoids downloading the changesets
    twice if the incoming is followed by a pull.

    See pull for valid source format details.
    """
    source = ui.expandpath(source)
    setremoteconfig(ui, opts)

    other = hg.repository(ui, source)
    incoming = repo.findincoming(other, force=opts["force"])
    if not incoming:
        ui.status(_("no changes found\n"))
        return

    # cleanup holds the path of a temporary bundle to delete on exit,
    # or None when the bundle is user-requested (kept) or absent
    cleanup = None
    try:
        fname = opts["bundle"]
        if fname or not other.local():
            # create a bundle (uncompressed if other repo is not local)
            cg = other.changegroup(incoming, "incoming")
            bundletype = other.local() and "HG10BZ" or "HG10UN"
            fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
            # keep written bundle?
            if opts["bundle"]:
                cleanup = None
            if not other.local():
                # use the created uncompressed bundlerepo
                other = bundlerepo.bundlerepository(ui, repo.root, fname)

        revs = None
        if opts['rev']:
            revs = [other.lookup(rev) for rev in opts['rev']]
        o = other.changelog.nodesbetween(incoming, revs)[0]
        if opts['newest_first']:
            o.reverse()
        displayer = cmdutil.show_changeset(ui, other, opts)
        for n in o:
            parents = [p for p in other.changelog.parents(n) if p != nullid]
            if opts['no_merges'] and len(parents) == 2:
                continue
            displayer.show(changenode=n)
    finally:
        # close the (possibly remote) repo and remove any temporary bundle
        if hasattr(other, 'close'):
            other.close()
        if cleanup:
            os.unlink(cleanup)
1569 1569
def init(ui, dest=".", **opts):
    """create a new repository in the given directory

    Initialize a new repository in the given directory. If the given
    directory does not exist, it is created.

    If no directory is given, the current directory is used.

    It is possible to specify an ssh:// URL as the destination.
    Look at the help text for the pull command for important details
    about ssh:// URLs.
    """
    # same remote-option setup as pull/push/incoming/outgoing, needed
    # because dest may be an ssh:// URL
    setremoteconfig(ui, opts)
    hg.repository(ui, dest, create=1)
1584 1584
def locate(ui, repo, *pats, **opts):
    """locate files matching specific patterns

    Print all files under Mercurial control whose names match the
    given patterns.

    This command searches the current directory and its
    subdirectories. To search an entire repository, move to the root
    of the repository.

    If no patterns are given to match, this command prints all file
    names.

    If you want to feed the output of this command into the "xargs"
    command, use the "-0" option to both this command and "xargs".
    This will avoid the problem of "xargs" treating single filenames
    that contain white space as multiple filenames.
    """
    # -0/--print0 switches the record terminator from newline to NUL
    end = opts['print0'] and '\0' or '\n'
    rev = opts['rev']
    if rev:
        node = repo.lookup(rev)
    else:
        node = None

    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
                                             head='(?:.*/|)'):
        # when walking the working dir, skip files the dirstate does
        # not track ('?' state)
        if not node and repo.dirstate.state(abs) == '?':
            continue
        if opts['fullpath']:
            ui.write(os.path.join(repo.root, abs), end)
        else:
            ui.write(((pats and rel) or abs), end)
1618 1618
def log(ui, repo, *pats, **opts):
    """show revision history of entire repository or files

    Print the revision history of the specified files or the entire
    project.

    File history is shown without following rename or copy history of
    files. Use -f/--follow with a file name to follow history across
    renames and copies. --follow without a file name will only show
    ancestors or descendants of the starting revision. --follow-first
    only follows the first parent of merge revisions.

    If no revision range is specified, the default is tip:0 unless
    --follow is set, in which case the working directory parent is
    used as the starting revision.

    By default this command outputs: changeset id and hash, tags,
    non-trivial parents, user, date and time, and a summary for each
    commit. When the -v/--verbose switch is used, the list of changed
    files and full commit message is shown.

    NOTE: log -p may generate unexpected diff output for merge
    changesets, as it will compare the merge changeset against its
    first parent only. Also, the files: list will only reflect files
    that are different from BOTH parents.

    """

    get = util.cachefunc(lambda r: repo.changectx(r).changeset())
    changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)

    if opts['limit']:
        try:
            limit = int(opts['limit'])
        except ValueError:
            raise util.Abort(_('limit must be a positive integer'))
        if limit <= 0: raise util.Abort(_('limit must be positive'))
    else:
        limit = sys.maxint
    count = 0

    if opts['copies'] and opts['rev']:
        endrev = max(cmdutil.revrange(repo, opts['rev'])) + 1
    else:
        endrev = repo.changelog.count()
    # caches used by getrenamed():
    #   rcache: fn -> {changerev: rename info}
    #   ncache: fn -> {filenode: rename info}
    #   dcache: [manifest, readdelta(manifest)] for the last manifest seen
    rcache = {}
    ncache = {}
    dcache = []
    def getrenamed(fn, rev, man):
        '''looks up all renames for a file (up to endrev) the first
        time the file is given. It indexes on the changerev and only
        parses the manifest if linkrev != changerev.
        Returns rename info for fn at changerev rev.'''
        if fn not in rcache:
            rcache[fn] = {}
            ncache[fn] = {}
            fl = repo.file(fn)
            for i in xrange(fl.count()):
                node = fl.node(i)
                lr = fl.linkrev(node)
                renamed = fl.renamed(node)
                rcache[fn][lr] = renamed
                if renamed:
                    ncache[fn][node] = renamed
                # revisions past endrev can never be displayed; stop early
                if lr >= endrev:
                    break
        if rev in rcache[fn]:
            return rcache[fn][rev]
        mr = repo.manifest.rev(man)
        if repo.manifest.parentrevs(mr) != (mr - 1, nullrev):
            return ncache[fn].get(repo.manifest.find(man, fn)[0])
        if not dcache or dcache[0] != man:
            dcache[:] = [man, repo.manifest.readdelta(man)]
        if fn in dcache[1]:
            return ncache[fn].get(dcache[1][fn])
        return None

    df = False
    if opts["date"]:
        df = util.matchdate(opts["date"])

    displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
    # walkchangerevs yields ('add', rev, files) when a revision enters
    # the window and ('iter', rev, None) when it should be emitted
    for st, rev, fns in changeiter:
        if st == 'add':
            changenode = repo.changelog.node(rev)
            parents = [p for p in repo.changelog.parentrevs(rev)
                       if p != nullrev]
            if opts['no_merges'] and len(parents) == 2:
                continue
            if opts['only_merges'] and len(parents) != 2:
                continue

            if df:
                changes = get(rev)
                if not df(changes[2][0]):
                    continue

            if opts['keyword']:
                changes = get(rev)
                miss = 0
                # match keyword against user, description and the first
                # 20 changed file names (case-insensitive)
                for k in [kw.lower() for kw in opts['keyword']]:
                    if not (k in changes[1].lower() or
                            k in changes[4].lower() or
                            k in " ".join(changes[3][:20]).lower()):
                        miss = 1
                        break
                if miss:
                    continue

            copies = []
            if opts.get('copies') and rev:
                mf = get(rev)[0]
                for fn in get(rev)[3]:
                    rename = getrenamed(fn, rev, mf)
                    if rename:
                        copies.append((fn, rename[0]))
            displayer.show(rev, changenode, copies=copies)
        elif st == 'iter':
            if count == limit: break
            if displayer.flush(rev):
                count += 1
1740 1740
def manifest(ui, repo, rev=None):
    """output the current or given revision of the project manifest

    Print a list of version controlled files for the given revision.
    If no revision is given, the parent of the working directory is used,
    or tip if no revision is checked out.

    The manifest is the list of files being version controlled. If no revision
    is given then the first parent of the working directory is used.

    With -v flag, print file permissions. With --debug flag, print
    file revision hashes.
    """

    m = repo.changectx(rev).manifest()
    files = m.keys()
    files.sort()

    for f in files:
        if ui.debugflag:
            # 40-character hex file revision hash
            ui.write("%40s " % hex(m[f]))
        if ui.verbose:
            ui.write("%3s " % (m.execf(f) and "755" or "644"))
        ui.write("%s\n" % f)
1765 1765
def merge(ui, repo, node=None, force=None):
    """Merge working directory with another revision

    Merge the contents of the current working directory and the
    requested revision. Files that changed between either parent are
    marked as changed for the next commit and a commit must be
    performed before any further updates are allowed.

    If no revision is specified, the working directory's parent is a
    head revision, and the repository contains exactly one other head,
    the other head is merged with by default. Otherwise, an explicit
    revision to merge with must be provided.
    """

    if not node:
        # pick the other head automatically, but only when the choice
        # is unambiguous: exactly two heads, one of them checked out
        heads = repo.heads()
        if len(heads) > 2:
            raise util.Abort(_('repo has %d heads - '
                               'please merge with an explicit rev') %
                             len(heads))
        if len(heads) == 1:
            raise util.Abort(_('there is nothing to merge - '
                               'use "hg update" instead'))
        parent = repo.dirstate.parents()[0]
        if parent not in heads:
            raise util.Abort(_('working dir not at a head rev - '
                               'use "hg update" or merge with an explicit rev'))
        node = parent == heads[0] and heads[-1] or heads[0]
    return hg.merge(repo, node, force=force)
1795 1795
def outgoing(ui, repo, dest=None, **opts):
    """show changesets not found in destination

    Show changesets not found in the specified destination repository or
    the default push location. These are the changesets that would be pushed
    if a push was requested.

    See pull for valid destination format details.
    """
    dest = ui.expandpath(dest or 'default-push', dest or 'default')
    setremoteconfig(ui, opts)
    revs = None
    if opts['rev']:
        revs = [repo.lookup(rev) for rev in opts['rev']]

    other = hg.repository(ui, dest)
    o = repo.findoutgoing(other, force=opts['force'])
    if not o:
        ui.status(_("no changes found\n"))
        return
    # restrict the outgoing set to ancestors of the requested revs
    o = repo.changelog.nodesbetween(o, revs)[0]
    if opts['newest_first']:
        o.reverse()
    displayer = cmdutil.show_changeset(ui, repo, opts)
    for n in o:
        parents = [p for p in repo.changelog.parents(n) if p != nullid]
        if opts['no_merges'] and len(parents) == 2:
            continue
        displayer.show(changenode=n)
1825 1825
def parents(ui, repo, file_=None, **opts):
    """show the parents of the working dir or revision

    Print the working directory's parent revisions.
    """
    rev = opts.get('rev')
    if rev:
        if file_:
            # with a file argument, show the parents of that file's
            # revision as of -r REV
            ctx = repo.filectx(file_, changeid=rev)
        else:
            ctx = repo.changectx(rev)
        p = [cp.node() for cp in ctx.parents()]
    else:
        p = repo.dirstate.parents()

    displayer = cmdutil.show_changeset(ui, repo, opts)
    for n in p:
        # a null parent means "no second parent"; do not display it
        if n != nullid:
            displayer.show(changenode=n)
1845 1845
def paths(ui, repo, search=None):
    """show definition of symbolic path names

    Show definition of symbolic path name NAME. If no name is given, show
    definition of available names.

    Path names are defined in the [paths] section of /etc/mercurial/hgrc
    and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
    """
    entries = ui.configitems("paths")
    if not search:
        # no name requested: dump the whole [paths] table
        for name, path in entries:
            ui.write("%s = %s\n" % (name, path))
        return
    # look up a single symbolic name; error exit code when unknown
    for name, path in entries:
        if name == search:
            ui.write("%s\n" % path)
            return
    ui.warn(_("not found!\n"))
    return 1
1865 1865
def postincoming(ui, repo, modheads, optupdate):
    # Shared epilogue for commands that add changesets locally: either
    # update the working dir (when safe) or print a hint about what to
    # run next. modheads is the head count delta reported by the pull.
    if modheads == 0:
        return
    if optupdate and modheads == 1:
        return hg.update(repo, repo.changelog.tip()) # update
    if optupdate:
        ui.status(_("not updating, since new heads added\n"))
    if modheads > 1:
        ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
    else:
        ui.status(_("(run 'hg update' to get a working copy)\n"))
1878 1878
def pull(ui, repo, source="default", **opts):
    """pull changes from the specified source

    Pull changes from a remote repository to a local one.

    This finds all changes from the repository at the specified path
    or URL and adds them to the local repository. By default, this
    does not update the copy of the project in the working directory.

    Valid URLs are of the form:

      local/filesystem/path (or file://local/filesystem/path)
      http://[user@]host[:port]/[path]
      https://[user@]host[:port]/[path]
      ssh://[user@]host[:port]/[path]
      static-http://host[:port]/[path]

    Paths in the local filesystem can either point to Mercurial
    repositories or to bundle files (as created by 'hg bundle' or
    'hg incoming --bundle'). The static-http:// protocol, albeit slow,
    allows access to a Mercurial repository where you simply use a web
    server to publish the .hg directory as static content.

    Some notes about using SSH with Mercurial:
    - SSH requires an accessible shell account on the destination machine
      and a copy of hg in the remote path or specified with as remotecmd.
    - path is relative to the remote user's home directory by default.
      Use an extra slash at the start of a path to specify an absolute path:
        ssh://example.com//tmp/repository
    - Mercurial doesn't use its own compression via SSH; the right thing
      to do is to configure it in your ~/.ssh/config, e.g.:
        Host *.mylocalnetwork.example.com
          Compression no
        Host *
          Compression yes
      Alternatively specify "ssh -C" as your ssh command in your hgrc or
      with the --ssh command line option.
    """
    source = ui.expandpath(source)
    setremoteconfig(ui, opts)

    other = hg.repository(ui, source)
    ui.status(_('pulling from %s\n') % (source))
    revs = None
    if opts['rev']:
        # -r needs the remote side to translate names to nodes; older
        # servers do not advertise the 'lookup' capability
        if 'lookup' in other.capabilities:
            revs = [other.lookup(rev) for rev in opts['rev']]
        else:
            error = _("Other repository doesn't support revision lookup, so a rev cannot be specified.")
            raise util.Abort(error)
    modheads = repo.pull(other, heads=revs, force=opts['force'])
    return postincoming(ui, repo, modheads, opts['update'])
1931 1931
def push(ui, repo, dest=None, **opts):
    """push changes to the specified destination

    Push changes from the local repository to the given destination.

    This is the symmetrical operation for pull. It helps to move
    changes from the current repository to a different one. If the
    destination is local this is identical to a pull in that directory
    from the current one.

    By default, push will refuse to run if it detects the result would
    increase the number of remote heads. This generally indicates the
    client has forgotten to sync and merge before pushing.

    Valid URLs are of the form:

      local/filesystem/path (or file://local/filesystem/path)
      ssh://[user@]host[:port]/[path]
      http://[user@]host[:port]/[path]
      https://[user@]host[:port]/[path]

    Look at the help text for the pull command for important details
    about ssh:// URLs.

    Pushing to http:// and https:// URLs is only possible, if this
    feature is explicitly enabled on the remote Mercurial server.
    """
    dest = ui.expandpath(dest or 'default-push', dest or 'default')
    setremoteconfig(ui, opts)

    other = hg.repository(ui, dest)
    ui.status('pushing to %s\n' % (dest))
    revs = None
    if opts['rev']:
        revs = [repo.lookup(rev) for rev in opts['rev']]
    r = repo.push(other, opts['force'], revs=revs)
    # the command's return is truthy (failure) exactly when repo.push
    # reported 0
    return r == 0
1969 1969
def rawcommit(ui, repo, *pats, **opts):
    """raw commit interface (DEPRECATED)

    (DEPRECATED)
    Lowlevel commit, for use in helper scripts.

    This command is not intended to be used by normal users, as it is
    primarily useful for importing from other SCMs.

    This command is now deprecated and will be removed in a future
    release, please use debugsetparents and commit instead.
    """

    ui.warn(_("(the rawcommit command is deprecated)\n"))

    message = logmessage(opts)

    files, match, anypats = cmdutil.matchpats(repo, pats, opts)
    if opts['files']:
        # extra file list read from --files, one path per line
        files += open(opts['files']).read().splitlines()

    parents = [repo.lookup(p) for p in opts['parent']]

    try:
        repo.rawcommit(files, message, opts['user'], opts['date'], *parents)
    except ValueError, inst:
        raise util.Abort(str(inst))
1997 1997
def recover(ui, repo):
    """roll back an interrupted transaction

    Recover from an interrupted commit or pull.

    This command tries to fix the repository status after an interrupted
    operation. It should only be necessary when Mercurial suggests it.
    """
    # bail out with an error exit code unless recovery actually ran,
    # otherwise verify the repaired repository
    if not repo.recover():
        return 1
    return hg.verify(repo)
2009 2009
def remove(ui, repo, *pats, **opts):
    """remove the specified files on the next commit

    Schedule the indicated files for removal from the repository.

    This only removes files from the current branch, not from the
    entire project history. If the files still exist in the working
    directory, they will be deleted from it. If invoked with --after,
    files that have been manually deleted are marked as removed.

    This command schedules the files to be removed at the next commit.
    To undo a remove before that, see hg revert.

    Modified files and added files are not removed by default. To
    remove them, use the -f/--force option.
    """
    # dead locals removed: 'names' was never used, and the old
    # 'exact = dict.fromkeys(files)' was shadowed by the walk loop
    # target before any use
    if not opts['after'] and not pats:
        raise util.Abort(_('no files specified'))
    files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
    # first five status lists (modified, added, removed, deleted,
    # unknown), each as a dict for O(1) membership tests below
    mardu = map(dict.fromkeys, repo.status(files=files, match=matchfn))[:5]
    modified, added, removed, deleted, unknown = mardu
    remove, forget = [], []
    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
        reason = None
        if abs not in deleted and opts['after']:
            reason = _('is still present')
        elif abs in modified and not opts['force']:
            reason = _('is modified (use -f to force removal)')
        elif abs in added:
            if opts['force']:
                forget.append(abs)
                continue
            reason = _('has been marked for add (use -f to force removal)')
        elif abs in unknown:
            reason = _('is not managed')
        elif abs in removed:
            # already scheduled for removal: nothing to do
            continue
        if reason:
            # only complain about files the user named explicitly
            if exact:
                ui.warn(_('not removing %s: file %s\n') % (rel, reason))
        else:
            if ui.verbose or not exact:
                ui.status(_('removing %s\n') % rel)
            remove.append(abs)
    repo.forget(forget)
    repo.remove(remove, unlink=not opts['after'])
2058 2058
def rename(ui, repo, *pats, **opts):
    """rename files; equivalent of copy + remove

    Mark dest as copies of sources; mark sources for deletion. If
    dest is a directory, copies are put in that directory. If dest is
    a file, there can only be one source.

    By default, this command copies the contents of files as they
    stand in the working directory. If invoked with --after, the
    operation is recorded, but no copying is performed.

    This command takes effect in the next commit. To undo a rename
    before that, see hg revert.
    """
    wlock = repo.wlock(0)
    # docopy records the copies; the sources are then scheduled for removal
    errs, copied = docopy(ui, repo, pats, opts, wlock)
    names = []
    for abs, rel, exact in copied:
        if ui.verbose or not exact:
            ui.status(_('removing %s\n') % rel)
        names.append(abs)
    if not opts.get('dry_run'):
        repo.remove(names, True, wlock)
    return errs
2083 2083
def revert(ui, repo, *pats, **opts):
    """revert files or dirs to their states as of some revision

    With no revision specified, revert the named files or directories
    to the contents they had in the parent of the working directory.
    This restores the contents of the affected files to an unmodified
    state and unschedules adds, removes, copies, and renames. If the
    working directory has two parents, you must explicitly specify the
    revision to revert to.

    Modified files are saved with a .orig suffix before reverting.
    To disable these backups, use --no-backup.

    Using the -r option, revert the given files or directories to their
    contents as of a specific revision. This can be helpful to "roll
    back" some or all of a change that should not have been committed.

    Revert modifies the working directory. It does not commit any
    changes, or change the parent of the working directory. If you
    revert to a revision other than the parent of the working
    directory, the reverted files will thus appear modified
    afterwards.

    If a file has been deleted, it is recreated. If the executable
    mode of a file was changed, it is reset.

    If names are given, all files matching the names are reverted.

    If no arguments are given, no files are reverted.
    """

    if opts["date"]:
        if opts["rev"]:
            raise util.Abort(_("you can't specify a revision and a date"))
        opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])

    if not pats and not opts['all']:
        raise util.Abort(_('no files or directories specified; '
                           'use --all to revert the whole repo'))

    parent, p2 = repo.dirstate.parents()
    if not opts['rev'] and p2 != nullid:
        raise util.Abort(_('uncommitted merge - please provide a '
                           'specific revision'))

    # mf: manifest of the target revision being reverted to
    node = repo.changectx(opts['rev']).node()
    mf = repo.manifest.read(repo.changelog.read(node)[0])
    if node == parent:
        pmf = mf
    else:
        # parent manifest is read lazily, only if actually needed below
        pmf = None

    wlock = repo.wlock()

    # need all matching names in dirstate and manifest of target rev,
    # so have to walk both. do not print errors if files exist in one
    # but not other.

    names = {}
    target_only = {}

    # walk dirstate.

    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
                                             badmatch=mf.has_key):
        names[abs] = (rel, exact)
        if src == 'b':
            target_only[abs] = True

    # walk target manifest.

    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
                                             badmatch=names.has_key):
        if abs in names: continue
        names[abs] = (rel, exact)
        target_only[abs] = True

    changes = repo.status(match=names.has_key, wlock=wlock)[:5]
    modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)

    # each action is a ([files], status-message-template) pair
    revert = ([], _('reverting %s\n'))
    add = ([], _('adding %s\n'))
    remove = ([], _('removing %s\n'))
    forget = ([], _('forgetting %s\n'))
    undelete = ([], _('undeleting %s\n'))
    update = {}

    disptable = (
        # dispatch table:
        #   file state
        #   action if in target manifest
        #   action if not in target manifest
        #   make backup if in target manifest
        #   make backup if not in target manifest
        (modified, revert, remove, True, True),
        (added, revert, forget, True, False),
        (removed, undelete, None, False, False),
        (deleted, revert, remove, False, False),
        (unknown, add, None, True, False),
        (target_only, add, None, False, False),
        )

    entries = names.items()
    entries.sort()

    for abs, (rel, exact) in entries:
        mfentry = mf.get(abs)
        def handle(xlist, dobackup):
            # queue abs on the chosen action list, optionally saving a
            # .orig backup of the working copy first
            xlist[0].append(abs)
            update[abs] = 1
            if dobackup and not opts['no_backup'] and os.path.exists(rel):
                bakname = "%s.orig" % rel
                ui.note(_('saving current version of %s as %s\n') %
                        (rel, bakname))
                if not opts.get('dry_run'):
                    util.copyfile(rel, bakname)
            if ui.verbose or not exact:
                ui.status(xlist[1] % rel)
        # for/else: the else clause runs only when no table entry matched
        for table, hitlist, misslist, backuphit, backupmiss in disptable:
            if abs not in table: continue
            # file has changed in dirstate
            if mfentry:
                handle(hitlist, backuphit)
            elif misslist is not None:
                handle(misslist, backupmiss)
            else:
                if exact: ui.warn(_('file not managed: %s\n') % rel)
            break
        else:
            # file has not changed in dirstate
            if node == parent:
                if exact: ui.warn(_('no changes needed to %s\n') % rel)
                continue
            if pmf is None:
                # only need parent manifest in this unlikely case,
                # so do not read by default
                pmf = repo.manifest.read(repo.changelog.read(parent)[0])
            if abs in pmf:
                if mfentry:
                    # if version of file is same in parent and target
                    # manifests, do nothing
                    if pmf[abs] != mfentry:
                        handle(revert, False)
                else:
                    handle(remove, False)

    if not opts.get('dry_run'):
        repo.dirstate.forget(forget[0])
        r = hg.revert(repo, node, update.has_key, wlock)
        repo.dirstate.update(add[0], 'a')
        repo.dirstate.update(undelete[0], 'n')
        repo.dirstate.update(remove[0], 'r')
        return r
2236 2236
def rollback(ui, repo):
    """roll back the last transaction in this repository

    Roll back the last transaction in this repository, restoring the
    project to its state prior to the transaction.

    Transactions are used to encapsulate the effects of all commands
    that create new changesets or propagate existing changesets into a
    repository. For example, the following commands are transactional,
    and their effects can be rolled back:

      commit
      import
      pull
      push (with this repository as destination)
      unbundle

    This command should be used with care. There is only one level of
    rollback, and there is no way to undo a rollback.

    This command is not intended for use on public repositories. Once
    changes are visible for pull by other users, rolling a transaction
    back locally is ineffective (someone else may already have pulled
    the changes). Furthermore, a race is possible with readers of the
    repository; for example an in-progress pull from the repository
    may fail if a rollback is performed.
    """
    # all the work is delegated to the repository object
    repo.rollback()
2265 2265
def root(ui, repo):
    """print the root (top) of the current working dir

    Print the root directory of the current repository.
    """
    # emit the repository root followed by a newline
    ui.write("%s\n" % repo.root)
2272 2272
def serve(ui, repo, **opts):
    """export the repository via HTTP

    Start a local HTTP repository browser and pull server.

    By default, the server logs accesses to stdout and errors to
    stderr. Use the "-A" and "-E" options to log to files.
    """

    if opts["stdio"]:
        # --stdio: serve the ssh wire protocol over stdin/stdout instead
        if repo is None:
            raise hg.RepoError(_("There is no Mercurial repository here"
                                 " (.hg not found)"))
        s = sshserver.sshserver(ui, repo)
        s.serve_forever()

    # copy the relevant command line options into the [web] config section
    optlist = ("name templates style address port ipv6"
               " accesslog errorlog webdir_conf")
    for o in optlist.split():
        if opts[o]:
            ui.setconfig("web", o, str(opts[o]))

    if repo is None and not ui.config("web", "webdir_conf"):
        raise hg.RepoError(_("There is no Mercurial repository here"
                             " (.hg not found)"))

    if opts['daemon'] and not opts['daemon_pipefds']:
        # parent side of daemonization: respawn ourselves with a pipe
        # and wait for the child to signal readiness before exiting
        rfd, wfd = os.pipe()
        args = sys.argv[:]
        args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
        pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
                         args[0], args)
        os.close(wfd)
        os.read(rfd, 1)
        os._exit(0)

    httpd = hgweb.server.create_server(ui, repo)

    if ui.verbose:
        if httpd.port != 80:
            ui.status(_('listening at http://%s:%d/\n') %
                      (httpd.addr, httpd.port))
        else:
            ui.status(_('listening at http://%s/\n') % httpd.addr)

    if opts['pid_file']:
        fp = open(opts['pid_file'], 'w')
        fp.write(str(os.getpid()) + '\n')
        fp.close()

    if opts['daemon_pipefds']:
        # child side of daemonization: signal readiness to the parent,
        # then detach by redirecting stdio to the null device
        rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
        os.close(rfd)
        os.write(wfd, 'y')
        os.close(wfd)
        sys.stdout.flush()
        sys.stderr.flush()
        fd = os.open(util.nulldev, os.O_RDWR)
        if fd != 0: os.dup2(fd, 0)
        if fd != 1: os.dup2(fd, 1)
        if fd != 2: os.dup2(fd, 2)
        if fd not in (0, 1, 2): os.close(fd)

    httpd.serve_forever()
2337 2337
def status(ui, repo, *pats, **opts):
    """show changed files in the working directory

    Show status of files in the repository. If names are given, only
    files that match are shown. Files that are clean or ignored, are
    not listed unless -c (clean), -i (ignored) or -A is given.

    NOTE: status may appear to disagree with diff if permissions have
    changed or a merge has occurred. The standard diff format does not
    report permission changes and diff only reports changes relative
    to one merge parent.

    If one revision is given, it is used as the base revision.
    If two revisions are given, the difference between them is shown.

    The codes used to show the status of files are:
    M = modified
    A = added
    R = removed
    C = clean
    ! = deleted, but still tracked
    ? = not tracked
    I = ignored (not shown by default)
      = the previous added file was copied from here
    """

    all = opts['all']
    # resolve --rev (zero, one or two revisions) into two comparison nodes
    node1, node2 = cmdutil.revpair(repo, opts.get('rev'))

    files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
    # paths are printed relative to cwd only when patterns were given
    cwd = (pats and repo.getcwd()) or ''
    # repo.status returns seven path lists in this fixed order; each
    # path is rewritten relative to cwd. Ignored/clean lists are only
    # populated when those categories were actually requested.
    modified, added, removed, deleted, unknown, ignored, clean = [
        [util.pathto(cwd, x) for x in n]
        for n in repo.status(node1=node1, node2=node2, files=files,
                             match=matchfn,
                             list_ignored=all or opts['ignored'],
                             list_clean=all or opts['clean'])]

    changetypes = (('modified', 'M', modified),
                   ('added', 'A', added),
                   ('removed', 'R', removed),
                   ('deleted', '!', deleted),
                   ('unknown', '?', unknown),
                   ('ignored', 'I', ignored))

    # clean files are only shown when explicitly requested (or with -A)
    explicit_changetypes = changetypes + (('clean', 'C', clean),)

    # --print0 terminates each entry with NUL, for use with xargs -0
    end = opts['print0'] and '\0' or '\n'

    # print only the categories selected by options; if none were
    # selected, fall back to all default (non-clean) categories
    for opt, char, changes in ([ct for ct in explicit_changetypes
                                if all or opts[ct[0]]]
                               or changetypes):
        if opts['no_status']:
            format = "%%s%s" % end
        else:
            format = "%s %%s%s" % (char, end)

        for f in changes:
            ui.write(format % f)
            # with -C/--copies (or -A), report the copy source on an
            # extra line, unless status prefixes are suppressed
            if ((all or opts.get('copies')) and not opts.get('no_status')):
                copied = repo.dirstate.copied(f)
                if copied:
                    ui.write(' %s%s' % (copied, end))
2401 2401
def tag(ui, repo, name, rev_=None, **opts):
    """add a tag for the current or given revision

    Name a particular revision using <name>.

    Tags are used to name particular revisions of the repository and are
    very useful to compare different revision, to go back to significant
    earlier versions or to mark branch points as releases, etc.

    If no revision is given, the parent of the working directory is used,
    or tip if no revision is checked out.

    To facilitate version control, distribution, and merging of tags,
    they are stored as a file named ".hgtags" which is managed
    similarly to other project files and can be hand-edited if
    necessary. The file '.hg/localtags' is used for local tags (not
    shared among repositories).
    """
    # these names are magic revision identifiers and may not be tagged
    if name in ('tip', '.', 'null'):
        raise util.Abort(_("the name '%s' is reserved") % name)

    # positional REV form is deprecated; reject mixing it with --rev
    if rev_ is not None:
        ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
                  "please use 'hg tag [-r REV] NAME' instead\n"))
        if opts['rev']:
            raise util.Abort(_("use only one form to specify the revision"))
    if opts['rev']:
        rev_ = opts['rev']

    # refuse to pick a revision implicitly while a merge is uncommitted
    if not rev_ and repo.dirstate.parents()[1] != nullid:
        raise util.Abort(_('uncommitted merge - please provide a '
                           'specific revision'))

    node = repo.changectx(rev_).node()

    message = opts['message'] or _('Added tag %s for changeset %s') % (
        name, short(node))

    repo.tag(name, node, message, opts['local'], opts['user'], opts['date'])
2439 2439
def tags(ui, repo):
    """list repository tags

    List the repository tags.

    This lists both regular and local tags.
    """

    l = repo.tagslist()
    l.reverse()
    # full 40-char hashes with --debug, short 12-char ones otherwise
    hexfunc = ui.debugflag and hex or short
    for t, n in l:
        try:
            # compute the hash first so it is available in the except
            # branch even when the node is missing from the changelog
            hn = hexfunc(n)
            # reuse hn instead of hashing the node a second time
            r = "%5d:%s" % (repo.changelog.rev(n), hn)
        except revlog.LookupError:
            # tag points at a node the changelog does not have:
            # show an unknown revision number but still show the hash
            # (field padded to line up with the %5d column)
            r = "    ?:%s" % hn
        if ui.quiet:
            ui.write("%s\n" % t)
        else:
            # pad tag names to a 30-column field, locale-aware
            t = util.localsub(t, 30)
            t += " " * (30 - util.locallen(t))
            ui.write("%s %s\n" % (t, r))
2462 2463
def tip(ui, repo, **opts):
    """show the tip revision

    Show the tip revision.
    """
    # tip is the last revision: nullrev (-1) + count() == count() - 1
    displayer = cmdutil.show_changeset(ui, repo, opts)
    displayer.show(nullrev + repo.changelog.count())
2469 2470
def unbundle(ui, repo, fname, **opts):
    """apply a changegroup file

    Apply a compressed changegroup file generated by the bundle
    command.
    """
    # urlopen handles both local paths and URLs
    f = urllib.urlopen(fname)
    gen = changegroup.readbundle(f, fname)
    modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
    # optionally update the working dir, and report new heads
    return postincoming(ui, repo, modheads, opts['update'])
2479 2480
def update(ui, repo, node=None, clean=False, date=None):
    """update or merge working directory

    Update the working directory to the specified revision.

    If there are no outstanding changes in the working directory and
    there is a linear relationship between the current version and the
    requested version, the result is the requested version.

    To merge the working directory with another revision, use the
    merge command.

    By default, update will refuse to run if doing so would require
    merging or discarding local changes.
    """
    if date:
        # a date and an explicit revision are mutually exclusive
        if node:
            raise util.Abort(_("you can't specify a revision and a date"))
        node = cmdutil.finddate(ui, repo, date)

    if clean:
        return hg.clean(repo, node)
    return hg.update(repo, node)
2504 2505
def verify(ui, repo):
    """verify the integrity of the repository

    Verify the integrity of the current repository.

    This will perform an extensive check of the repository's
    integrity, validating the hashes and checksums of each entry in
    the changelog, manifest, and tracked files, as well as the
    integrity of their crosslinks and indices.
    """
    # all the real work is delegated to hg.verify
    return hg.verify(repo)
2516 2517
def version_(ui):
    """output version and copyright information"""
    # trailing underscore avoids shadowing the imported version module
    banner = _("Mercurial Distributed SCM (version %s)\n") % version.get_version()
    ui.write(banner)
    notice = _(
        "\nCopyright (C) 2005, 2006 Matt Mackall <mpm@selenic.com>\n"
        "This is free software; see the source for copying conditions. "
        "There is NO\nwarranty; "
        "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
    )
    # copyright goes through status() so -q suppresses it
    ui.status(notice)
2527 2528
2528 2529 # Command options and aliases are listed here, alphabetically
2529 2530
# Each option is a 4-tuple: (short flag, long name, default value, help
# text) -- see how parse() indexes o[0], o[1], o[3] below.
globalopts = [
    ('R', 'repository', '',
     _('repository root directory or symbolic path name')),
    ('', 'cwd', '', _('change working directory')),
    ('y', 'noninteractive', None,
     _('do not prompt, assume \'yes\' for any required answers')),
    ('q', 'quiet', None, _('suppress output')),
    ('v', 'verbose', None, _('enable additional output')),
    ('', 'config', [], _('set/override config option')),
    ('', 'debug', None, _('enable debugging output')),
    ('', 'debugger', None, _('start debugger')),
    ('', 'encoding', util._encoding, _('set the charset encoding')),
    ('', 'encodingmode', util._encodingmode, _('set the charset encoding mode')),
    ('', 'lsprof', None, _('print improved command execution profile')),
    ('', 'traceback', None, _('print traceback on exception')),
    ('', 'time', None, _('time how long the command takes')),
    ('', 'profile', None, _('print command execution profile')),
    ('', 'version', None, _('output version information and exit')),
    ('h', 'help', None, _('display help and exit')),
]

# shared option groups, spliced into individual command option lists
dryrunopts = [('n', 'dry-run', None,
               _('do not perform actions, just print output'))]

remoteopts = [
    ('e', 'ssh', '', _('specify ssh command to use')),
    ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
]

walkopts = [
    ('I', 'include', [], _('include names matching the given patterns')),
    ('X', 'exclude', [], _('exclude names matching the given patterns')),
]

commitopts = [
    ('m', 'message', '', _('use <text> as commit message')),
    ('l', 'logfile', '', _('read commit message from <file>')),
]
2568 2569
# The command table: maps a command spec to (function, options, synopsis).
# Aliases within a spec are separated by '|'; a leading '^' is stripped
# before alias matching (see findpossible below).
table = {
    "^add": (add, walkopts + dryrunopts, _('hg add [OPTION]... [FILE]...')),
    "addremove":
        (addremove,
         [('s', 'similarity', '',
           _('guess renamed files by similarity (0<=s<=100)')),
         ] + walkopts + dryrunopts,
         _('hg addremove [OPTION]... [FILE]...')),
    "^annotate":
        (annotate,
         [('r', 'rev', '', _('annotate the specified revision')),
          ('f', 'follow', None, _('follow file copies and renames')),
          ('a', 'text', None, _('treat all files as text')),
          ('u', 'user', None, _('list the author')),
          ('d', 'date', None, _('list the date')),
          ('n', 'number', None, _('list the revision number (default)')),
          ('c', 'changeset', None, _('list the changeset')),
         ] + walkopts,
         _('hg annotate [-r REV] [-f] [-a] [-u] [-d] [-n] [-c] FILE...')),
    "archive":
        (archive,
         [('', 'no-decode', None, _('do not pass files through decoders')),
          ('p', 'prefix', '', _('directory prefix for files in archive')),
          ('r', 'rev', '', _('revision to distribute')),
          ('t', 'type', '', _('type of distribution to create')),
         ] + walkopts,
         _('hg archive [OPTION]... DEST')),
    "backout":
        (backout,
         [('', 'merge', None,
           _('merge with old dirstate parent after backout')),
          ('d', 'date', '', _('record datecode as commit date')),
          ('', 'parent', '', _('parent to choose when backing out merge')),
          ('u', 'user', '', _('record user as committer')),
         ] + walkopts + commitopts,
         _('hg backout [OPTION]... REV')),
    "branch": (branch, [], _('hg branch [NAME]')),
    "branches": (branches, [], _('hg branches')),
    "bundle":
        (bundle,
         [('f', 'force', None,
           _('run even when remote repository is unrelated')),
          ('r', 'rev', [],
           _('a changeset you would like to bundle')),
          ('', 'base', [],
           _('a base changeset to specify instead of a destination')),
         ] + remoteopts,
         _('hg bundle [-f] [-r REV]... [--base REV]... FILE [DEST]')),
    "cat":
        (cat,
         [('o', 'output', '', _('print output to file with formatted name')),
          ('r', 'rev', '', _('print the given revision')),
         ] + walkopts,
         _('hg cat [OPTION]... FILE...')),
    "^clone":
        (clone,
         [('U', 'noupdate', None, _('do not update the new working directory')),
          ('r', 'rev', [],
           _('a changeset you would like to have after cloning')),
          ('', 'pull', None, _('use pull protocol to copy metadata')),
          ('', 'uncompressed', None,
           _('use uncompressed transfer (fast over LAN)')),
         ] + remoteopts,
         _('hg clone [OPTION]... SOURCE [DEST]')),
    "^commit|ci":
        (commit,
         [('A', 'addremove', None,
           _('mark new/missing files as added/removed before committing')),
          ('d', 'date', '', _('record datecode as commit date')),
          ('u', 'user', '', _('record user as commiter')),
         ] + walkopts + commitopts,
         _('hg commit [OPTION]... [FILE]...')),
    "copy|cp":
        (copy,
         [('A', 'after', None, _('record a copy that has already occurred')),
          ('f', 'force', None,
           _('forcibly copy over an existing managed file')),
         ] + walkopts + dryrunopts,
         _('hg copy [OPTION]... [SOURCE]... DEST')),
    "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
    "debugcomplete":
        (debugcomplete,
         [('o', 'options', None, _('show the command options'))],
         _('debugcomplete [-o] CMD')),
    "debuginstall": (debuginstall, [], _('debuginstall')),
    "debugrebuildstate":
        (debugrebuildstate,
         [('r', 'rev', '', _('revision to rebuild to'))],
         _('debugrebuildstate [-r REV] [REV]')),
    "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
    "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
    "debugstate": (debugstate, [], _('debugstate')),
    "debugdate":
        (debugdate,
         [('e', 'extended', None, _('try extended date formats'))],
         _('debugdate [-e] DATE [RANGE]')),
    "debugdata": (debugdata, [], _('debugdata FILE REV')),
    "debugindex": (debugindex, [], _('debugindex FILE')),
    "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
    "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
    "debugwalk": (debugwalk, walkopts, _('debugwalk [OPTION]... [FILE]...')),
    "^diff":
        (diff,
         [('r', 'rev', [], _('revision')),
          ('a', 'text', None, _('treat all files as text')),
          ('p', 'show-function', None,
           _('show which function each change is in')),
          ('g', 'git', None, _('use git extended diff format')),
          ('', 'nodates', None, _("don't include dates in diff headers")),
          ('w', 'ignore-all-space', None,
           _('ignore white space when comparing lines')),
          ('b', 'ignore-space-change', None,
           _('ignore changes in the amount of white space')),
          ('B', 'ignore-blank-lines', None,
           _('ignore changes whose lines are all blank')),
         ] + walkopts,
         _('hg diff [OPTION]... [-r REV1 [-r REV2]] [FILE]...')),
    "^export":
        (export,
         [('o', 'output', '', _('print output to file with formatted name')),
          ('a', 'text', None, _('treat all files as text')),
          ('g', 'git', None, _('use git extended diff format')),
          ('', 'nodates', None, _("don't include dates in diff headers")),
          ('', 'switch-parent', None, _('diff against the second parent'))],
         _('hg export [OPTION]... [-o OUTFILESPEC] REV...')),
    "grep":
        (grep,
         [('0', 'print0', None, _('end fields with NUL')),
          ('', 'all', None, _('print all revisions that match')),
          ('f', 'follow', None,
           _('follow changeset history, or file history across copies and renames')),
          ('i', 'ignore-case', None, _('ignore case when matching')),
          ('l', 'files-with-matches', None,
           _('print only filenames and revs that match')),
          ('n', 'line-number', None, _('print matching line numbers')),
          ('r', 'rev', [], _('search in given revision range')),
          ('u', 'user', None, _('print user who committed change')),
         ] + walkopts,
         _('hg grep [OPTION]... PATTERN [FILE]...')),
    "heads":
        (heads,
         [('', 'style', '', _('display using template map file')),
          ('r', 'rev', '', _('show only heads which are descendants of rev')),
          ('', 'template', '', _('display with template'))],
         _('hg heads [-r REV]')),
    "help": (help_, [], _('hg help [COMMAND]')),
    "identify|id": (identify, [], _('hg identify')),
    "import|patch":
        (import_,
         [('p', 'strip', 1,
           _('directory strip option for patch. This has the same\n'
             'meaning as the corresponding patch option')),
          ('b', 'base', '', _('base path')),
          ('f', 'force', None,
           _('skip check for outstanding uncommitted changes'))] + commitopts,
         _('hg import [-p NUM] [-m MESSAGE] [-f] PATCH...')),
    "incoming|in": (incoming,
         [('M', 'no-merges', None, _('do not show merges')),
          ('f', 'force', None,
           _('run even when remote repository is unrelated')),
          ('', 'style', '', _('display using template map file')),
          ('n', 'newest-first', None, _('show newest record first')),
          ('', 'bundle', '', _('file to store the bundles into')),
          ('p', 'patch', None, _('show patch')),
          ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
          ('', 'template', '', _('display with template')),
         ] + remoteopts,
         _('hg incoming [-p] [-n] [-M] [-f] [-r REV]...'
           ' [--bundle FILENAME] [SOURCE]')),
    "^init":
        (init,
         remoteopts,
         _('hg init [-e CMD] [--remotecmd CMD] [DEST]')),
    "locate":
        (locate,
         [('r', 'rev', '', _('search the repository as it stood at rev')),
          ('0', 'print0', None,
           _('end filenames with NUL, for use with xargs')),
          ('f', 'fullpath', None,
           _('print complete paths from the filesystem root')),
         ] + walkopts,
         _('hg locate [OPTION]... [PATTERN]...')),
    "^log|history":
        (log,
         [('f', 'follow', None,
           _('follow changeset history, or file history across copies and renames')),
          ('', 'follow-first', None,
           _('only follow the first parent of merge changesets')),
          ('d', 'date', '', _('show revs matching date spec')),
          ('C', 'copies', None, _('show copied files')),
          ('k', 'keyword', [], _('search for a keyword')),
          ('l', 'limit', '', _('limit number of changes displayed')),
          ('r', 'rev', [], _('show the specified revision or range')),
          ('', 'removed', None, _('include revs where files were removed')),
          ('M', 'no-merges', None, _('do not show merges')),
          ('', 'style', '', _('display using template map file')),
          ('m', 'only-merges', None, _('show only merges')),
          ('p', 'patch', None, _('show patch')),
          ('P', 'prune', [], _('do not display revision or any of its ancestors')),
          ('', 'template', '', _('display with template')),
         ] + walkopts,
         _('hg log [OPTION]... [FILE]')),
    "manifest": (manifest, [], _('hg manifest [REV]')),
    "merge":
        (merge,
         [('f', 'force', None, _('force a merge with outstanding changes'))],
         _('hg merge [-f] [REV]')),
    "outgoing|out": (outgoing,
         [('M', 'no-merges', None, _('do not show merges')),
          ('f', 'force', None,
           _('run even when remote repository is unrelated')),
          ('p', 'patch', None, _('show patch')),
          ('', 'style', '', _('display using template map file')),
          ('r', 'rev', [], _('a specific revision you would like to push')),
          ('n', 'newest-first', None, _('show newest record first')),
          ('', 'template', '', _('display with template')),
         ] + remoteopts,
         _('hg outgoing [-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
    "^parents":
        (parents,
         [('r', 'rev', '', _('show parents from the specified rev')),
          ('', 'style', '', _('display using template map file')),
          ('', 'template', '', _('display with template'))],
         _('hg parents [-r REV] [FILE]')),
    "paths": (paths, [], _('hg paths [NAME]')),
    "^pull":
        (pull,
         [('u', 'update', None,
           _('update to new tip if changesets were pulled')),
          ('f', 'force', None,
           _('run even when remote repository is unrelated')),
          ('r', 'rev', [],
           _('a specific revision up to which you would like to pull')),
         ] + remoteopts,
         _('hg pull [-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
    "^push":
        (push,
         [('f', 'force', None, _('force push')),
          ('r', 'rev', [], _('a specific revision you would like to push')),
         ] + remoteopts,
         _('hg push [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
    "debugrawcommit|rawcommit":
        (rawcommit,
         [('p', 'parent', [], _('parent')),
          ('d', 'date', '', _('date code')),
          ('u', 'user', '', _('user')),
          ('F', 'files', '', _('file list'))
         ] + commitopts,
         _('hg debugrawcommit [OPTION]... [FILE]...')),
    "recover": (recover, [], _('hg recover')),
    "^remove|rm":
        (remove,
         [('A', 'after', None, _('record remove that has already occurred')),
          ('f', 'force', None, _('remove file even if modified')),
         ] + walkopts,
         _('hg remove [OPTION]... FILE...')),
    "rename|mv":
        (rename,
         [('A', 'after', None, _('record a rename that has already occurred')),
          ('f', 'force', None,
           _('forcibly copy over an existing managed file')),
         ] + walkopts + dryrunopts,
         _('hg rename [OPTION]... SOURCE... DEST')),
    "^revert":
        (revert,
         [('a', 'all', None, _('revert all changes when no arguments given')),
          ('d', 'date', '', _('tipmost revision matching date')),
          ('r', 'rev', '', _('revision to revert to')),
          ('', 'no-backup', None, _('do not save backup copies of files')),
         ] + walkopts + dryrunopts,
         _('hg revert [OPTION]... [-r REV] [NAME]...')),
    "rollback": (rollback, [], _('hg rollback')),
    "root": (root, [], _('hg root')),
    "showconfig|debugconfig":
        (showconfig,
         [('u', 'untrusted', None, _('show untrusted configuration options'))],
         _('showconfig [-u] [NAME]...')),
    "^serve":
        (serve,
         [('A', 'accesslog', '', _('name of access log file to write to')),
          ('d', 'daemon', None, _('run server in background')),
          ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
          ('E', 'errorlog', '', _('name of error log file to write to')),
          ('p', 'port', 0, _('port to use (default: 8000)')),
          ('a', 'address', '', _('address to use')),
          ('n', 'name', '',
           _('name to show in web pages (default: working dir)')),
          ('', 'webdir-conf', '', _('name of the webdir config file'
                                    ' (serve more than one repo)')),
          ('', 'pid-file', '', _('name of file to write process ID to')),
          ('', 'stdio', None, _('for remote clients')),
          ('t', 'templates', '', _('web templates to use')),
          ('', 'style', '', _('template style to use')),
          ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
         _('hg serve [OPTION]...')),
    "^status|st":
        (status,
         [('A', 'all', None, _('show status of all files')),
          ('m', 'modified', None, _('show only modified files')),
          ('a', 'added', None, _('show only added files')),
          ('r', 'removed', None, _('show only removed files')),
          ('d', 'deleted', None, _('show only deleted (but tracked) files')),
          ('c', 'clean', None, _('show only files without changes')),
          ('u', 'unknown', None, _('show only unknown (not tracked) files')),
          ('i', 'ignored', None, _('show ignored files')),
          ('n', 'no-status', None, _('hide status prefix')),
          ('C', 'copies', None, _('show source of copied files')),
          ('0', 'print0', None,
           _('end filenames with NUL, for use with xargs')),
          ('', 'rev', [], _('show difference from revision')),
         ] + walkopts,
         _('hg status [OPTION]... [FILE]...')),
    "tag":
        (tag,
         [('l', 'local', None, _('make the tag local')),
          ('m', 'message', '', _('message for tag commit log entry')),
          ('d', 'date', '', _('record datecode as commit date')),
          ('u', 'user', '', _('record user as commiter')),
          ('r', 'rev', '', _('revision to tag'))],
         _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
    "tags": (tags, [], _('hg tags')),
    "tip":
        (tip,
         [('', 'style', '', _('display using template map file')),
          ('p', 'patch', None, _('show patch')),
          ('', 'template', '', _('display with template'))],
         _('hg tip [-p]')),
    "unbundle":
        (unbundle,
         [('u', 'update', None,
           _('update to new tip if changesets were unbundled'))],
         _('hg unbundle [-u] FILE')),
    "^update|up|checkout|co":
        (update,
         [('C', 'clean', None, _('overwrite locally modified files')),
          ('d', 'date', '', _('tipmost revision matching date'))],
         _('hg update [-C] [-d DATE] [REV]')),
    "verify": (verify, [], _('hg verify')),
    "version": (version_, [], _('hg version')),
}
2909 2910
# space-separated command-name lists; NOTE(review): presumably consulted
# by the dispatcher so the listed commands can run without a repository
# (norepo) or with an optional one (optionalrepo) -- confirm in dispatch()
norepo = ("clone init version help debugancestor debugcomplete debugdata"
          " debugindex debugindexdot debugdate debuginstall")
optionalrepo = ("paths serve showconfig")
2913 2914
def findpossible(ui, cmd):
    """
    Return cmd -> (aliases, command table entry)
    for each matching command.
    Return debug commands (or their aliases) only if no normal command matches.
    """
    normal = {}
    debug = {}
    for key in table.keys():
        # strip the '^' marker, then split the alias list
        names = key.lstrip("^").split("|")
        match = None
        if cmd in names:
            match = cmd
        elif not ui.config("ui", "strict"):
            # non-strict mode also accepts a prefix of any alias
            for candidate in names:
                if candidate.startswith(cmd):
                    match = candidate
                    break
        if match is None:
            continue
        entry = (names, table[key])
        if names[0].startswith("debug") or match.startswith("debug"):
            debug[match] = entry
        else:
            normal[match] = entry

    # debug commands are only offered when nothing else matched
    if normal:
        return normal
    return debug
2942 2943
def findcmd(ui, cmd):
    """Return (aliases, command table entry) for command string.

    Raises AmbiguousCommand if cmd prefixes several commands and
    UnknownCommand if it matches none.
    """
    choice = findpossible(ui, cmd)

    # exact match wins even if other commands share the prefix
    # (use the 'in' operator rather than the deprecated dict.has_key)
    if cmd in choice:
        return choice[cmd]

    if len(choice) > 1:
        clist = choice.keys()
        clist.sort()
        raise AmbiguousCommand(cmd, clist)

    if choice:
        return choice.values()[0]

    raise UnknownCommand(cmd)
2959 2960
def catchterm(*args):
    # signal handler (installed in dispatch() for SIGBREAK/SIGHUP/SIGTERM):
    # turn the signal into an exception so cleanup code can run
    raise util.SignalInterrupt
2962 2963
def run():
    """Command-line entry point: dispatch sys.argv and exit with its status."""
    sys.exit(dispatch(sys.argv[1:]))
2965 2966
class ParseError(Exception):
    """Exception raised on errors in parsing the command line."""
    # instantiated as ParseError(command_or_None, getopt_error) -- see parse()
2968 2969
def parse(ui, args):
    """Parse a full command line.

    Returns (cmd, func, args, options, cmdoptions) where func is the
    command's implementation (or None when no command was given).
    Raises ParseError on bad options; findcmd may raise for unknown or
    ambiguous command names.
    """
    options = {}
    cmdoptions = {}

    # first pass: global options up to the command name
    try:
        args = fancyopts.fancyopts(args, globalopts, options)
    except fancyopts.getopt.GetoptError, inst:
        raise ParseError(None, inst)

    if args:
        cmd, args = args[0], args[1:]
        aliases, i = findcmd(ui, cmd)
        # canonicalize to the primary alias
        cmd = aliases[0]
        # prepend per-command default arguments from [defaults]
        defaults = ui.config("defaults", cmd)
        if defaults:
            args = shlex.split(defaults) + args
        c = list(i[1])
    else:
        cmd = None
        c = []

    # combine global options into local
    for o in globalopts:
        c.append((o[0], o[1], options[o[1]], o[3]))

    # second pass: parse the remaining args against the combined list
    try:
        args = fancyopts.fancyopts(args, c, cmdoptions)
    except fancyopts.getopt.GetoptError, inst:
        raise ParseError(cmd, inst)

    # separate global options back out
    for o in globalopts:
        n = o[1]
        options[n] = cmdoptions[n]
        del cmdoptions[n]

    return (cmd, cmd and i[0] or None, args, options, cmdoptions)
3006 3007
# loaded extensions: maps extension name -> module name in sys.modules
# (filled in by load_extensions, queried by findext)
external = {}
3008 3009
def findext(name):
    '''return module with given extension name'''
    try:
        return sys.modules[external[name]]
    except KeyError:
        # no direct hit (either name unregistered or module not in
        # sys.modules): scan for a dotted/slashed suffix match on the
        # registered key, or an exact match on the module name
        for path, modname in external.iteritems():
            suffix_hit = (path.endswith('.' + name) or
                          path.endswith('/' + name))
            if suffix_hit or modname == name:
                return sys.modules[modname]
        raise KeyError(name)
3018 3019
def load_extensions(ui):
    """Import every extension enabled in ui and run its setup hooks.

    Returns 1 if an import failure was re-raised interactively via
    ui.print_exc(); otherwise returns None.
    """
    added = []
    for ext_name, load_from_name in ui.extensions():
        # skip extensions that were already loaded
        if ext_name in external:
            continue
        try:
            if load_from_name:
                # the module will be loaded in sys.modules;
                # choose a unique name so that it doesn't
                # conflict with other modules
                module_name = "hgext_%s" % ext_name.replace('.', '_')
                mod = imp.load_source(module_name, load_from_name)
            else:
                # import a (possibly dotted) module name, returning the
                # leaf module rather than the top-level package
                def importh(name):
                    mod = __import__(name)
                    components = name.split('.')
                    for comp in components[1:]:
                        mod = getattr(mod, comp)
                    return mod
                # prefer the bundled hgext package, fall back to a
                # plain module on sys.path
                try:
                    mod = importh("hgext.%s" % ext_name)
                except ImportError:
                    mod = importh(ext_name)
            external[ext_name] = mod.__name__
            added.append((mod, ext_name))
        except (util.SignalInterrupt, KeyboardInterrupt):
            raise
        except Exception, inst:
            # a broken extension must not keep hg from running
            ui.warn(_("*** failed to import extension %s: %s\n") %
                    (ext_name, inst))
            if ui.print_exc():
                return 1

    # run setup hooks only after all extensions imported successfully
    for mod, name in added:
        uisetup = getattr(mod, 'uisetup', None)
        if uisetup:
            uisetup(ui)
        # merge the extension's commands into the global table
        cmdtable = getattr(mod, 'cmdtable', {})
        for t in cmdtable:
            if t in table:
                ui.warn(_("module %s overrides %s\n") % (name, t))
        table.update(cmdtable)
3061 3062
3062 3063 def parseconfig(config):
3063 3064 """parse the --config options from the command line"""
3064 3065 parsed = []
3065 3066 for cfg in config:
3066 3067 try:
3067 3068 name, value = cfg.split('=', 1)
3068 3069 section, name = name.split('.', 1)
3069 3070 if not section or not name:
3070 3071 raise IndexError
3071 3072 parsed.append((section, name, value))
3072 3073 except (IndexError, ValueError):
3073 3074 raise util.Abort(_('malformed --config option: %s') % cfg)
3074 3075 return parsed
3075 3076
3076 3077 def dispatch(args):
3077 3078 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
3078 3079 num = getattr(signal, name, None)
3079 3080 if num: signal.signal(num, catchterm)
3080 3081
3081 3082 try:
3082 3083 u = ui.ui(traceback='--traceback' in sys.argv[1:])
3083 3084 except util.Abort, inst:
3084 3085 sys.stderr.write(_("abort: %s\n") % inst)
3085 3086 return -1
3086 3087
3087 3088 load_extensions(u)
3088 3089 u.addreadhook(load_extensions)
3089 3090
3090 3091 try:
3091 3092 cmd, func, args, options, cmdoptions = parse(u, args)
3092 3093 if options["encoding"]:
3093 3094 util._encoding = options["encoding"]
3094 3095 if options["encodingmode"]:
3095 3096 util._encodingmode = options["encodingmode"]
3096 3097 if options["time"]:
3097 3098 def get_times():
3098 3099 t = os.times()
3099 3100 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
3100 3101 t = (t[0], t[1], t[2], t[3], time.clock())
3101 3102 return t
3102 3103 s = get_times()
3103 3104 def print_time():
3104 3105 t = get_times()
3105 3106 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
3106 3107 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
3107 3108 atexit.register(print_time)
3108 3109
3109 3110 # enter the debugger before command execution
3110 3111 if options['debugger']:
3111 3112 pdb.set_trace()
3112 3113
3113 3114 try:
3114 3115 if options['cwd']:
3115 3116 os.chdir(options['cwd'])
3116 3117
3117 3118 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3118 3119 not options["noninteractive"], options["traceback"],
3119 3120 parseconfig(options["config"]))
3120 3121
3121 3122 path = u.expandpath(options["repository"]) or ""
3122 3123 repo = path and hg.repository(u, path=path) or None
3123 3124 if repo and not repo.local():
3124 3125 raise util.Abort(_("repository '%s' is not local") % path)
3125 3126
3126 3127 if options['help']:
3127 3128 return help_(u, cmd, options['version'])
3128 3129 elif options['version']:
3129 3130 return version_(u)
3130 3131 elif not cmd:
3131 3132 return help_(u, 'shortlist')
3132 3133
3133 3134 if cmd not in norepo.split():
3134 3135 try:
3135 3136 if not repo:
3136 3137 repo = hg.repository(u, path=path)
3137 3138 u = repo.ui
3138 3139 for name in external.itervalues():
3139 3140 mod = sys.modules[name]
3140 3141 if hasattr(mod, 'reposetup'):
3141 3142 mod.reposetup(u, repo)
3142 3143 hg.repo_setup_hooks.append(mod.reposetup)
3143 3144 except hg.RepoError:
3144 3145 if cmd not in optionalrepo.split():
3145 3146 raise
3146 3147 d = lambda: func(u, repo, *args, **cmdoptions)
3147 3148 else:
3148 3149 d = lambda: func(u, *args, **cmdoptions)
3149 3150
3150 3151 try:
3151 3152 if options['profile']:
3152 3153 import hotshot, hotshot.stats
3153 3154 prof = hotshot.Profile("hg.prof")
3154 3155 try:
3155 3156 try:
3156 3157 return prof.runcall(d)
3157 3158 except:
3158 3159 try:
3159 3160 u.warn(_('exception raised - generating '
3160 3161 'profile anyway\n'))
3161 3162 except:
3162 3163 pass
3163 3164 raise
3164 3165 finally:
3165 3166 prof.close()
3166 3167 stats = hotshot.stats.load("hg.prof")
3167 3168 stats.strip_dirs()
3168 3169 stats.sort_stats('time', 'calls')
3169 3170 stats.print_stats(40)
3170 3171 elif options['lsprof']:
3171 3172 try:
3172 3173 from mercurial import lsprof
3173 3174 except ImportError:
3174 3175 raise util.Abort(_(
3175 3176 'lsprof not available - install from '
3176 3177 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
3177 3178 p = lsprof.Profiler()
3178 3179 p.enable(subcalls=True)
3179 3180 try:
3180 3181 return d()
3181 3182 finally:
3182 3183 p.disable()
3183 3184 stats = lsprof.Stats(p.getstats())
3184 3185 stats.sort()
3185 3186 stats.pprint(top=10, file=sys.stderr, climit=5)
3186 3187 else:
3187 3188 return d()
3188 3189 finally:
3189 3190 u.flush()
3190 3191 except:
3191 3192 # enter the debugger when we hit an exception
3192 3193 if options['debugger']:
3193 3194 pdb.post_mortem(sys.exc_info()[2])
3194 3195 u.print_exc()
3195 3196 raise
3196 3197 except ParseError, inst:
3197 3198 if inst.args[0]:
3198 3199 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
3199 3200 help_(u, inst.args[0])
3200 3201 else:
3201 3202 u.warn(_("hg: %s\n") % inst.args[1])
3202 3203 help_(u, 'shortlist')
3203 3204 except AmbiguousCommand, inst:
3204 3205 u.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
3205 3206 (inst.args[0], " ".join(inst.args[1])))
3206 3207 except UnknownCommand, inst:
3207 3208 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
3208 3209 help_(u, 'shortlist')
3209 3210 except hg.RepoError, inst:
3210 3211 u.warn(_("abort: %s!\n") % inst)
3211 3212 except lock.LockHeld, inst:
3212 3213 if inst.errno == errno.ETIMEDOUT:
3213 3214 reason = _('timed out waiting for lock held by %s') % inst.locker
3214 3215 else:
3215 3216 reason = _('lock held by %s') % inst.locker
3216 3217 u.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
3217 3218 except lock.LockUnavailable, inst:
3218 3219 u.warn(_("abort: could not lock %s: %s\n") %
3219 3220 (inst.desc or inst.filename, inst.strerror))
3220 3221 except revlog.RevlogError, inst:
3221 3222 u.warn(_("abort: %s!\n") % inst)
3222 3223 except util.SignalInterrupt:
3223 3224 u.warn(_("killed!\n"))
3224 3225 except KeyboardInterrupt:
3225 3226 try:
3226 3227 u.warn(_("interrupted!\n"))
3227 3228 except IOError, inst:
3228 3229 if inst.errno == errno.EPIPE:
3229 3230 if u.debugflag:
3230 3231 u.warn(_("\nbroken pipe\n"))
3231 3232 else:
3232 3233 raise
3233 3234 except IOError, inst:
3234 3235 if hasattr(inst, "code"):
3235 3236 u.warn(_("abort: %s\n") % inst)
3236 3237 elif hasattr(inst, "reason"):
3237 3238 u.warn(_("abort: error: %s\n") % inst.reason[1])
3238 3239 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
3239 3240 if u.debugflag:
3240 3241 u.warn(_("broken pipe\n"))
3241 3242 elif getattr(inst, "strerror", None):
3242 3243 if getattr(inst, "filename", None):
3243 3244 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3244 3245 else:
3245 3246 u.warn(_("abort: %s\n") % inst.strerror)
3246 3247 else:
3247 3248 raise
3248 3249 except OSError, inst:
3249 3250 if getattr(inst, "filename", None):
3250 3251 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3251 3252 else:
3252 3253 u.warn(_("abort: %s\n") % inst.strerror)
3253 3254 except util.UnexpectedOutput, inst:
3254 3255 u.warn(_("abort: %s") % inst[0])
3255 3256 if not isinstance(inst[1], basestring):
3256 3257 u.warn(" %r\n" % (inst[1],))
3257 3258 elif not inst[1]:
3258 3259 u.warn(_(" empty string\n"))
3259 3260 else:
3260 3261 u.warn("\n%r\n" % util.ellipsis(inst[1]))
3261 3262 except util.Abort, inst:
3262 3263 u.warn(_("abort: %s\n") % inst)
3263 3264 except TypeError, inst:
3264 3265 # was this an argument error?
3265 3266 tb = traceback.extract_tb(sys.exc_info()[2])
3266 3267 if len(tb) > 2: # no
3267 3268 raise
3268 3269 u.debug(inst, "\n")
3269 3270 u.warn(_("%s: invalid arguments\n") % cmd)
3270 3271 help_(u, cmd)
3271 3272 except SystemExit, inst:
3272 3273 # Commands shouldn't sys.exit directly, but give a return code.
3273 3274 # Just in case catch this and and pass exit code to caller.
3274 3275 return inst.code
3275 3276 except:
3276 3277 u.warn(_("** unknown exception encountered, details follow\n"))
3277 3278 u.warn(_("** report bug details to "
3278 3279 "http://www.selenic.com/mercurial/bts\n"))
3279 3280 u.warn(_("** or mercurial@selenic.com\n"))
3280 3281 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
3281 3282 % version.get_version())
3282 3283 raise
3283 3284
3284 3285 return -1
@@ -1,508 +1,505 b''
1 1 # context.py - changeset and file context objects for mercurial
2 2 #
3 3 # Copyright 2006 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import *
9 9 from i18n import _
10 10 import ancestor, bdiff, repo, revlog, util, os
11 11
12 12 class changectx(object):
13 13 """A changecontext object makes access to data related to a particular
14 14 changeset convenient."""
15 15 def __init__(self, repo, changeid=None):
16 16 """changeid is a revision number, node, or tag"""
17 17 self._repo = repo
18 18
19 19 if not changeid and changeid != 0:
20 20 p1, p2 = self._repo.dirstate.parents()
21 21 self._rev = self._repo.changelog.rev(p1)
22 22 if self._rev == -1:
23 23 changeid = 'tip'
24 24 else:
25 25 self._node = p1
26 26 return
27 27
28 28 self._node = self._repo.lookup(changeid)
29 29 self._rev = self._repo.changelog.rev(self._node)
30 30
31 31 def __str__(self):
32 32 return short(self.node())
33 33
34 34 def __repr__(self):
35 35 return "<changectx %s>" % str(self)
36 36
37 37 def __eq__(self, other):
38 38 try:
39 39 return self._rev == other._rev
40 40 except AttributeError:
41 41 return False
42 42
43 43 def __nonzero__(self):
44 44 return self._rev != nullrev
45 45
46 46 def __getattr__(self, name):
47 47 if name == '_changeset':
48 48 self._changeset = self._repo.changelog.read(self.node())
49 49 return self._changeset
50 50 elif name == '_manifest':
51 51 self._manifest = self._repo.manifest.read(self._changeset[0])
52 52 return self._manifest
53 53 elif name == '_manifestdelta':
54 54 md = self._repo.manifest.readdelta(self._changeset[0])
55 55 self._manifestdelta = md
56 56 return self._manifestdelta
57 57 else:
58 58 raise AttributeError, name
59 59
60 60 def changeset(self): return self._changeset
61 61 def manifest(self): return self._manifest
62 62
63 63 def rev(self): return self._rev
64 64 def node(self): return self._node
65 65 def user(self): return self._changeset[1]
66 66 def date(self): return self._changeset[2]
67 67 def files(self): return self._changeset[3]
68 68 def description(self): return self._changeset[4]
69 69 def branch(self): return self._changeset[5].get("branch", "")
70 70
71 71 def parents(self):
72 72 """return contexts for each parent changeset"""
73 73 p = self._repo.changelog.parents(self._node)
74 74 return [changectx(self._repo, x) for x in p]
75 75
76 76 def children(self):
77 77 """return contexts for each child changeset"""
78 78 c = self._repo.changelog.children(self._node)
79 79 return [changectx(self._repo, x) for x in c]
80 80
81 81 def filenode(self, path):
82 82 if '_manifest' in self.__dict__:
83 83 try:
84 84 return self._manifest[path]
85 85 except KeyError:
86 raise repo.LookupError(_("'%s' not found in manifest") % path)
86 raise revlog.LookupError(_("'%s' not found in manifest") % path)
87 87 if '_manifestdelta' in self.__dict__ or path in self.files():
88 88 if path in self._manifestdelta:
89 89 return self._manifestdelta[path]
90 90 node, flag = self._repo.manifest.find(self._changeset[0], path)
91 91 if not node:
92 raise repo.LookupError(_("'%s' not found in manifest") % path)
92 raise revlog.LookupError(_("'%s' not found in manifest") % path)
93 93
94 94 return node
95 95
96 96 def filectx(self, path, fileid=None):
97 97 """get a file context from this changeset"""
98 98 if fileid is None:
99 99 fileid = self.filenode(path)
100 100 return filectx(self._repo, path, fileid=fileid, changectx=self)
101 101
102 102 def filectxs(self):
103 103 """generate a file context for each file in this changeset's
104 104 manifest"""
105 105 mf = self.manifest()
106 106 m = mf.keys()
107 107 m.sort()
108 108 for f in m:
109 109 yield self.filectx(f, fileid=mf[f])
110 110
111 111 def ancestor(self, c2):
112 112 """
113 113 return the ancestor context of self and c2
114 114 """
115 115 n = self._repo.changelog.ancestor(self._node, c2._node)
116 116 return changectx(self._repo, n)
117 117
118 118 class filectx(object):
119 119 """A filecontext object makes access to data related to a particular
120 120 filerevision convenient."""
121 121 def __init__(self, repo, path, changeid=None, fileid=None,
122 122 filelog=None, changectx=None):
123 123 """changeid can be a changeset revision, node, or tag.
124 124 fileid can be a file revision or node."""
125 125 self._repo = repo
126 126 self._path = path
127 127
128 128 assert changeid is not None or fileid is not None
129 129
130 130 if filelog:
131 131 self._filelog = filelog
132 132 if changectx:
133 133 self._changectx = changectx
134 134 self._changeid = changectx.node()
135 135
136 136 if fileid is None:
137 137 self._changeid = changeid
138 138 else:
139 139 self._fileid = fileid
140 140
141 141 def __getattr__(self, name):
142 142 if name == '_changectx':
143 143 self._changectx = changectx(self._repo, self._changeid)
144 144 return self._changectx
145 145 elif name == '_filelog':
146 146 self._filelog = self._repo.file(self._path)
147 147 return self._filelog
148 148 elif name == '_changeid':
149 149 self._changeid = self._filelog.linkrev(self._filenode)
150 150 return self._changeid
151 151 elif name == '_filenode':
152 try:
153 if '_fileid' in self.__dict__:
154 self._filenode = self._filelog.lookup(self._fileid)
155 else:
156 self._filenode = self._changectx.filenode(self._path)
157 except revlog.RevlogError, inst:
158 raise repo.LookupError(str(inst))
152 if '_fileid' in self.__dict__:
153 self._filenode = self._filelog.lookup(self._fileid)
154 else:
155 self._filenode = self._changectx.filenode(self._path)
159 156 return self._filenode
160 157 elif name == '_filerev':
161 158 self._filerev = self._filelog.rev(self._filenode)
162 159 return self._filerev
163 160 else:
164 161 raise AttributeError, name
165 162
166 163 def __nonzero__(self):
167 164 try:
168 165 n = self._filenode
169 166 return True
170 except repo.LookupError:
167 except revlog.LookupError:
171 168 # file is missing
172 169 return False
173 170
174 171 def __str__(self):
175 172 return "%s@%s" % (self.path(), short(self.node()))
176 173
177 174 def __repr__(self):
178 175 return "<filectx %s>" % str(self)
179 176
180 177 def __eq__(self, other):
181 178 try:
182 179 return (self._path == other._path
183 180 and self._changeid == other._changeid)
184 181 except AttributeError:
185 182 return False
186 183
187 184 def filectx(self, fileid):
188 185 '''opens an arbitrary revision of the file without
189 186 opening a new filelog'''
190 187 return filectx(self._repo, self._path, fileid=fileid,
191 188 filelog=self._filelog)
192 189
193 190 def filerev(self): return self._filerev
194 191 def filenode(self): return self._filenode
195 192 def filelog(self): return self._filelog
196 193
197 194 def rev(self):
198 195 if '_changectx' in self.__dict__:
199 196 return self._changectx.rev()
200 197 return self._filelog.linkrev(self._filenode)
201 198
202 199 def node(self): return self._changectx.node()
203 200 def user(self): return self._changectx.user()
204 201 def date(self): return self._changectx.date()
205 202 def files(self): return self._changectx.files()
206 203 def description(self): return self._changectx.description()
207 204 def branch(self): return self._changectx.branch()
208 205 def manifest(self): return self._changectx.manifest()
209 206 def changectx(self): return self._changectx
210 207
211 208 def data(self): return self._filelog.read(self._filenode)
212 209 def renamed(self): return self._filelog.renamed(self._filenode)
213 210 def path(self): return self._path
214 211 def size(self): return self._filelog.size(self._filerev)
215 212
216 213 def cmp(self, text): return self._filelog.cmp(self._filenode, text)
217 214
218 215 def parents(self):
219 216 p = self._path
220 217 fl = self._filelog
221 218 pl = [(p, n, fl) for n in self._filelog.parents(self._filenode)]
222 219
223 220 r = self.renamed()
224 221 if r:
225 222 pl[0] = (r[0], r[1], None)
226 223
227 224 return [filectx(self._repo, p, fileid=n, filelog=l)
228 225 for p,n,l in pl if n != nullid]
229 226
230 227 def children(self):
231 228 # hard for renames
232 229 c = self._filelog.children(self._filenode)
233 230 return [filectx(self._repo, self._path, fileid=x,
234 231 filelog=self._filelog) for x in c]
235 232
236 233 def annotate(self, follow=False):
237 234 '''returns a list of tuples of (ctx, line) for each line
238 235 in the file, where ctx is the filectx of the node where
239 236 that line was last changed'''
240 237
241 238 def decorate(text, rev):
242 239 return ([rev] * len(text.splitlines()), text)
243 240
244 241 def pair(parent, child):
245 242 for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
246 243 child[0][b1:b2] = parent[0][a1:a2]
247 244 return child
248 245
249 246 getlog = util.cachefunc(lambda x: self._repo.file(x))
250 247 def getctx(path, fileid):
251 248 log = path == self._path and self._filelog or getlog(path)
252 249 return filectx(self._repo, path, fileid=fileid, filelog=log)
253 250 getctx = util.cachefunc(getctx)
254 251
255 252 def parents(f):
256 253 # we want to reuse filectx objects as much as possible
257 254 p = f._path
258 255 if f._filerev is None: # working dir
259 256 pl = [(n.path(), n.filerev()) for n in f.parents()]
260 257 else:
261 258 pl = [(p, n) for n in f._filelog.parentrevs(f._filerev)]
262 259
263 260 if follow:
264 261 r = f.renamed()
265 262 if r:
266 263 pl[0] = (r[0], getlog(r[0]).rev(r[1]))
267 264
268 265 return [getctx(p, n) for p, n in pl if n != nullrev]
269 266
270 267 # use linkrev to find the first changeset where self appeared
271 268 if self.rev() != self._filelog.linkrev(self._filenode):
272 269 base = self.filectx(self.filerev())
273 270 else:
274 271 base = self
275 272
276 273 # find all ancestors
277 274 needed = {base: 1}
278 275 visit = [base]
279 276 files = [base._path]
280 277 while visit:
281 278 f = visit.pop(0)
282 279 for p in parents(f):
283 280 if p not in needed:
284 281 needed[p] = 1
285 282 visit.append(p)
286 283 if p._path not in files:
287 284 files.append(p._path)
288 285 else:
289 286 # count how many times we'll use this
290 287 needed[p] += 1
291 288
292 289 # sort by revision (per file) which is a topological order
293 290 visit = []
294 291 files.reverse()
295 292 for f in files:
296 293 fn = [(n._filerev, n) for n in needed.keys() if n._path == f]
297 294 fn.sort()
298 295 visit.extend(fn)
299 296 hist = {}
300 297
301 298 for r, f in visit:
302 299 curr = decorate(f.data(), f)
303 300 for p in parents(f):
304 301 if p != nullid:
305 302 curr = pair(hist[p], curr)
306 303 # trim the history of unneeded revs
307 304 needed[p] -= 1
308 305 if not needed[p]:
309 306 del hist[p]
310 307 hist[f] = curr
311 308
312 309 return zip(hist[f][0], hist[f][1].splitlines(1))
313 310
314 311 def ancestor(self, fc2):
315 312 """
316 313 find the common ancestor file context, if any, of self, and fc2
317 314 """
318 315
319 316 acache = {}
320 317
321 318 # prime the ancestor cache for the working directory
322 319 for c in (self, fc2):
323 320 if c._filerev == None:
324 321 pl = [(n.path(), n.filenode()) for n in c.parents()]
325 322 acache[(c._path, None)] = pl
326 323
327 324 flcache = {self._path:self._filelog, fc2._path:fc2._filelog}
328 325 def parents(vertex):
329 326 if vertex in acache:
330 327 return acache[vertex]
331 328 f, n = vertex
332 329 if f not in flcache:
333 330 flcache[f] = self._repo.file(f)
334 331 fl = flcache[f]
335 332 pl = [(f, p) for p in fl.parents(n) if p != nullid]
336 333 re = fl.renamed(n)
337 334 if re:
338 335 pl.append(re)
339 336 acache[vertex] = pl
340 337 return pl
341 338
342 339 a, b = (self._path, self._filenode), (fc2._path, fc2._filenode)
343 340 v = ancestor.ancestor(a, b, parents)
344 341 if v:
345 342 f, n = v
346 343 return filectx(self._repo, f, fileid=n, filelog=flcache[f])
347 344
348 345 return None
349 346
350 347 class workingctx(changectx):
351 348 """A workingctx object makes access to data related to
352 349 the current working directory convenient."""
353 350 def __init__(self, repo):
354 351 self._repo = repo
355 352 self._rev = None
356 353 self._node = None
357 354
358 355 def __str__(self):
359 356 return str(self._parents[0]) + "+"
360 357
361 358 def __nonzero__(self):
362 359 return True
363 360
364 361 def __getattr__(self, name):
365 362 if name == '_parents':
366 363 self._parents = self._repo.parents()
367 364 return self._parents
368 365 if name == '_status':
369 366 self._status = self._repo.status()
370 367 return self._status
371 368 if name == '_manifest':
372 369 self._buildmanifest()
373 370 return self._manifest
374 371 else:
375 372 raise AttributeError, name
376 373
377 374 def _buildmanifest(self):
378 375 """generate a manifest corresponding to the working directory"""
379 376
380 377 man = self._parents[0].manifest().copy()
381 378 copied = self._repo.dirstate.copies()
382 379 modified, added, removed, deleted, unknown = self._status[:5]
383 380 for i, l in (("a", added), ("m", modified), ("u", unknown)):
384 381 for f in l:
385 382 man[f] = man.get(copied.get(f, f), nullid) + i
386 383 try:
387 384 man.set(f, util.is_exec(self._repo.wjoin(f), man.execf(f)))
388 385 except OSError:
389 386 pass
390 387
391 388 for f in deleted + removed:
392 389 if f in man:
393 390 del man[f]
394 391
395 392 self._manifest = man
396 393
397 394 def manifest(self): return self._manifest
398 395
399 396 def user(self): return self._repo.ui.username()
400 397 def date(self): return util.makedate()
401 398 def description(self): return ""
402 399 def files(self):
403 400 f = self.modified() + self.added() + self.removed()
404 401 f.sort()
405 402 return f
406 403
407 404 def modified(self): return self._status[0]
408 405 def added(self): return self._status[1]
409 406 def removed(self): return self._status[2]
410 407 def deleted(self): return self._status[3]
411 408 def unknown(self): return self._status[4]
412 409 def clean(self): return self._status[5]
413 410 def branch(self):
414 411 try:
415 412 return self._repo.opener("branch").read().strip()
416 413 except IOError:
417 414 return ""
418 415
419 416 def parents(self):
420 417 """return contexts for each parent changeset"""
421 418 return self._parents
422 419
423 420 def children(self):
424 421 return []
425 422
426 423 def filectx(self, path):
427 424 """get a file context from the working directory"""
428 425 return workingfilectx(self._repo, path, workingctx=self)
429 426
430 427 def ancestor(self, c2):
431 428 """return the ancestor context of self and c2"""
432 429 return self._parents[0].ancestor(c2) # punt on two parents for now
433 430
434 431 class workingfilectx(filectx):
435 432 """A workingfilectx object makes access to data related to a particular
436 433 file in the working directory convenient."""
437 434 def __init__(self, repo, path, filelog=None, workingctx=None):
438 435 """changeid can be a changeset revision, node, or tag.
439 436 fileid can be a file revision or node."""
440 437 self._repo = repo
441 438 self._path = path
442 439 self._changeid = None
443 440 self._filerev = self._filenode = None
444 441
445 442 if filelog:
446 443 self._filelog = filelog
447 444 if workingctx:
448 445 self._changectx = workingctx
449 446
450 447 def __getattr__(self, name):
451 448 if name == '_changectx':
452 449 self._changectx = workingctx(repo)
453 450 return self._changectx
454 451 elif name == '_repopath':
455 452 self._repopath = (self._repo.dirstate.copied(self._path)
456 453 or self._path)
457 454 return self._repopath
458 455 elif name == '_filelog':
459 456 self._filelog = self._repo.file(self._repopath)
460 457 return self._filelog
461 458 else:
462 459 raise AttributeError, name
463 460
464 461 def __nonzero__(self):
465 462 return True
466 463
467 464 def __str__(self):
468 465 return "%s@%s" % (self.path(), self._changectx)
469 466
470 467 def filectx(self, fileid):
471 468 '''opens an arbitrary revision of the file without
472 469 opening a new filelog'''
473 470 return filectx(self._repo, self._repopath, fileid=fileid,
474 471 filelog=self._filelog)
475 472
476 473 def rev(self):
477 474 if '_changectx' in self.__dict__:
478 475 return self._changectx.rev()
479 476 return self._filelog.linkrev(self._filenode)
480 477
481 478 def data(self): return self._repo.wread(self._path)
482 479 def renamed(self):
483 480 rp = self._repopath
484 481 if rp == self._path:
485 482 return None
486 483 return rp, self._workingctx._parents._manifest.get(rp, nullid)
487 484
488 485 def parents(self):
489 486 '''return parent filectxs, following copies if necessary'''
490 487 p = self._path
491 488 rp = self._repopath
492 489 pcl = self._changectx._parents
493 490 fl = self._filelog
494 491 pl = [(rp, pcl[0]._manifest.get(rp, nullid), fl)]
495 492 if len(pcl) > 1:
496 493 if rp != p:
497 494 fl = None
498 495 pl.append((p, pcl[1]._manifest.get(p, nullid), fl))
499 496
500 497 return [filectx(self._repo, p, fileid=n, filelog=l)
501 498 for p,n,l in pl if n != nullid]
502 499
503 500 def children(self):
504 501 return []
505 502
506 503 def size(self): return os.stat(self._repo.wjoin(self._path)).st_size
507 504
508 505 def cmp(self, text): return self._repo.wread(self._path) == text
@@ -1,1863 +1,1863 b''
1 1 # localrepo.py - read/write repository class for mercurial
2 2 #
3 3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import *
9 9 from i18n import _
10 10 import repo, appendfile, changegroup
11 11 import changelog, dirstate, filelog, manifest, context
12 12 import re, lock, transaction, tempfile, stat, mdiff, errno, ui
13 13 import os, revlog, time, util
14 14
class localrepository(repo.repository):
    # wire-protocol capabilities advertised to clients
    capabilities = ('lookup', 'changegroupsubset')
    # repository requirements this class knows how to handle
    supported = ('revlogv1', 'store')

    def __del__(self):
        # drop any open transaction handle at collection time
        self.transhandle = None
    def __init__(self, parentui, path=None, create=0):
        """Open (or, with create true, create) the repository at path.

        When path is None, search upward from the cwd for a directory
        containing '.hg'.  Raises repo.RepoError when no repository is
        found, when create is requested on an existing repository, or
        when the repository lists an unsupported requirement.
        """
        repo.repository.__init__(self)
        if not path:
            # walk up from cwd until a .hg directory is found
            p = os.getcwd()
            while not os.path.isdir(os.path.join(p, ".hg")):
                oldp = p
                p = os.path.dirname(p)
                if p == oldp:
                    raise repo.RepoError(_("There is no Mercurial repository"
                                           " here (.hg not found)"))
            path = p

        self.path = os.path.join(path, ".hg")
        self.root = os.path.realpath(path)
        self.origroot = path
        self.opener = util.opener(self.path)
        self.wopener = util.opener(self.root)

        if not os.path.isdir(self.path):
            if create:
                if not os.path.exists(path):
                    os.mkdir(path)
                os.mkdir(self.path)
                os.mkdir(os.path.join(self.path, "store"))
                # new repositories get the full requirement set
                requirements = ("revlogv1", "store")
                reqfile = self.opener("requires", "w")
                for r in requirements:
                    reqfile.write("%s\n" % r)
                reqfile.close()
                # create an invalid changelog
                self.opener("00changelog.i", "a").write(
                    '\0\0\0\2' # represents revlogv2
                    ' dummy changelog to prevent using the old repo layout'
                )
            else:
                raise repo.RepoError(_("repository %s not found") % path)
        elif create:
            raise repo.RepoError(_("repository %s already exists") % path)
        else:
            # find requirements
            try:
                requirements = self.opener("requires").read().splitlines()
            except IOError, inst:
                if inst.errno != errno.ENOENT:
                    raise
                # older repositories have no requires file
                requirements = []
            # check them
            for r in requirements:
                if r not in self.supported:
                    raise repo.RepoError(_("requirement '%s' not supported") % r)

        # setup store: with the 'store' requirement, revlogs live under
        # .hg/store and filenames are encoded on disk
        if "store" in requirements:
            self.encodefn = util.encodefilename
            self.decodefn = util.decodefilename
            self.spath = os.path.join(self.path, "store")
        else:
            self.encodefn = lambda x: x
            self.decodefn = lambda x: x
            self.spath = self.path
        self.sopener = util.encodedopener(util.opener(self.spath), self.encodefn)

        self.ui = ui.ui(parentui=parentui)
        try:
            self.ui.readconfig(self.join("hgrc"), self.root)
        except IOError:
            # a repository need not have an hgrc
            pass

        # revlog format version and flags, from configuration
        v = self.ui.configrevlog()
        self.revlogversion = int(v.get('format', revlog.REVLOG_DEFAULT_FORMAT))
        self.revlogv1 = self.revlogversion != revlog.REVLOGV0
        fl = v.get('flags', None)
        flags = 0
        if fl != None:
            for x in fl.split():
                flags |= revlog.flagstr(x)
        elif self.revlogv1:
            flags = revlog.REVLOG_DEFAULT_FLAGS

        v = self.revlogversion | flags
        self.manifest = manifest.manifest(self.sopener, v)
        self.changelog = changelog.changelog(self.sopener, v)

        fallback = self.ui.config('ui', 'fallbackencoding')
        if fallback:
            util._fallbackencoding = fallback

        # the changelog might not have the inline index flag
        # on. If the format of the changelog is the same as found in
        # .hgrc, apply any flags found in the .hgrc as well.
        # Otherwise, just version from the changelog
        v = self.changelog.version
        if v == self.revlogversion:
            v |= flags
        self.revlogversion = v

        # lazily-populated caches
        self.tagscache = None
        self.branchcache = None
        self.nodetagscache = None
        self.encodepats = None
        self.decodepats = None
        self.transhandle = None

        self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)

    def url(self):
        # local repositories are addressed with the file: scheme
        return 'file:' + self.root
128 128
    def hook(self, name, throw=False, **args):
        """Run all configured hooks matching `name`.

        Hooks come from the [hooks] config section and run in sorted
        order.  Entries whose command starts with 'python:' are called
        in-process; anything else runs as an external command with args
        exported as HG_* environment variables.  With throw=True a
        failing hook raises util.Abort; otherwise failures only warn.
        Returns true when any hook reported failure.
        """
        def callhook(hname, funcname):
            '''call python hook. hook is callable object, looked up as
            name in python module. if callable returns "true", hook
            fails, else passes. if hook raises exception, treated as
            hook failure. exception propagates if throw is "true".

            reason for "true" meaning "hook failed" is so that
            unmodified commands (e.g. mercurial.commands.update) can
            be run as hooks without wrappers to convert return values.'''

            self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
            d = funcname.rfind('.')
            if d == -1:
                raise util.Abort(_('%s hook is invalid ("%s" not in a module)')
                                 % (hname, funcname))
            modname = funcname[:d]
            try:
                obj = __import__(modname)
            except ImportError:
                try:
                    # extensions are loaded with hgext_ prefix
                    obj = __import__("hgext_%s" % modname)
                except ImportError:
                    raise util.Abort(_('%s hook is invalid '
                                       '(import of "%s" failed)') %
                                     (hname, modname))
            try:
                # walk the dotted path down to the callable
                for p in funcname.split('.')[1:]:
                    obj = getattr(obj, p)
            except AttributeError, err:
                raise util.Abort(_('%s hook is invalid '
                                   '("%s" is not defined)') %
                                 (hname, funcname))
            if not callable(obj):
                raise util.Abort(_('%s hook is invalid '
                                   '("%s" is not callable)') %
                                 (hname, funcname))
            try:
                r = obj(ui=self.ui, repo=self, hooktype=name, **args)
            except (KeyboardInterrupt, util.SignalInterrupt):
                raise
            except Exception, exc:
                if isinstance(exc, util.Abort):
                    self.ui.warn(_('error: %s hook failed: %s\n') %
                                 (hname, exc.args[0]))
                else:
                    self.ui.warn(_('error: %s hook raised an exception: '
                                   '%s\n') % (hname, exc))
                if throw:
                    raise
                self.ui.print_exc()
                return True
            if r:
                if throw:
                    raise util.Abort(_('%s hook failed') % hname)
                self.ui.warn(_('warning: %s hook failed\n') % hname)
            return r

        def runhook(name, cmd):
            # external hook: run cmd with HG_* variables in environment
            self.ui.note(_("running hook %s: %s\n") % (name, cmd))
            env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()])
            r = util.system(cmd, environ=env, cwd=self.root)
            if r:
                desc, r = util.explain_exit(r)
                if throw:
                    raise util.Abort(_('%s hook %s') % (name, desc))
                self.ui.warn(_('warning: %s hook %s\n') % (name, desc))
            return r

        r = False
        # select matching hook entries and run them in sorted order
        hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
                 if hname.split(".", 1)[0] == name and cmd]
        hooks.sort()
        for hname, cmd in hooks:
            if cmd.startswith('python:'):
                r = callhook(hname, cmd[7:].strip()) or r
            else:
                r = runhook(hname, cmd) or r
        return r
209 209
    # characters that would corrupt the tags file format
    tag_disallowed = ':\r\n'

    def tag(self, name, node, message, local, user, date):
        '''tag a revision with a symbolic name.

        if local is True, the tag is stored in a per-repository file.
        otherwise, it is stored in the .hgtags file, and a new
        changeset is committed with the change.

        keyword arguments:

        local: whether to store tag in non-version-controlled file
        (default False)

        message: commit message to use if committing

        user: name of user to use if committing

        date: date tuple to use if committing'''

        for c in self.tag_disallowed:
            if c in name:
                raise util.Abort(_('%r cannot be used in a tag name') % c)

        self.hook('pretag', throw=True, node=hex(node), tag=name, local=local)

        if local:
            # local tags are stored in the current charset
            self.opener('localtags', 'a').write('%s %s\n' % (hex(node), name))
            self.hook('tag', node=hex(node), tag=name, local=local)
            return

        # refuse to tag while .hgtags has uncommitted changes, since the
        # commit below would sweep them in
        for x in self.status()[:5]:
            if '.hgtags' in x:
                raise util.Abort(_('working copy of .hgtags is changed '
                                   '(please commit .hgtags manually)'))

        # committed tags are stored in UTF-8
        line = '%s %s\n' % (hex(node), util.fromlocal(name))
        self.wfile('.hgtags', 'ab').write(line)
        if self.dirstate.state('.hgtags') == '?':
            self.add(['.hgtags'])

        self.commit(['.hgtags'], message, user, date)
        self.hook('tag', node=hex(node), tag=name, local=local)
255 255
256 256 def tags(self):
257 257 '''return a mapping of tag to node'''
258 258 if not self.tagscache:
259 259 self.tagscache = {}
260 260
261 261 def parsetag(line, context):
262 262 if not line:
263 263 return
264 264 s = l.split(" ", 1)
265 265 if len(s) != 2:
266 266 self.ui.warn(_("%s: cannot parse entry\n") % context)
267 267 return
268 268 node, key = s
269 269 key = util.tolocal(key.strip()) # stored in UTF-8
270 270 try:
271 271 bin_n = bin(node)
272 272 except TypeError:
273 273 self.ui.warn(_("%s: node '%s' is not well formed\n") %
274 274 (context, node))
275 275 return
276 276 if bin_n not in self.changelog.nodemap:
277 277 self.ui.warn(_("%s: tag '%s' refers to unknown node\n") %
278 278 (context, key))
279 279 return
280 280 self.tagscache[key] = bin_n
281 281
282 282 # read the tags file from each head, ending with the tip,
283 283 # and add each tag found to the map, with "newer" ones
284 284 # taking precedence
285 285 f = None
286 286 for rev, node, fnode in self._hgtagsnodes():
287 287 f = (f and f.filectx(fnode) or
288 288 self.filectx('.hgtags', fileid=fnode))
289 289 count = 0
290 290 for l in f.data().splitlines():
291 291 count += 1
292 292 parsetag(l, _("%s, line %d") % (str(f), count))
293 293
294 294 try:
295 295 f = self.opener("localtags")
296 296 count = 0
297 297 for l in f:
298 298 # localtags are stored in the local character set
299 299 # while the internal tag table is stored in UTF-8
300 300 l = util.fromlocal(l)
301 301 count += 1
302 302 parsetag(l, _("localtags, line %d") % count)
303 303 except IOError:
304 304 pass
305 305
306 306 self.tagscache['tip'] = self.changelog.tip()
307 307
308 308 return self.tagscache
309 309
    def _hgtagsnodes(self):
        """Return [(rev, node, fnode)] for each head that carries a
        .hgtags file, ordered from oldest to newest head.

        fnode is the file node of .hgtags in that head.  When several
        heads share the same .hgtags file node, only the newest
        occurrence is kept.
        """
        heads = self.heads()
        heads.reverse()
        last = {}
        ret = []
        for node in heads:
            c = self.changectx(node)
            rev = c.rev()
            try:
                fnode = c.filenode('.hgtags')
            except revlog.LookupError:
                # this head has no .hgtags file
                continue
            ret.append((rev, node, fnode))
            if fnode in last:
                # a previous (older) head produced the same .hgtags
                # revision; drop that earlier entry
                ret[last[fnode]] = None
            last[fnode] = len(ret) - 1
        return [item for item in ret if item]
327 327
328 328 def tagslist(self):
329 329 '''return a list of tags ordered by revision'''
330 330 l = []
331 331 for t, n in self.tags().items():
332 332 try:
333 333 r = self.changelog.rev(n)
334 334 except:
335 335 r = -2 # sort to the beginning of the list if unknown
336 336 l.append((r, t, n))
337 337 l.sort()
338 338 return [(t, n) for r, t, n in l]
339 339
340 340 def nodetags(self, node):
341 341 '''return the tags associated with a node'''
342 342 if not self.nodetagscache:
343 343 self.nodetagscache = {}
344 344 for t, n in self.tags().items():
345 345 self.nodetagscache.setdefault(n, []).append(t)
346 346 return self.nodetagscache.get(node, [])
347 347
    def _branchtags(self):
        # read the on-disk branch cache and bring it up to date with
        # revisions added since it was written
        partial, last, lrev = self._readbranchcache()

        tiprev = self.changelog.count() - 1
        if lrev != tiprev:
            self._updatebranchcache(partial, lrev+1, tiprev+1)
            self._writebranchcache(partial, self.changelog.tip(), tiprev)

        return partial

    def branchtags(self):
        """Return a mapping of branch name (local charset) -> newest
        node on that branch, cached on the instance."""
        if self.branchcache is not None:
            return self.branchcache

        self.branchcache = {} # avoid recursion in changectx
        partial = self._branchtags()

        # the branch cache is stored on disk as UTF-8, but in the local
        # charset internally
        for k, v in partial.items():
            self.branchcache[util.tolocal(k)] = v
        return self.branchcache

    def _readbranchcache(self):
        """Read .hg/branches.cache.

        Format: first line '<hex tip> <tiprev>', then one
        '<hex node> <branch label>' per line.  Returns
        (partial, last, lrev); any parse failure or tip mismatch
        invalidates the cache and yields ({}, nullid, nullrev).
        """
        partial = {}
        try:
            f = self.opener("branches.cache")
            lines = f.read().split('\n')
            f.close()
            last, lrev = lines.pop(0).rstrip().split(" ", 1)
            last, lrev = bin(last), int(lrev)
            if not (lrev < self.changelog.count() and
                    self.changelog.node(lrev) == last): # sanity check
                # invalidate the cache
                raise ValueError('Invalid branch cache: unknown tip')
            for l in lines:
                if not l: continue
                node, label = l.rstrip().split(" ", 1)
                partial[label] = bin(node)
        except (KeyboardInterrupt, util.SignalInterrupt):
            raise
        except Exception, inst:
            # any other failure means the cache is unusable; start fresh
            if self.ui.debugflag:
                self.ui.warn(str(inst), '\n')
            partial, last, lrev = {}, nullid, nullrev
        return partial, last, lrev

    def _writebranchcache(self, branches, tip, tiprev):
        # best effort: failing to write the cache is not fatal
        try:
            f = self.opener("branches.cache", "w")
            f.write("%s %s\n" % (hex(tip), tiprev))
            for label, node in branches.iteritems():
                f.write("%s %s\n" % (hex(node), label))
        except IOError:
            pass

    def _updatebranchcache(self, partial, start, end):
        # scan revisions [start, end) and record the latest node seen
        # for each named branch
        for r in xrange(start, end):
            c = self.changectx(r)
            b = c.branch()
            if b:
                partial[b] = c.node()
410 410
    def lookup(self, key):
        """Resolve key to a changelog node.

        Tries in order: '.' (first dirstate parent), 'null', an exact
        changelog match, a tag name, a branch name, and finally an
        unambiguous partial match.  Raises repo.RepoError when nothing
        matches.
        """
        if key == '.':
            key = self.dirstate.parents()[0]
            if key == nullid:
                raise repo.RepoError(_("no revision checked out"))
        elif key == 'null':
            return nullid
        n = self.changelog._match(key)
        if n:
            return n
        if key in self.tags():
            return self.tags()[key]
        if key in self.branchtags():
            return self.branchtags()[key]
        n = self.changelog._partialmatch(key)
        if n:
            return n
        raise repo.RepoError(_("unknown revision '%s'") % key)
429 429
    def dev(self):
        # device number of the .hg directory
        return os.lstat(self.path).st_dev

    def local(self):
        # this is a local (filesystem) repository
        return True

    def join(self, f):
        # path of f inside the .hg directory
        return os.path.join(self.path, f)

    def sjoin(self, f):
        # path of f inside the store, with filename encoding applied
        f = self.encodefn(f)
        return os.path.join(self.spath, f)

    def wjoin(self, f):
        # path of f inside the working directory
        return os.path.join(self.root, f)
445 445
446 446 def file(self, f):
447 447 if f[0] == '/':
448 448 f = f[1:]
449 449 return filelog.filelog(self.sopener, f, self.revlogversion)
450 450
    def changectx(self, changeid=None):
        """Return a context.changectx for changeid."""
        return context.changectx(self, changeid)

    def workingctx(self):
        """Return a context object for the working directory."""
        return context.workingctx(self)

    def parents(self, changeid=None):
        '''
        get list of changectxs for parents of changeid or working directory
        '''
        if changeid is None:
            pl = self.dirstate.parents()
        else:
            n = self.changelog.lookup(changeid)
            pl = self.changelog.parents(n)
        # omit a null second parent
        if pl[1] == nullid:
            return [self.changectx(pl[0])]
        return [self.changectx(pl[0]), self.changectx(pl[1])]

    def filectx(self, path, changeid=None, fileid=None):
        """changeid can be a changeset revision, node, or tag.
        fileid can be a file revision or node."""
        return context.filectx(self, path, changeid, fileid)

    def getcwd(self):
        # current directory relative to the repository root
        return self.dirstate.getcwd()

    def wfile(self, f, mode='r'):
        # open a file from the working directory
        return self.wopener(f, mode)
480 480
    def wread(self, filename):
        """Read filename from the working directory, applying the first
        matching [encode] filter from the configuration."""
        if self.encodepats == None:
            # compile the [encode] patterns once, lazily
            l = []
            for pat, cmd in self.ui.configitems("encode"):
                mf = util.matcher(self.root, "", [pat], [], [])[1]
                l.append((mf, cmd))
            self.encodepats = l

        data = self.wopener(filename, 'r').read()

        # only the first matching filter is applied
        for mf, cmd in self.encodepats:
            if mf(filename):
                self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
                data = util.filter(data, cmd)
                break

        return data

    def wwrite(self, filename, data, fd=None):
        """Write data to filename in the working directory, applying the
        first matching [decode] filter.  If fd is given, write to it
        instead of opening the file."""
        if self.decodepats == None:
            # compile the [decode] patterns once, lazily
            l = []
            for pat, cmd in self.ui.configitems("decode"):
                mf = util.matcher(self.root, "", [pat], [], [])[1]
                l.append((mf, cmd))
            self.decodepats = l

        # only the first matching filter is applied
        for mf, cmd in self.decodepats:
            if mf(filename):
                self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
                data = util.filter(data, cmd)
                break

        if fd:
            return fd.write(data)
        return self.wopener(filename, 'w').write(data)
516 516
    def transaction(self):
        """Open (or nest into) a transaction on the store.

        Saves the current dirstate so rollback() can restore it, and
        arranges for journal files to be renamed to their 'undo'
        counterparts after the transaction completes.
        """
        tr = self.transhandle
        if tr != None and tr.running():
            # a transaction is already open: nest inside it
            return tr.nest()

        # save dirstate for rollback
        try:
            ds = self.opener("dirstate").read()
        except IOError:
            ds = ""
        self.opener("journal.dirstate", "w").write(ds)

        renames = [(self.sjoin("journal"), self.sjoin("undo")),
                   (self.join("journal.dirstate"), self.join("undo.dirstate"))]
        tr = transaction.transaction(self.ui.warn, self.sopener,
                                     self.sjoin("journal"),
                                     aftertrans(renames))
        self.transhandle = tr
        return tr

    def recover(self):
        """Roll back an interrupted transaction from the journal.
        Returns True when a journal was found and rolled back."""
        l = self.lock()
        if os.path.exists(self.sjoin("journal")):
            self.ui.status(_("rolling back interrupted transaction\n"))
            transaction.rollback(self.sopener, self.sjoin("journal"))
            self.reload()
            return True
        else:
            self.ui.warn(_("no interrupted transaction available\n"))
            return False

    def rollback(self, wlock=None):
        """Undo the last completed transaction using the 'undo' journal,
        restoring the dirstate that was saved alongside it."""
        if not wlock:
            wlock = self.wlock()
        l = self.lock()
        if os.path.exists(self.sjoin("undo")):
            self.ui.status(_("rolling back last transaction\n"))
            transaction.rollback(self.sopener, self.sjoin("undo"))
            util.rename(self.join("undo.dirstate"), self.join("dirstate"))
            self.reload()
            self.wreload()
        else:
            self.ui.warn(_("no rollback information available\n"))

    def wreload(self):
        # re-read the dirstate from disk
        self.dirstate.read()

    def reload(self):
        # re-read store data and drop the tag caches
        self.changelog.load()
        self.manifest.load()
        self.tagscache = None
        self.nodetagscache = None

    def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
                desc=None):
        """Acquire the named lock; with wait, retry with the ui.timeout
        setting (default 600 seconds) when another process holds it."""
        try:
            l = lock.lock(lockname, 0, releasefn, desc=desc)
        except lock.LockHeld, inst:
            if not wait:
                raise
            self.ui.warn(_("waiting for lock on %s held by %r\n") %
                         (desc, inst.locker))
            # default to 600 seconds timeout
            l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
                          releasefn, desc=desc)
        if acquirefn:
            acquirefn()
        return l

    def lock(self, wait=1):
        # store lock; reloads store data on acquisition
        return self.do_lock(self.sjoin("lock"), wait, acquirefn=self.reload,
                            desc=_('repository %s') % self.origroot)

    def wlock(self, wait=1):
        # working directory lock; writes dirstate on release, re-reads
        # it on acquisition
        return self.do_lock(self.join("wlock"), wait, self.dirstate.write,
                            self.wreload,
                            desc=_('working directory of %s') % self.origroot)
594 594
    def filecommit(self, fn, manifest1, manifest2, linkrev, transaction, changelist):
        """
        commit an individual file as part of a larger transaction

        Returns the file node for fn: the existing parent node when the
        file is unchanged, otherwise the newly added revision.  Appends
        fn to changelist when a new revision is created.
        """

        t = self.wread(fn)
        fl = self.file(fn)
        fp1 = manifest1.get(fn, nullid)
        fp2 = manifest2.get(fn, nullid)

        meta = {}
        cp = self.dirstate.copied(fn)
        if cp:
            # fn was copied/renamed from cp: record copy metadata and
            # detach from the regular parent lineage
            meta["copy"] = cp
            if not manifest2: # not a branch merge
                meta["copyrev"] = hex(manifest1.get(cp, nullid))
                fp2 = nullid
            elif fp2 != nullid: # copied on remote side
                meta["copyrev"] = hex(manifest1.get(cp, nullid))
            elif fp1 != nullid: # copied on local side, reversed
                meta["copyrev"] = hex(manifest2.get(cp))
                fp2 = nullid
            else: # directory rename
                meta["copyrev"] = hex(manifest1.get(cp, nullid))
            self.ui.debug(_(" %s: copy %s:%s\n") %
                          (fn, cp, meta["copyrev"]))
            fp1 = nullid
        elif fp2 != nullid:
            # is one parent an ancestor of the other?
            fpa = fl.ancestor(fp1, fp2)
            if fpa == fp1:
                fp1, fp2 = fp2, nullid
            elif fpa == fp2:
                fp2 = nullid

        # is the file unmodified from the parent? report existing entry
        if fp2 == nullid and not fl.cmp(fp1, t):
            return fp1

        changelist.append(fn)
        return fl.add(t, meta, transaction, linkrev, fp1, fp2)
636 636
637 637 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
638 638 if p1 is None:
639 639 p1, p2 = self.dirstate.parents()
640 640 return self.commit(files=files, text=text, user=user, date=date,
641 641 p1=p1, p2=p2, wlock=wlock)
642 642
    def commit(self, files=None, text="", user=None, date=None,
               match=util.always, force=False, lock=None, wlock=None,
               force_editor=False, p1=None, p2=None, extra={}):
        """Create a new changeset and return its node.

        Returns None when there is nothing to commit or the commit
        message comes back empty from the editor.  With p1 None (the
        normal case) the dirstate supplies parents and file lists;
        otherwise this is a rawcommit and `files` is used as given.
        The mutable default for `extra` is copied immediately, so it is
        not shared between calls.
        """

        commit = []
        remove = []
        changed = []
        use_dirstate = (p1 is None) # not rawcommit
        extra = extra.copy()

        if use_dirstate:
            if files:
                # classify the explicit file list by dirstate state
                for f in files:
                    s = self.dirstate.state(f)
                    if s in 'nmai':
                        commit.append(f)
                    elif s == 'r':
                        remove.append(f)
                    else:
                        self.ui.warn(_("%s not tracked!\n") % f)
            else:
                changes = self.status(match=match)[:5]
                modified, added, removed, deleted, unknown = changes
                commit = modified + added
                remove = removed
        else:
            commit = files

        if use_dirstate:
            p1, p2 = self.dirstate.parents()
            update_dirstate = True
        else:
            p1, p2 = p1, p2 or nullid
            update_dirstate = (self.dirstate.parents()[0] == p1)

        c1 = self.changelog.read(p1)
        c2 = self.changelog.read(p2)
        m1 = self.manifest.read(c1[0]).copy()
        m2 = self.manifest.read(c2[0])

        if use_dirstate:
            branchname = self.workingctx().branch()
            try:
                # reject branch names that are not valid UTF-8
                branchname = branchname.decode('UTF-8').encode('UTF-8')
            except UnicodeDecodeError:
                raise util.Abort(_('branch name not in UTF-8!'))
        else:
            branchname = ""

        if use_dirstate:
            oldname = c1[5].get("branch", "") # stored in UTF-8
            if not commit and not remove and not force and p2 == nullid and \
                   branchname == oldname:
                self.ui.status(_("nothing changed\n"))
                return None

        xp1 = hex(p1)
        if p2 == nullid: xp2 = ''
        else: xp2 = hex(p2)

        self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)

        if not wlock:
            wlock = self.wlock()
        if not lock:
            lock = self.lock()
        tr = self.transaction()

        # check in files
        new = {}
        linkrev = self.changelog.count()
        commit.sort()
        for f in commit:
            self.ui.note(f + "\n")
            try:
                new[f] = self.filecommit(f, m1, m2, linkrev, tr, changed)
                m1.set(f, util.is_exec(self.wjoin(f), m1.execf(f)))
            except IOError:
                if use_dirstate:
                    self.ui.warn(_("trouble committing %s!\n") % f)
                    raise
                else:
                    # rawcommit: a file that cannot be read becomes a
                    # removal instead of aborting
                    remove.append(f)

        # update manifest
        m1.update(new)
        remove.sort()

        for f in remove:
            if f in m1:
                del m1[f]
        mn = self.manifest.add(m1, tr, linkrev, c1[0], c2[0], (new, remove))

        # add changeset
        new = new.keys()
        new.sort()

        user = user or self.ui.username()
        if not text or force_editor:
            # build a message template and run the user's editor
            edittext = []
            if text:
                edittext.append(text)
            edittext.append("")
            edittext.append("HG: user: %s" % user)
            if p2 != nullid:
                edittext.append("HG: branch merge")
            edittext.extend(["HG: changed %s" % f for f in changed])
            edittext.extend(["HG: removed %s" % f for f in remove])
            if not changed and not remove:
                edittext.append("HG: no files changed")
            edittext.append("")
            # run editor in the repository root
            olddir = os.getcwd()
            os.chdir(self.root)
            text = self.ui.edit("\n".join(edittext), user)
            os.chdir(olddir)

        # strip leading blank lines; an empty message aborts the commit
        lines = [line.rstrip() for line in text.rstrip().splitlines()]
        while lines and not lines[0]:
            del lines[0]
        if not lines:
            return None
        text = '\n'.join(lines)
        if branchname:
            extra["branch"] = branchname
        n = self.changelog.add(mn, changed + remove, text, tr, p1, p2,
                               user, date, extra)
        self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
                  parent2=xp2)
        tr.close()

        if use_dirstate or update_dirstate:
            self.dirstate.setparents(n)
            if use_dirstate:
                self.dirstate.update(new, "n")
                self.dirstate.forget(remove)

        self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
        return n
782 782
    def walk(self, node=None, files=[], match=util.always, badmatch=None):
        '''
        walk recursively through the directory tree or a given
        changeset, finding all files matched by the match
        function

        results are yielded in a tuple (src, filename), where src
        is one of:
        'f' the file was found in the directory tree
        'm' the file was only in the dirstate and not in the tree
        'b' file was not found and matched badmatch
        '''
        # NOTE: the mutable default for `files` is never modified here
        # (only the derived fdict is), so it is safe

        if node:
            # walk the manifest of the given changeset
            fdict = dict.fromkeys(files)
            for fn in self.manifest.read(self.changelog.read(node)[0]):
                for ffn in fdict:
                    # match if the file is the exact name or a directory
                    if ffn == fn or fn.startswith("%s/" % ffn):
                        del fdict[ffn]
                        break
                if match(fn):
                    yield 'm', fn
            # anything left in fdict was not found in the manifest
            for fn in fdict:
                if badmatch and badmatch(fn):
                    if match(fn):
                        yield 'b', fn
                else:
                    self.ui.warn(_('%s: No such file in rev %s\n') % (
                        util.pathto(self.getcwd(), fn), short(node)))
        else:
            # walk the working directory via the dirstate
            for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
                yield src, fn
816 816
    def status(self, node1=None, node2=None, files=[], match=util.always,
               wlock=None, list_ignored=False, list_clean=False):
        """return status of files between two nodes or node and working directory

        If node1 is None, use the first dirstate parent instead.
        If node2 is None, compare node1 with working directory.

        Returns the 7-tuple (modified, added, removed, deleted, unknown,
        ignored, clean), each a sorted list of file names; ignored and
        clean are only populated when the corresponding flag is set.
        """

        def fcmp(fn, mf):
            # true when the working-dir content differs from mf's version
            t1 = self.wread(fn)
            return self.file(fn).cmp(mf.get(fn, nullid), t1)

        def mfmatches(node):
            # manifest of node restricted to files accepted by match
            change = self.changelog.read(node)
            mf = self.manifest.read(change[0]).copy()
            for fn in mf.keys():
                if not match(fn):
                    del mf[fn]
            return mf

        modified, added, removed, deleted, unknown = [], [], [], [], []
        ignored, clean = [], []

        compareworking = False
        if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
            compareworking = True

        if not compareworking:
            # read the manifest from node1 before the manifest from node2,
            # so that we'll hit the manifest cache if we're going through
            # all the revisions in parent->child order.
            mf1 = mfmatches(node1)

        # are we comparing the working directory?
        if not node2:
            if not wlock:
                try:
                    wlock = self.wlock(wait=0)
                except lock.LockException:
                    # best effort: without the lock we just skip the
                    # dirstate update below
                    wlock = None
            (lookup, modified, added, removed, deleted, unknown,
             ignored, clean) = self.dirstate.status(files, match,
                                                    list_ignored, list_clean)

            # are we comparing working dir against its parent?
            if compareworking:
                if lookup:
                    # do a full compare of any files that might have changed
                    mf2 = mfmatches(self.dirstate.parents()[0])
                    for f in lookup:
                        if fcmp(f, mf2):
                            modified.append(f)
                        else:
                            clean.append(f)
                            if wlock is not None:
                                self.dirstate.update([f], "n")
            else:
                # we are comparing working dir against non-parent
                # generate a pseudo-manifest for the working dir
                # XXX: create it in dirstate.py ?
                mf2 = mfmatches(self.dirstate.parents()[0])
                for f in lookup + modified + added:
                    mf2[f] = ""
                    mf2.set(f, execf=util.is_exec(self.wjoin(f), mf2.execf(f)))
                for f in removed:
                    if f in mf2:
                        del mf2[f]
        else:
            # we are comparing two revisions
            mf2 = mfmatches(node2)

        if not compareworking:
            # flush lists from dirstate before comparing manifests
            modified, added, clean = [], [], []

            # make sure to sort the files so we talk to the disk in a
            # reasonable order
            mf2keys = mf2.keys()
            mf2keys.sort()
            for fn in mf2keys:
                if mf1.has_key(fn):
                    if mf1.flags(fn) != mf2.flags(fn) or \
                       (mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1))):
                        modified.append(fn)
                    elif list_clean:
                        clean.append(fn)
                    del mf1[fn]
                else:
                    added.append(fn)

            # whatever is left only existed in mf1: removed
            removed = mf1.keys()

        # sort and return results:
        for l in modified, added, removed, deleted, unknown, ignored, clean:
            l.sort()
        return (modified, added, removed, deleted, unknown, ignored, clean)
913 913
914 914 def add(self, list, wlock=None):
915 915 if not wlock:
916 916 wlock = self.wlock()
917 917 for f in list:
918 918 p = self.wjoin(f)
919 919 if not os.path.exists(p):
920 920 self.ui.warn(_("%s does not exist!\n") % f)
921 921 elif not os.path.isfile(p):
922 922 self.ui.warn(_("%s not added: only files supported currently\n")
923 923 % f)
924 924 elif self.dirstate.state(f) in 'an':
925 925 self.ui.warn(_("%s already tracked!\n") % f)
926 926 else:
927 927 self.dirstate.update([f], "a")
928 928
929 929 def forget(self, list, wlock=None):
930 930 if not wlock:
931 931 wlock = self.wlock()
932 932 for f in list:
933 933 if self.dirstate.state(f) not in 'ai':
934 934 self.ui.warn(_("%s not added!\n") % f)
935 935 else:
936 936 self.dirstate.forget([f])
937 937
938 938 def remove(self, list, unlink=False, wlock=None):
939 939 if unlink:
940 940 for f in list:
941 941 try:
942 942 util.unlink(self.wjoin(f))
943 943 except OSError, inst:
944 944 if inst.errno != errno.ENOENT:
945 945 raise
946 946 if not wlock:
947 947 wlock = self.wlock()
948 948 for f in list:
949 949 p = self.wjoin(f)
950 950 if os.path.exists(p):
951 951 self.ui.warn(_("%s still exists!\n") % f)
952 952 elif self.dirstate.state(f) == 'a':
953 953 self.dirstate.forget([f])
954 954 elif f not in self.dirstate:
955 955 self.ui.warn(_("%s not tracked!\n") % f)
956 956 else:
957 957 self.dirstate.update([f], "r")
958 958
    def undelete(self, list, wlock=None):
        """Restore files marked 'r' (removed) back into the working
        directory from the first dirstate parent's manifest."""
        p = self.dirstate.parents()[0]
        mn = self.changelog.read(p)[0]
        m = self.manifest.read(mn)
        if not wlock:
            wlock = self.wlock()
        for f in list:
            if self.dirstate.state(f) not in  "r":
                self.ui.warn("%s not removed!\n" % f)
            else:
                # rewrite content and exec bit, then mark as normal
                t = self.file(f).read(m[f])
                self.wwrite(f, t)
                util.set_exec(self.wjoin(f), m.execf(f))
                self.dirstate.update([f], "n")
973 973
974 974 def copy(self, source, dest, wlock=None):
975 975 p = self.wjoin(dest)
976 976 if not os.path.exists(p):
977 977 self.ui.warn(_("%s does not exist!\n") % dest)
978 978 elif not os.path.isfile(p):
979 979 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
980 980 else:
981 981 if not wlock:
982 982 wlock = self.wlock()
983 983 if self.dirstate.state(dest) == '?':
984 984 self.dirstate.update([dest], "a")
985 985 self.dirstate.copy(source, dest)
986 986
987 987 def heads(self, start=None):
988 988 heads = self.changelog.heads(start)
989 989 # sort the output in rev descending order
990 990 heads = [(-self.changelog.rev(h), h) for h in heads]
991 991 heads.sort()
992 992 return [n for (r, n) in heads]
993 993
994 994 def branches(self, nodes):
995 995 if not nodes:
996 996 nodes = [self.changelog.tip()]
997 997 b = []
998 998 for n in nodes:
999 999 t = n
1000 1000 while 1:
1001 1001 p = self.changelog.parents(n)
1002 1002 if p[1] != nullid or p[0] == nullid:
1003 1003 b.append((t, n, p[0], p[1]))
1004 1004 break
1005 1005 n = p[0]
1006 1006 return b
1007 1007
1008 1008 def between(self, pairs):
1009 1009 r = []
1010 1010
1011 1011 for top, bottom in pairs:
1012 1012 n, l, i = top, [], 0
1013 1013 f = 1
1014 1014
1015 1015 while n != bottom:
1016 1016 p = self.changelog.parents(n)[0]
1017 1017 if i == f:
1018 1018 l.append(n)
1019 1019 f = f * 2
1020 1020 n = p
1021 1021 i += 1
1022 1022
1023 1023 r.append(l)
1024 1024
1025 1025 return r
1026 1026
1027 1027 def findincoming(self, remote, base=None, heads=None, force=False):
1028 1028 """Return list of roots of the subsets of missing nodes from remote
1029 1029
1030 1030 If base dict is specified, assume that these nodes and their parents
1031 1031 exist on the remote side and that no child of a node of base exists
1032 1032 in both remote and self.
1033 1033 Furthermore base will be updated to include the nodes that exists
1034 1034 in self and remote but no children exists in self and remote.
1035 1035 If a list of heads is specified, return only nodes which are heads
1036 1036 or ancestors of these heads.
1037 1037
1038 1038 All the ancestors of base are in self and in remote.
1039 1039 All the descendants of the list returned are missing in self.
1040 1040 (and so we know that the rest of the nodes are missing in remote, see
1041 1041 outgoing)
1042 1042 """
1043 1043 m = self.changelog.nodemap
1044 1044 search = []
1045 1045 fetch = {}
1046 1046 seen = {}
1047 1047 seenbranch = {}
1048 1048 if base == None:
1049 1049 base = {}
1050 1050
1051 1051 if not heads:
1052 1052 heads = remote.heads()
1053 1053
1054 1054 if self.changelog.tip() == nullid:
1055 1055 base[nullid] = 1
1056 1056 if heads != [nullid]:
1057 1057 return [nullid]
1058 1058 return []
1059 1059
1060 1060 # assume we're closer to the tip than the root
1061 1061 # and start by examining the heads
1062 1062 self.ui.status(_("searching for changes\n"))
1063 1063
1064 1064 unknown = []
1065 1065 for h in heads:
1066 1066 if h not in m:
1067 1067 unknown.append(h)
1068 1068 else:
1069 1069 base[h] = 1
1070 1070
1071 1071 if not unknown:
1072 1072 return []
1073 1073
1074 1074 req = dict.fromkeys(unknown)
1075 1075 reqcnt = 0
1076 1076
1077 1077 # search through remote branches
1078 1078 # a 'branch' here is a linear segment of history, with four parts:
1079 1079 # head, root, first parent, second parent
1080 1080 # (a branch always has two parents (or none) by definition)
1081 1081 unknown = remote.branches(unknown)
1082 1082 while unknown:
1083 1083 r = []
1084 1084 while unknown:
1085 1085 n = unknown.pop(0)
1086 1086 if n[0] in seen:
1087 1087 continue
1088 1088
1089 1089 self.ui.debug(_("examining %s:%s\n")
1090 1090 % (short(n[0]), short(n[1])))
1091 1091 if n[0] == nullid: # found the end of the branch
1092 1092 pass
1093 1093 elif n in seenbranch:
1094 1094 self.ui.debug(_("branch already found\n"))
1095 1095 continue
1096 1096 elif n[1] and n[1] in m: # do we know the base?
1097 1097 self.ui.debug(_("found incomplete branch %s:%s\n")
1098 1098 % (short(n[0]), short(n[1])))
1099 1099 search.append(n) # schedule branch range for scanning
1100 1100 seenbranch[n] = 1
1101 1101 else:
1102 1102 if n[1] not in seen and n[1] not in fetch:
1103 1103 if n[2] in m and n[3] in m:
1104 1104 self.ui.debug(_("found new changeset %s\n") %
1105 1105 short(n[1]))
1106 1106 fetch[n[1]] = 1 # earliest unknown
1107 1107 for p in n[2:4]:
1108 1108 if p in m:
1109 1109 base[p] = 1 # latest known
1110 1110
1111 1111 for p in n[2:4]:
1112 1112 if p not in req and p not in m:
1113 1113 r.append(p)
1114 1114 req[p] = 1
1115 1115 seen[n[0]] = 1
1116 1116
1117 1117 if r:
1118 1118 reqcnt += 1
1119 1119 self.ui.debug(_("request %d: %s\n") %
1120 1120 (reqcnt, " ".join(map(short, r))))
1121 1121 for p in xrange(0, len(r), 10):
1122 1122 for b in remote.branches(r[p:p+10]):
1123 1123 self.ui.debug(_("received %s:%s\n") %
1124 1124 (short(b[0]), short(b[1])))
1125 1125 unknown.append(b)
1126 1126
1127 1127 # do binary search on the branches we found
1128 1128 while search:
1129 1129 n = search.pop(0)
1130 1130 reqcnt += 1
1131 1131 l = remote.between([(n[0], n[1])])[0]
1132 1132 l.append(n[1])
1133 1133 p = n[0]
1134 1134 f = 1
1135 1135 for i in l:
1136 1136 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1137 1137 if i in m:
1138 1138 if f <= 2:
1139 1139 self.ui.debug(_("found new branch changeset %s\n") %
1140 1140 short(p))
1141 1141 fetch[p] = 1
1142 1142 base[i] = 1
1143 1143 else:
1144 1144 self.ui.debug(_("narrowed branch search to %s:%s\n")
1145 1145 % (short(p), short(i)))
1146 1146 search.append((p, i))
1147 1147 break
1148 1148 p, f = i, f * 2
1149 1149
1150 1150 # sanity check our fetch list
1151 1151 for f in fetch.keys():
1152 1152 if f in m:
1153 1153 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1154 1154
1155 1155 if base.keys() == [nullid]:
1156 1156 if force:
1157 1157 self.ui.warn(_("warning: repository is unrelated\n"))
1158 1158 else:
1159 1159 raise util.Abort(_("repository is unrelated"))
1160 1160
1161 1161 self.ui.debug(_("found new changesets starting at ") +
1162 1162 " ".join([short(f) for f in fetch]) + "\n")
1163 1163
1164 1164 self.ui.debug(_("%d total queries\n") % reqcnt)
1165 1165
1166 1166 return fetch.keys()
1167 1167
1168 1168 def findoutgoing(self, remote, base=None, heads=None, force=False):
1169 1169 """Return list of nodes that are roots of subsets not in remote
1170 1170
1171 1171 If base dict is specified, assume that these nodes and their parents
1172 1172 exist on the remote side.
1173 1173 If a list of heads is specified, return only nodes which are heads
1174 1174 or ancestors of these heads, and return a second element which
1175 1175 contains all remote heads which get new children.
1176 1176 """
1177 1177 if base == None:
1178 1178 base = {}
1179 1179 self.findincoming(remote, base, heads, force=force)
1180 1180
1181 1181 self.ui.debug(_("common changesets up to ")
1182 1182 + " ".join(map(short, base.keys())) + "\n")
1183 1183
1184 1184 remain = dict.fromkeys(self.changelog.nodemap)
1185 1185
1186 1186 # prune everything remote has from the tree
1187 1187 del remain[nullid]
1188 1188 remove = base.keys()
1189 1189 while remove:
1190 1190 n = remove.pop(0)
1191 1191 if n in remain:
1192 1192 del remain[n]
1193 1193 for p in self.changelog.parents(n):
1194 1194 remove.append(p)
1195 1195
1196 1196 # find every node whose parents have been pruned
1197 1197 subset = []
1198 1198 # find every remote head that will get new children
1199 1199 updated_heads = {}
1200 1200 for n in remain:
1201 1201 p1, p2 = self.changelog.parents(n)
1202 1202 if p1 not in remain and p2 not in remain:
1203 1203 subset.append(n)
1204 1204 if heads:
1205 1205 if p1 in heads:
1206 1206 updated_heads[p1] = True
1207 1207 if p2 in heads:
1208 1208 updated_heads[p2] = True
1209 1209
1210 1210 # this is the set of all roots we have to push
1211 1211 if heads:
1212 1212 return subset, updated_heads.keys()
1213 1213 else:
1214 1214 return subset
1215 1215
1216 1216 def pull(self, remote, heads=None, force=False, lock=None):
1217 1217 mylock = False
1218 1218 if not lock:
1219 1219 lock = self.lock()
1220 1220 mylock = True
1221 1221
1222 1222 try:
1223 1223 fetch = self.findincoming(remote, force=force)
1224 1224 if fetch == [nullid]:
1225 1225 self.ui.status(_("requesting all changes\n"))
1226 1226
1227 1227 if not fetch:
1228 1228 self.ui.status(_("no changes found\n"))
1229 1229 return 0
1230 1230
1231 1231 if heads is None:
1232 1232 cg = remote.changegroup(fetch, 'pull')
1233 1233 else:
1234 1234 if 'changegroupsubset' not in remote.capabilities:
1235 1235 raise util.Abort(_("Partial pull cannot be done because other repository doesn't support changegroupsubset."))
1236 1236 cg = remote.changegroupsubset(fetch, heads, 'pull')
1237 1237 return self.addchangegroup(cg, 'pull', remote.url())
1238 1238 finally:
1239 1239 if mylock:
1240 1240 lock.release()
1241 1241
1242 1242 def push(self, remote, force=False, revs=None):
1243 1243 # there are two ways to push to remote repo:
1244 1244 #
1245 1245 # addchangegroup assumes local user can lock remote
1246 1246 # repo (local filesystem, old ssh servers).
1247 1247 #
1248 1248 # unbundle assumes local user cannot lock remote repo (new ssh
1249 1249 # servers, http servers).
1250 1250
1251 1251 if remote.capable('unbundle'):
1252 1252 return self.push_unbundle(remote, force, revs)
1253 1253 return self.push_addchangegroup(remote, force, revs)
1254 1254
1255 1255 def prepush(self, remote, force, revs):
1256 1256 base = {}
1257 1257 remote_heads = remote.heads()
1258 1258 inc = self.findincoming(remote, base, remote_heads, force=force)
1259 1259
1260 1260 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1261 1261 if revs is not None:
1262 1262 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1263 1263 else:
1264 1264 bases, heads = update, self.changelog.heads()
1265 1265
1266 1266 if not bases:
1267 1267 self.ui.status(_("no changes found\n"))
1268 1268 return None, 1
1269 1269 elif not force:
1270 1270 # check if we're creating new remote heads
1271 1271 # to be a remote head after push, node must be either
1272 1272 # - unknown locally
1273 1273 # - a local outgoing head descended from update
1274 1274 # - a remote head that's known locally and not
1275 1275 # ancestral to an outgoing head
1276 1276
1277 1277 warn = 0
1278 1278
1279 1279 if remote_heads == [nullid]:
1280 1280 warn = 0
1281 1281 elif not revs and len(heads) > len(remote_heads):
1282 1282 warn = 1
1283 1283 else:
1284 1284 newheads = list(heads)
1285 1285 for r in remote_heads:
1286 1286 if r in self.changelog.nodemap:
1287 1287 desc = self.changelog.heads(r, heads)
1288 1288 l = [h for h in heads if h in desc]
1289 1289 if not l:
1290 1290 newheads.append(r)
1291 1291 else:
1292 1292 newheads.append(r)
1293 1293 if len(newheads) > len(remote_heads):
1294 1294 warn = 1
1295 1295
1296 1296 if warn:
1297 1297 self.ui.warn(_("abort: push creates new remote branches!\n"))
1298 1298 self.ui.status(_("(did you forget to merge?"
1299 1299 " use push -f to force)\n"))
1300 1300 return None, 1
1301 1301 elif inc:
1302 1302 self.ui.warn(_("note: unsynced remote changes!\n"))
1303 1303
1304 1304
1305 1305 if revs is None:
1306 1306 cg = self.changegroup(update, 'push')
1307 1307 else:
1308 1308 cg = self.changegroupsubset(update, revs, 'push')
1309 1309 return cg, remote_heads
1310 1310
1311 1311 def push_addchangegroup(self, remote, force, revs):
1312 1312 lock = remote.lock()
1313 1313
1314 1314 ret = self.prepush(remote, force, revs)
1315 1315 if ret[0] is not None:
1316 1316 cg, remote_heads = ret
1317 1317 return remote.addchangegroup(cg, 'push', self.url())
1318 1318 return ret[1]
1319 1319
1320 1320 def push_unbundle(self, remote, force, revs):
1321 1321 # local repo finds heads on server, finds out what revs it
1322 1322 # must push. once revs transferred, if server finds it has
1323 1323 # different heads (someone else won commit/push race), server
1324 1324 # aborts.
1325 1325
1326 1326 ret = self.prepush(remote, force, revs)
1327 1327 if ret[0] is not None:
1328 1328 cg, remote_heads = ret
1329 1329 if force: remote_heads = ['force']
1330 1330 return remote.unbundle(cg, remote_heads, 'push')
1331 1331 return ret[1]
1332 1332
1333 1333 def changegroupinfo(self, nodes):
1334 1334 self.ui.note(_("%d changesets found\n") % len(nodes))
1335 1335 if self.ui.debugflag:
1336 1336 self.ui.debug(_("List of changesets:\n"))
1337 1337 for node in nodes:
1338 1338 self.ui.debug("%s\n" % hex(node))
1339 1339
1340 1340 def changegroupsubset(self, bases, heads, source):
1341 1341 """This function generates a changegroup consisting of all the nodes
1342 1342 that are descendents of any of the bases, and ancestors of any of
1343 1343 the heads.
1344 1344
1345 1345 It is fairly complex as determining which filenodes and which
1346 1346 manifest nodes need to be included for the changeset to be complete
1347 1347 is non-trivial.
1348 1348
1349 1349 Another wrinkle is doing the reverse, figuring out which changeset in
1350 1350 the changegroup a particular filenode or manifestnode belongs to."""
1351 1351
1352 1352 self.hook('preoutgoing', throw=True, source=source)
1353 1353
1354 1354 # Set up some initial variables
1355 1355 # Make it easy to refer to self.changelog
1356 1356 cl = self.changelog
1357 1357 # msng is short for missing - compute the list of changesets in this
1358 1358 # changegroup.
1359 1359 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1360 1360 self.changegroupinfo(msng_cl_lst)
1361 1361 # Some bases may turn out to be superfluous, and some heads may be
1362 1362 # too. nodesbetween will return the minimal set of bases and heads
1363 1363 # necessary to re-create the changegroup.
1364 1364
1365 1365 # Known heads are the list of heads that it is assumed the recipient
1366 1366 # of this changegroup will know about.
1367 1367 knownheads = {}
1368 1368 # We assume that all parents of bases are known heads.
1369 1369 for n in bases:
1370 1370 for p in cl.parents(n):
1371 1371 if p != nullid:
1372 1372 knownheads[p] = 1
1373 1373 knownheads = knownheads.keys()
1374 1374 if knownheads:
1375 1375 # Now that we know what heads are known, we can compute which
1376 1376 # changesets are known. The recipient must know about all
1377 1377 # changesets required to reach the known heads from the null
1378 1378 # changeset.
1379 1379 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1380 1380 junk = None
1381 1381 # Transform the list into an ersatz set.
1382 1382 has_cl_set = dict.fromkeys(has_cl_set)
1383 1383 else:
1384 1384 # If there were no known heads, the recipient cannot be assumed to
1385 1385 # know about any changesets.
1386 1386 has_cl_set = {}
1387 1387
1388 1388 # Make it easy to refer to self.manifest
1389 1389 mnfst = self.manifest
1390 1390 # We don't know which manifests are missing yet
1391 1391 msng_mnfst_set = {}
1392 1392 # Nor do we know which filenodes are missing.
1393 1393 msng_filenode_set = {}
1394 1394
1395 1395 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1396 1396 junk = None
1397 1397
1398 1398 # A changeset always belongs to itself, so the changenode lookup
1399 1399 # function for a changenode is identity.
1400 1400 def identity(x):
1401 1401 return x
1402 1402
1403 1403 # A function generating function. Sets up an environment for the
1404 1404 # inner function.
1405 1405 def cmp_by_rev_func(revlog):
1406 1406 # Compare two nodes by their revision number in the environment's
1407 1407 # revision history. Since the revision number both represents the
1408 1408 # most efficient order to read the nodes in, and represents a
1409 1409 # topological sorting of the nodes, this function is often useful.
1410 1410 def cmp_by_rev(a, b):
1411 1411 return cmp(revlog.rev(a), revlog.rev(b))
1412 1412 return cmp_by_rev
1413 1413
1414 1414 # If we determine that a particular file or manifest node must be a
1415 1415 # node that the recipient of the changegroup will already have, we can
1416 1416 # also assume the recipient will have all the parents. This function
1417 1417 # prunes them from the set of missing nodes.
1418 1418 def prune_parents(revlog, hasset, msngset):
1419 1419 haslst = hasset.keys()
1420 1420 haslst.sort(cmp_by_rev_func(revlog))
1421 1421 for node in haslst:
1422 1422 parentlst = [p for p in revlog.parents(node) if p != nullid]
1423 1423 while parentlst:
1424 1424 n = parentlst.pop()
1425 1425 if n not in hasset:
1426 1426 hasset[n] = 1
1427 1427 p = [p for p in revlog.parents(n) if p != nullid]
1428 1428 parentlst.extend(p)
1429 1429 for n in hasset:
1430 1430 msngset.pop(n, None)
1431 1431
1432 1432 # This is a function generating function used to set up an environment
1433 1433 # for the inner function to execute in.
1434 1434 def manifest_and_file_collector(changedfileset):
1435 1435 # This is an information gathering function that gathers
1436 1436 # information from each changeset node that goes out as part of
1437 1437 # the changegroup. The information gathered is a list of which
1438 1438 # manifest nodes are potentially required (the recipient may
1439 1439 # already have them) and total list of all files which were
1440 1440 # changed in any changeset in the changegroup.
1441 1441 #
1442 1442 # We also remember the first changenode we saw any manifest
1443 1443 # referenced by so we can later determine which changenode 'owns'
1444 1444 # the manifest.
1445 1445 def collect_manifests_and_files(clnode):
1446 1446 c = cl.read(clnode)
1447 1447 for f in c[3]:
1448 1448 # This is to make sure we only have one instance of each
1449 1449 # filename string for each filename.
1450 1450 changedfileset.setdefault(f, f)
1451 1451 msng_mnfst_set.setdefault(c[0], clnode)
1452 1452 return collect_manifests_and_files
1453 1453
1454 1454 # Figure out which manifest nodes (of the ones we think might be part
1455 1455 # of the changegroup) the recipient must know about and remove them
1456 1456 # from the changegroup.
1457 1457 def prune_manifests():
1458 1458 has_mnfst_set = {}
1459 1459 for n in msng_mnfst_set:
1460 1460 # If a 'missing' manifest thinks it belongs to a changenode
1461 1461 # the recipient is assumed to have, obviously the recipient
1462 1462 # must have that manifest.
1463 1463 linknode = cl.node(mnfst.linkrev(n))
1464 1464 if linknode in has_cl_set:
1465 1465 has_mnfst_set[n] = 1
1466 1466 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1467 1467
1468 1468 # Use the information collected in collect_manifests_and_files to say
1469 1469 # which changenode any manifestnode belongs to.
1470 1470 def lookup_manifest_link(mnfstnode):
1471 1471 return msng_mnfst_set[mnfstnode]
1472 1472
1473 1473 # A function generating function that sets up the initial environment
1474 1474 # the inner function.
1475 1475 def filenode_collector(changedfiles):
1476 1476 next_rev = [0]
1477 1477 # This gathers information from each manifestnode included in the
1478 1478 # changegroup about which filenodes the manifest node references
1479 1479 # so we can include those in the changegroup too.
1480 1480 #
1481 1481 # It also remembers which changenode each filenode belongs to. It
1482 1482 # does this by assuming the a filenode belongs to the changenode
1483 1483 # the first manifest that references it belongs to.
1484 1484 def collect_msng_filenodes(mnfstnode):
1485 1485 r = mnfst.rev(mnfstnode)
1486 1486 if r == next_rev[0]:
1487 1487 # If the last rev we looked at was the one just previous,
1488 1488 # we only need to see a diff.
1489 1489 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1490 1490 # For each line in the delta
1491 1491 for dline in delta.splitlines():
1492 1492 # get the filename and filenode for that line
1493 1493 f, fnode = dline.split('\0')
1494 1494 fnode = bin(fnode[:40])
1495 1495 f = changedfiles.get(f, None)
1496 1496 # And if the file is in the list of files we care
1497 1497 # about.
1498 1498 if f is not None:
1499 1499 # Get the changenode this manifest belongs to
1500 1500 clnode = msng_mnfst_set[mnfstnode]
1501 1501 # Create the set of filenodes for the file if
1502 1502 # there isn't one already.
1503 1503 ndset = msng_filenode_set.setdefault(f, {})
1504 1504 # And set the filenode's changelog node to the
1505 1505 # manifest's if it hasn't been set already.
1506 1506 ndset.setdefault(fnode, clnode)
1507 1507 else:
1508 1508 # Otherwise we need a full manifest.
1509 1509 m = mnfst.read(mnfstnode)
1510 1510 # For every file in we care about.
1511 1511 for f in changedfiles:
1512 1512 fnode = m.get(f, None)
1513 1513 # If it's in the manifest
1514 1514 if fnode is not None:
1515 1515 # See comments above.
1516 1516 clnode = msng_mnfst_set[mnfstnode]
1517 1517 ndset = msng_filenode_set.setdefault(f, {})
1518 1518 ndset.setdefault(fnode, clnode)
1519 1519 # Remember the revision we hope to see next.
1520 1520 next_rev[0] = r + 1
1521 1521 return collect_msng_filenodes
1522 1522
1523 1523 # We have a list of filenodes we think we need for a file, lets remove
1524 1524 # all those we now the recipient must have.
1525 1525 def prune_filenodes(f, filerevlog):
1526 1526 msngset = msng_filenode_set[f]
1527 1527 hasset = {}
1528 1528 # If a 'missing' filenode thinks it belongs to a changenode we
1529 1529 # assume the recipient must have, then the recipient must have
1530 1530 # that filenode.
1531 1531 for n in msngset:
1532 1532 clnode = cl.node(filerevlog.linkrev(n))
1533 1533 if clnode in has_cl_set:
1534 1534 hasset[n] = 1
1535 1535 prune_parents(filerevlog, hasset, msngset)
1536 1536
1537 1537 # A function generator function that sets up the a context for the
1538 1538 # inner function.
1539 1539 def lookup_filenode_link_func(fname):
1540 1540 msngset = msng_filenode_set[fname]
1541 1541 # Lookup the changenode the filenode belongs to.
1542 1542 def lookup_filenode_link(fnode):
1543 1543 return msngset[fnode]
1544 1544 return lookup_filenode_link
1545 1545
1546 1546 # Now that we have all theses utility functions to help out and
1547 1547 # logically divide up the task, generate the group.
1548 1548 def gengroup():
1549 1549 # The set of changed files starts empty.
1550 1550 changedfiles = {}
1551 1551 # Create a changenode group generator that will call our functions
1552 1552 # back to lookup the owning changenode and collect information.
1553 1553 group = cl.group(msng_cl_lst, identity,
1554 1554 manifest_and_file_collector(changedfiles))
1555 1555 for chnk in group:
1556 1556 yield chnk
1557 1557
1558 1558 # The list of manifests has been collected by the generator
1559 1559 # calling our functions back.
1560 1560 prune_manifests()
1561 1561 msng_mnfst_lst = msng_mnfst_set.keys()
1562 1562 # Sort the manifestnodes by revision number.
1563 1563 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1564 1564 # Create a generator for the manifestnodes that calls our lookup
1565 1565 # and data collection functions back.
1566 1566 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1567 1567 filenode_collector(changedfiles))
1568 1568 for chnk in group:
1569 1569 yield chnk
1570 1570
1571 1571 # These are no longer needed, dereference and toss the memory for
1572 1572 # them.
1573 1573 msng_mnfst_lst = None
1574 1574 msng_mnfst_set.clear()
1575 1575
1576 1576 changedfiles = changedfiles.keys()
1577 1577 changedfiles.sort()
1578 1578 # Go through all our files in order sorted by name.
1579 1579 for fname in changedfiles:
1580 1580 filerevlog = self.file(fname)
1581 1581 # Toss out the filenodes that the recipient isn't really
1582 1582 # missing.
1583 1583 if msng_filenode_set.has_key(fname):
1584 1584 prune_filenodes(fname, filerevlog)
1585 1585 msng_filenode_lst = msng_filenode_set[fname].keys()
1586 1586 else:
1587 1587 msng_filenode_lst = []
1588 1588 # If any filenodes are left, generate the group for them,
1589 1589 # otherwise don't bother.
1590 1590 if len(msng_filenode_lst) > 0:
1591 1591 yield changegroup.genchunk(fname)
1592 1592 # Sort the filenodes by their revision #
1593 1593 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1594 1594 # Create a group generator and only pass in a changenode
1595 1595 # lookup function as we need to collect no information
1596 1596 # from filenodes.
1597 1597 group = filerevlog.group(msng_filenode_lst,
1598 1598 lookup_filenode_link_func(fname))
1599 1599 for chnk in group:
1600 1600 yield chnk
1601 1601 if msng_filenode_set.has_key(fname):
1602 1602 # Don't need this anymore, toss it to free memory.
1603 1603 del msng_filenode_set[fname]
1604 1604 # Signal that no more groups are left.
1605 1605 yield changegroup.closechunk()
1606 1606
1607 1607 if msng_cl_lst:
1608 1608 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1609 1609
1610 1610 return util.chunkbuffer(gengroup())
1611 1611
1612 1612 def changegroup(self, basenodes, source):
1613 1613 """Generate a changegroup of all nodes that we have that a recipient
1614 1614 doesn't.
1615 1615
1616 1616 This is much easier than the previous function as we can assume that
1617 1617 the recipient has any changenode we aren't sending them."""
1618 1618
1619 1619 self.hook('preoutgoing', throw=True, source=source)
1620 1620
1621 1621 cl = self.changelog
1622 1622 nodes = cl.nodesbetween(basenodes, None)[0]
1623 1623 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1624 1624 self.changegroupinfo(nodes)
1625 1625
1626 1626 def identity(x):
1627 1627 return x
1628 1628
1629 1629 def gennodelst(revlog):
1630 1630 for r in xrange(0, revlog.count()):
1631 1631 n = revlog.node(r)
1632 1632 if revlog.linkrev(n) in revset:
1633 1633 yield n
1634 1634
1635 1635 def changed_file_collector(changedfileset):
1636 1636 def collect_changed_files(clnode):
1637 1637 c = cl.read(clnode)
1638 1638 for fname in c[3]:
1639 1639 changedfileset[fname] = 1
1640 1640 return collect_changed_files
1641 1641
1642 1642 def lookuprevlink_func(revlog):
1643 1643 def lookuprevlink(n):
1644 1644 return cl.node(revlog.linkrev(n))
1645 1645 return lookuprevlink
1646 1646
1647 1647 def gengroup():
1648 1648 # construct a list of all changed files
1649 1649 changedfiles = {}
1650 1650
1651 1651 for chnk in cl.group(nodes, identity,
1652 1652 changed_file_collector(changedfiles)):
1653 1653 yield chnk
1654 1654 changedfiles = changedfiles.keys()
1655 1655 changedfiles.sort()
1656 1656
1657 1657 mnfst = self.manifest
1658 1658 nodeiter = gennodelst(mnfst)
1659 1659 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1660 1660 yield chnk
1661 1661
1662 1662 for fname in changedfiles:
1663 1663 filerevlog = self.file(fname)
1664 1664 nodeiter = gennodelst(filerevlog)
1665 1665 nodeiter = list(nodeiter)
1666 1666 if nodeiter:
1667 1667 yield changegroup.genchunk(fname)
1668 1668 lookup = lookuprevlink_func(filerevlog)
1669 1669 for chnk in filerevlog.group(nodeiter, lookup):
1670 1670 yield chnk
1671 1671
1672 1672 yield changegroup.closechunk()
1673 1673
1674 1674 if nodes:
1675 1675 self.hook('outgoing', node=hex(nodes[0]), source=source)
1676 1676
1677 1677 return util.chunkbuffer(gengroup())
1678 1678
1679 1679 def addchangegroup(self, source, srctype, url):
1680 1680 """add changegroup to repo.
1681 1681
1682 1682 return values:
1683 1683 - nothing changed or no source: 0
1684 1684 - more heads than before: 1+added heads (2..n)
1685 1685 - less heads than before: -1-removed heads (-2..-n)
1686 1686 - number of heads stays the same: 1
1687 1687 """
1688 1688 def csmap(x):
1689 1689 self.ui.debug(_("add changeset %s\n") % short(x))
1690 1690 return cl.count()
1691 1691
1692 1692 def revmap(x):
1693 1693 return cl.rev(x)
1694 1694
1695 1695 if not source:
1696 1696 return 0
1697 1697
1698 1698 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1699 1699
1700 1700 changesets = files = revisions = 0
1701 1701
1702 1702 tr = self.transaction()
1703 1703
1704 1704 # write changelog data to temp files so concurrent readers will not see
1705 1705 # inconsistent view
1706 1706 cl = None
1707 1707 try:
1708 1708 cl = appendfile.appendchangelog(self.sopener,
1709 1709 self.changelog.version)
1710 1710
1711 1711 oldheads = len(cl.heads())
1712 1712
1713 1713 # pull off the changeset group
1714 1714 self.ui.status(_("adding changesets\n"))
1715 1715 cor = cl.count() - 1
1716 1716 chunkiter = changegroup.chunkiter(source)
1717 1717 if cl.addgroup(chunkiter, csmap, tr, 1) is None:
1718 1718 raise util.Abort(_("received changelog group is empty"))
1719 1719 cnr = cl.count() - 1
1720 1720 changesets = cnr - cor
1721 1721
1722 1722 # pull off the manifest group
1723 1723 self.ui.status(_("adding manifests\n"))
1724 1724 chunkiter = changegroup.chunkiter(source)
1725 1725 # no need to check for empty manifest group here:
1726 1726 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1727 1727 # no new manifest will be created and the manifest group will
1728 1728 # be empty during the pull
1729 1729 self.manifest.addgroup(chunkiter, revmap, tr)
1730 1730
1731 1731 # process the files
1732 1732 self.ui.status(_("adding file changes\n"))
1733 1733 while 1:
1734 1734 f = changegroup.getchunk(source)
1735 1735 if not f:
1736 1736 break
1737 1737 self.ui.debug(_("adding %s revisions\n") % f)
1738 1738 fl = self.file(f)
1739 1739 o = fl.count()
1740 1740 chunkiter = changegroup.chunkiter(source)
1741 1741 if fl.addgroup(chunkiter, revmap, tr) is None:
1742 1742 raise util.Abort(_("received file revlog group is empty"))
1743 1743 revisions += fl.count() - o
1744 1744 files += 1
1745 1745
1746 1746 cl.writedata()
1747 1747 finally:
1748 1748 if cl:
1749 1749 cl.cleanup()
1750 1750
1751 1751 # make changelog see real files again
1752 1752 self.changelog = changelog.changelog(self.sopener,
1753 1753 self.changelog.version)
1754 1754 self.changelog.checkinlinesize(tr)
1755 1755
1756 1756 newheads = len(self.changelog.heads())
1757 1757 heads = ""
1758 1758 if oldheads and newheads != oldheads:
1759 1759 heads = _(" (%+d heads)") % (newheads - oldheads)
1760 1760
1761 1761 self.ui.status(_("added %d changesets"
1762 1762 " with %d changes to %d files%s\n")
1763 1763 % (changesets, revisions, files, heads))
1764 1764
1765 1765 if changesets > 0:
1766 1766 self.hook('pretxnchangegroup', throw=True,
1767 1767 node=hex(self.changelog.node(cor+1)), source=srctype,
1768 1768 url=url)
1769 1769
1770 1770 tr.close()
1771 1771
1772 1772 if changesets > 0:
1773 1773 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1774 1774 source=srctype, url=url)
1775 1775
1776 1776 for i in xrange(cor + 1, cnr + 1):
1777 1777 self.hook("incoming", node=hex(self.changelog.node(i)),
1778 1778 source=srctype, url=url)
1779 1779
1780 1780 # never return 0 here:
1781 1781 if newheads < oldheads:
1782 1782 return newheads - oldheads - 1
1783 1783 else:
1784 1784 return newheads - oldheads + 1
1785 1785
1786 1786
1787 1787 def stream_in(self, remote):
1788 1788 fp = remote.stream_out()
1789 1789 l = fp.readline()
1790 1790 try:
1791 1791 resp = int(l)
1792 1792 except ValueError:
1793 1793 raise util.UnexpectedOutput(
1794 1794 _('Unexpected response from remote server:'), l)
1795 1795 if resp == 1:
1796 1796 raise util.Abort(_('operation forbidden by server'))
1797 1797 elif resp == 2:
1798 1798 raise util.Abort(_('locking the remote repository failed'))
1799 1799 elif resp != 0:
1800 1800 raise util.Abort(_('the server sent an unknown error code'))
1801 1801 self.ui.status(_('streaming all changes\n'))
1802 1802 l = fp.readline()
1803 1803 try:
1804 1804 total_files, total_bytes = map(int, l.split(' ', 1))
1805 1805 except ValueError, TypeError:
1806 1806 raise util.UnexpectedOutput(
1807 1807 _('Unexpected response from remote server:'), l)
1808 1808 self.ui.status(_('%d files to transfer, %s of data\n') %
1809 1809 (total_files, util.bytecount(total_bytes)))
1810 1810 start = time.time()
1811 1811 for i in xrange(total_files):
1812 1812 # XXX doesn't support '\n' or '\r' in filenames
1813 1813 l = fp.readline()
1814 1814 try:
1815 1815 name, size = l.split('\0', 1)
1816 1816 size = int(size)
1817 1817 except ValueError, TypeError:
1818 1818 raise util.UnexpectedOutput(
1819 1819 _('Unexpected response from remote server:'), l)
1820 1820 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
1821 1821 ofp = self.sopener(name, 'w')
1822 1822 for chunk in util.filechunkiter(fp, limit=size):
1823 1823 ofp.write(chunk)
1824 1824 ofp.close()
1825 1825 elapsed = time.time() - start
1826 1826 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1827 1827 (util.bytecount(total_bytes), elapsed,
1828 1828 util.bytecount(total_bytes / elapsed)))
1829 1829 self.reload()
1830 1830 return len(self.heads()) + 1
1831 1831
1832 1832 def clone(self, remote, heads=[], stream=False):
1833 1833 '''clone remote repository.
1834 1834
1835 1835 keyword arguments:
1836 1836 heads: list of revs to clone (forces use of pull)
1837 1837 stream: use streaming clone if possible'''
1838 1838
1839 1839 # now, all clients that can request uncompressed clones can
1840 1840 # read repo formats supported by all servers that can serve
1841 1841 # them.
1842 1842
1843 1843 # if revlog format changes, client will have to check version
1844 1844 # and format flags on "stream" capability, and use
1845 1845 # uncompressed only if compatible.
1846 1846
1847 1847 if stream and not heads and remote.capable('stream'):
1848 1848 return self.stream_in(remote)
1849 1849 return self.pull(remote, heads)
1850 1850
1851 1851 # used to avoid circular references so destructors work
1852 1852 def aftertrans(files):
1853 1853 renamefiles = [tuple(t) for t in files]
1854 1854 def a():
1855 1855 for src, dest in renamefiles:
1856 1856 util.rename(src, dest)
1857 1857 return a
1858 1858
1859 1859 def instance(ui, path, create):
1860 1860 return localrepository(ui, util.drop_scheme('file', path), create)
1861 1861
1862 1862 def islocal(path):
1863 1863 return True
@@ -1,27 +1,24 b''
1 1 # repo.py - repository base classes for mercurial
2 2 #
3 3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 5 #
6 6 # This software may be used and distributed according to the terms
7 7 # of the GNU General Public License, incorporated herein by reference.
8 8
9 9 class RepoError(Exception):
10 10 pass
11 11
12 class LookupError(RepoError):
13 pass
14
15 12 class repository(object):
16 13 def capable(self, name):
17 14 '''tell whether repo supports named capability.
18 15 return False if not supported.
19 16 if boolean capability, return True.
20 17 if string capability, return string.'''
21 18 name_eq = name + '='
22 19 for cap in self.capabilities:
23 20 if name == cap:
24 21 return True
25 22 if cap.startswith(name_eq):
26 23 return cap[len(name_eq):]
27 24 return False
@@ -1,1291 +1,1292 b''
1 1 """
2 2 revlog.py - storage back-end for mercurial
3 3
4 4 This provides efficient delta storage with O(1) retrieve and append
5 5 and O(changes) merge between branches
6 6
7 7 Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
8 8
9 9 This software may be used and distributed according to the terms
10 10 of the GNU General Public License, incorporated herein by reference.
11 11 """
12 12
13 13 from node import *
14 14 from i18n import _
15 15 import binascii, changegroup, errno, ancestor, mdiff, os
16 16 import sha, struct, util, zlib
17 17
18 18 # revlog version strings
19 19 REVLOGV0 = 0
20 20 REVLOGNG = 1
21 21
22 22 # revlog flags
23 23 REVLOGNGINLINEDATA = (1 << 16)
24 24 REVLOG_DEFAULT_FLAGS = REVLOGNGINLINEDATA
25 25
26 26 REVLOG_DEFAULT_FORMAT = REVLOGNG
27 27 REVLOG_DEFAULT_VERSION = REVLOG_DEFAULT_FORMAT | REVLOG_DEFAULT_FLAGS
28 28
29 29 def flagstr(flag):
30 30 if flag == "inline":
31 31 return REVLOGNGINLINEDATA
32 32 raise RevlogError(_("unknown revlog flag %s") % flag)
33 33
34 34 def hash(text, p1, p2):
35 35 """generate a hash from the given text and its parent hashes
36 36
37 37 This hash combines both the current file contents and its history
38 38 in a manner that makes it easy to distinguish nodes with the same
39 39 content in the revision graph.
40 40 """
41 41 l = [p1, p2]
42 42 l.sort()
43 43 s = sha.new(l[0])
44 44 s.update(l[1])
45 45 s.update(text)
46 46 return s.digest()
47 47
48 48 def compress(text):
49 49 """ generate a possibly-compressed representation of text """
50 50 if not text: return ("", text)
51 51 if len(text) < 44:
52 52 if text[0] == '\0': return ("", text)
53 53 return ('u', text)
54 54 bin = zlib.compress(text)
55 55 if len(bin) > len(text):
56 56 if text[0] == '\0': return ("", text)
57 57 return ('u', text)
58 58 return ("", bin)
59 59
60 60 def decompress(bin):
61 61 """ decompress the given input """
62 62 if not bin: return bin
63 63 t = bin[0]
64 64 if t == '\0': return bin
65 65 if t == 'x': return zlib.decompress(bin)
66 66 if t == 'u': return bin[1:]
67 67 raise RevlogError(_("unknown compression type %r") % t)
68 68
69 69 indexformatv0 = ">4l20s20s20s"
70 70 v0shaoffset = 56
71 71 # index ng:
72 72 # 6 bytes offset
73 73 # 2 bytes flags
74 74 # 4 bytes compressed length
75 75 # 4 bytes uncompressed length
76 76 # 4 bytes: base rev
77 77 # 4 bytes link rev
78 78 # 4 bytes parent 1 rev
79 79 # 4 bytes parent 2 rev
80 80 # 32 bytes: nodeid
81 81 indexformatng = ">Qiiiiii20s12x"
82 82 ngshaoffset = 32
83 83 versionformat = ">I"
84 84
85 85 class lazyparser(object):
86 86 """
87 87 this class avoids the need to parse the entirety of large indices
88 88 """
89 89
90 90 # lazyparser is not safe to use on windows if win32 extensions not
91 91 # available. it keeps file handle open, which make it not possible
92 92 # to break hardlinks on local cloned repos.
93 93 safe_to_use = os.name != 'nt' or (not util.is_win_9x() and
94 94 hasattr(util, 'win32api'))
95 95
96 96 def __init__(self, dataf, size, indexformat, shaoffset):
97 97 self.dataf = dataf
98 98 self.format = indexformat
99 99 self.s = struct.calcsize(indexformat)
100 100 self.indexformat = indexformat
101 101 self.datasize = size
102 102 self.l = size/self.s
103 103 self.index = [None] * self.l
104 104 self.map = {nullid: nullrev}
105 105 self.allmap = 0
106 106 self.all = 0
107 107 self.mapfind_count = 0
108 108 self.shaoffset = shaoffset
109 109
110 110 def loadmap(self):
111 111 """
112 112 during a commit, we need to make sure the rev being added is
113 113 not a duplicate. This requires loading the entire index,
114 114 which is fairly slow. loadmap can load up just the node map,
115 115 which takes much less time.
116 116 """
117 117 if self.allmap: return
118 118 end = self.datasize
119 119 self.allmap = 1
120 120 cur = 0
121 121 count = 0
122 122 blocksize = self.s * 256
123 123 self.dataf.seek(0)
124 124 while cur < end:
125 125 data = self.dataf.read(blocksize)
126 126 off = 0
127 127 for x in xrange(256):
128 128 n = data[off + self.shaoffset:off + self.shaoffset + 20]
129 129 self.map[n] = count
130 130 count += 1
131 131 if count >= self.l:
132 132 break
133 133 off += self.s
134 134 cur += blocksize
135 135
136 136 def loadblock(self, blockstart, blocksize, data=None):
137 137 if self.all: return
138 138 if data is None:
139 139 self.dataf.seek(blockstart)
140 140 if blockstart + blocksize > self.datasize:
141 141 # the revlog may have grown since we've started running,
142 142 # but we don't have space in self.index for more entries.
143 143 # limit blocksize so that we don't get too much data.
144 144 blocksize = max(self.datasize - blockstart, 0)
145 145 data = self.dataf.read(blocksize)
146 146 lend = len(data) / self.s
147 147 i = blockstart / self.s
148 148 off = 0
149 149 for x in xrange(lend):
150 150 if self.index[i + x] == None:
151 151 b = data[off : off + self.s]
152 152 self.index[i + x] = b
153 153 n = b[self.shaoffset:self.shaoffset + 20]
154 154 self.map[n] = i + x
155 155 off += self.s
156 156
157 157 def findnode(self, node):
158 158 """search backwards through the index file for a specific node"""
159 159 if self.allmap: return None
160 160
161 161 # hg log will cause many many searches for the manifest
162 162 # nodes. After we get called a few times, just load the whole
163 163 # thing.
164 164 if self.mapfind_count > 8:
165 165 self.loadmap()
166 166 if node in self.map:
167 167 return node
168 168 return None
169 169 self.mapfind_count += 1
170 170 last = self.l - 1
171 171 while self.index[last] != None:
172 172 if last == 0:
173 173 self.all = 1
174 174 self.allmap = 1
175 175 return None
176 176 last -= 1
177 177 end = (last + 1) * self.s
178 178 blocksize = self.s * 256
179 179 while end >= 0:
180 180 start = max(end - blocksize, 0)
181 181 self.dataf.seek(start)
182 182 data = self.dataf.read(end - start)
183 183 findend = end - start
184 184 while True:
185 185 # we're searching backwards, so weh have to make sure
186 186 # we don't find a changeset where this node is a parent
187 187 off = data.rfind(node, 0, findend)
188 188 findend = off
189 189 if off >= 0:
190 190 i = off / self.s
191 191 off = i * self.s
192 192 n = data[off + self.shaoffset:off + self.shaoffset + 20]
193 193 if n == node:
194 194 self.map[n] = i + start / self.s
195 195 return node
196 196 else:
197 197 break
198 198 end -= blocksize
199 199 return None
200 200
201 201 def loadindex(self, i=None, end=None):
202 202 if self.all: return
203 203 all = False
204 204 if i == None:
205 205 blockstart = 0
206 206 blocksize = (512 / self.s) * self.s
207 207 end = self.datasize
208 208 all = True
209 209 else:
210 210 if end:
211 211 blockstart = i * self.s
212 212 end = end * self.s
213 213 blocksize = end - blockstart
214 214 else:
215 215 blockstart = (i & ~(32)) * self.s
216 216 blocksize = self.s * 64
217 217 end = blockstart + blocksize
218 218 while blockstart < end:
219 219 self.loadblock(blockstart, blocksize)
220 220 blockstart += blocksize
221 221 if all: self.all = True
222 222
223 223 class lazyindex(object):
224 224 """a lazy version of the index array"""
225 225 def __init__(self, parser):
226 226 self.p = parser
227 227 def __len__(self):
228 228 return len(self.p.index)
229 229 def load(self, pos):
230 230 if pos < 0:
231 231 pos += len(self.p.index)
232 232 self.p.loadindex(pos)
233 233 return self.p.index[pos]
234 234 def __getitem__(self, pos):
235 235 ret = self.p.index[pos] or self.load(pos)
236 236 if isinstance(ret, str):
237 237 ret = struct.unpack(self.p.indexformat, ret)
238 238 return ret
239 239 def __setitem__(self, pos, item):
240 240 self.p.index[pos] = item
241 241 def __delitem__(self, pos):
242 242 del self.p.index[pos]
243 243 def append(self, e):
244 244 self.p.index.append(e)
245 245
246 246 class lazymap(object):
247 247 """a lazy version of the node map"""
248 248 def __init__(self, parser):
249 249 self.p = parser
250 250 def load(self, key):
251 251 n = self.p.findnode(key)
252 252 if n == None:
253 253 raise KeyError(key)
254 254 def __contains__(self, key):
255 255 if key in self.p.map:
256 256 return True
257 257 self.p.loadmap()
258 258 return key in self.p.map
259 259 def __iter__(self):
260 260 yield nullid
261 261 for i in xrange(self.p.l):
262 262 ret = self.p.index[i]
263 263 if not ret:
264 264 self.p.loadindex(i)
265 265 ret = self.p.index[i]
266 266 if isinstance(ret, str):
267 267 ret = struct.unpack(self.p.indexformat, ret)
268 268 yield ret[-1]
269 269 def __getitem__(self, key):
270 270 try:
271 271 return self.p.map[key]
272 272 except KeyError:
273 273 try:
274 274 self.load(key)
275 275 return self.p.map[key]
276 276 except KeyError:
277 277 raise KeyError("node " + hex(key))
278 278 def __setitem__(self, key, val):
279 279 self.p.map[key] = val
280 280 def __delitem__(self, key):
281 281 del self.p.map[key]
282 282
283 283 class RevlogError(Exception): pass
284 class LookupError(RevlogError): pass
284 285
285 286 class revlog(object):
286 287 """
287 288 the underlying revision storage object
288 289
289 290 A revlog consists of two parts, an index and the revision data.
290 291
291 292 The index is a file with a fixed record size containing
292 293 information on each revision, includings its nodeid (hash), the
293 294 nodeids of its parents, the position and offset of its data within
294 295 the data file, and the revision it's based on. Finally, each entry
295 296 contains a linkrev entry that can serve as a pointer to external
296 297 data.
297 298
298 299 The revision data itself is a linear collection of data chunks.
299 300 Each chunk represents a revision and is usually represented as a
300 301 delta against the previous chunk. To bound lookup time, runs of
301 302 deltas are limited to about 2 times the length of the original
302 303 version data. This makes retrieval of a version proportional to
303 304 its size, or O(1) relative to the number of revisions.
304 305
305 306 Both pieces of the revlog are written to in an append-only
306 307 fashion, which means we never need to rewrite a file to insert or
307 308 remove data, and can use some simple techniques to avoid the need
308 309 for locking while reading.
309 310 """
310 311 def __init__(self, opener, indexfile, datafile,
311 312 defversion=REVLOG_DEFAULT_VERSION):
312 313 """
313 314 create a revlog object
314 315
315 316 opener is a function that abstracts the file opening operation
316 317 and can be used to implement COW semantics or the like.
317 318 """
318 319 self.indexfile = indexfile
319 320 self.datafile = datafile
320 321 self.opener = opener
321 322
322 323 self.indexstat = None
323 324 self.cache = None
324 325 self.chunkcache = None
325 326 self.defversion = defversion
326 327 self.load()
327 328
328 329 def load(self):
329 330 v = self.defversion
330 331 try:
331 332 f = self.opener(self.indexfile)
332 333 i = f.read(4)
333 334 f.seek(0)
334 335 except IOError, inst:
335 336 if inst.errno != errno.ENOENT:
336 337 raise
337 338 i = ""
338 339 else:
339 340 try:
340 341 st = util.fstat(f)
341 342 except AttributeError, inst:
342 343 st = None
343 344 else:
344 345 oldst = self.indexstat
345 346 if (oldst and st.st_dev == oldst.st_dev
346 347 and st.st_ino == oldst.st_ino
347 348 and st.st_mtime == oldst.st_mtime
348 349 and st.st_ctime == oldst.st_ctime):
349 350 return
350 351 self.indexstat = st
351 352 if len(i) > 0:
352 353 v = struct.unpack(versionformat, i)[0]
353 354 flags = v & ~0xFFFF
354 355 fmt = v & 0xFFFF
355 356 if fmt == REVLOGV0:
356 357 if flags:
357 358 raise RevlogError(_("index %s unknown flags %#04x for format v0")
358 359 % (self.indexfile, flags >> 16))
359 360 elif fmt == REVLOGNG:
360 361 if flags & ~REVLOGNGINLINEDATA:
361 362 raise RevlogError(_("index %s unknown flags %#04x for revlogng")
362 363 % (self.indexfile, flags >> 16))
363 364 else:
364 365 raise RevlogError(_("index %s unknown format %d")
365 366 % (self.indexfile, fmt))
366 367 self.version = v
367 368 if v == REVLOGV0:
368 369 self.indexformat = indexformatv0
369 370 shaoffset = v0shaoffset
370 371 else:
371 372 self.indexformat = indexformatng
372 373 shaoffset = ngshaoffset
373 374
374 375 if i:
375 376 if (lazyparser.safe_to_use and not self.inlinedata() and
376 377 st and st.st_size > 10000):
377 378 # big index, let's parse it on demand
378 379 parser = lazyparser(f, st.st_size, self.indexformat, shaoffset)
379 380 self.index = lazyindex(parser)
380 381 self.nodemap = lazymap(parser)
381 382 else:
382 383 self.parseindex(f, st)
383 384 if self.version != REVLOGV0:
384 385 e = list(self.index[0])
385 386 type = self.ngtype(e[0])
386 387 e[0] = self.offset_type(0, type)
387 388 self.index[0] = e
388 389 else:
389 390 self.nodemap = {nullid: nullrev}
390 391 self.index = []
391 392
392 393
393 394 def parseindex(self, fp, st):
394 395 s = struct.calcsize(self.indexformat)
395 396 self.index = []
396 397 self.nodemap = {nullid: nullrev}
397 398 inline = self.inlinedata()
398 399 n = 0
399 400 leftover = None
400 401 while True:
401 402 if st:
402 403 data = fp.read(65536)
403 404 else:
404 405 # hack for httprangereader, it doesn't do partial reads well
405 406 data = fp.read()
406 407 if not data:
407 408 break
408 409 if n == 0 and self.inlinedata():
409 410 # cache the first chunk
410 411 self.chunkcache = (0, data)
411 412 if leftover:
412 413 data = leftover + data
413 414 leftover = None
414 415 off = 0
415 416 l = len(data)
416 417 while off < l:
417 418 if l - off < s:
418 419 leftover = data[off:]
419 420 break
420 421 cur = data[off:off + s]
421 422 off += s
422 423 e = struct.unpack(self.indexformat, cur)
423 424 self.index.append(e)
424 425 self.nodemap[e[-1]] = n
425 426 n += 1
426 427 if inline:
427 428 off += e[1]
428 429 if off > l:
429 430 # some things don't seek well, just read it
430 431 fp.read(off - l)
431 432 if not st:
432 433 break
433 434
434 435
435 436 def ngoffset(self, q):
436 437 if q & 0xFFFF:
437 438 raise RevlogError(_('%s: incompatible revision flag %x') %
438 439 (self.indexfile, q))
439 440 return long(q >> 16)
440 441
441 442 def ngtype(self, q):
442 443 return int(q & 0xFFFF)
443 444
444 445 def offset_type(self, offset, type):
445 446 return long(long(offset) << 16 | type)
446 447
447 448 def loadindex(self, start, end):
448 449 """load a block of indexes all at once from the lazy parser"""
449 450 if isinstance(self.index, lazyindex):
450 451 self.index.p.loadindex(start, end)
451 452
452 453 def loadindexmap(self):
453 454 """loads both the map and the index from the lazy parser"""
454 455 if isinstance(self.index, lazyindex):
455 456 p = self.index.p
456 457 p.loadindex()
457 458 self.nodemap = p.map
458 459
459 460 def loadmap(self):
460 461 """loads the map from the lazy parser"""
461 462 if isinstance(self.nodemap, lazymap):
462 463 self.nodemap.p.loadmap()
463 464 self.nodemap = self.nodemap.p.map
464 465
465 466 def inlinedata(self): return self.version & REVLOGNGINLINEDATA
466 467 def tip(self): return self.node(len(self.index) - 1)
467 468 def count(self): return len(self.index)
468 469 def node(self, rev):
469 470 return rev == nullrev and nullid or self.index[rev][-1]
470 471 def rev(self, node):
471 472 try:
472 473 return self.nodemap[node]
473 474 except KeyError:
474 raise RevlogError(_('%s: no node %s') % (self.indexfile, hex(node)))
475 raise LookupError(_('%s: no node %s') % (self.indexfile, hex(node)))
475 476 def linkrev(self, node):
476 477 return (node == nullid) and nullrev or self.index[self.rev(node)][-4]
477 478 def parents(self, node):
478 479 if node == nullid: return (nullid, nullid)
479 480 r = self.rev(node)
480 481 d = self.index[r][-3:-1]
481 482 if self.version == REVLOGV0:
482 483 return d
483 484 return (self.node(d[0]), self.node(d[1]))
484 485 def parentrevs(self, rev):
485 486 if rev == nullrev:
486 487 return (nullrev, nullrev)
487 488 d = self.index[rev][-3:-1]
488 489 if self.version == REVLOGV0:
489 490 return (self.rev(d[0]), self.rev(d[1]))
490 491 return d
491 492 def start(self, rev):
492 493 if rev == nullrev:
493 494 return 0
494 495 if self.version != REVLOGV0:
495 496 return self.ngoffset(self.index[rev][0])
496 497 return self.index[rev][0]
497 498
498 499 def end(self, rev): return self.start(rev) + self.length(rev)
499 500
500 501 def size(self, rev):
501 502 """return the length of the uncompressed text for a given revision"""
502 503 if rev == nullrev:
503 504 return 0
504 505 l = -1
505 506 if self.version != REVLOGV0:
506 507 l = self.index[rev][2]
507 508 if l >= 0:
508 509 return l
509 510
510 511 t = self.revision(self.node(rev))
511 512 return len(t)
512 513
513 514 # alternate implementation, The advantage to this code is it
514 515 # will be faster for a single revision. But, the results are not
515 516 # cached, so finding the size of every revision will be slower.
516 517 """
517 518 if self.cache and self.cache[1] == rev:
518 519 return len(self.cache[2])
519 520
520 521 base = self.base(rev)
521 522 if self.cache and self.cache[1] >= base and self.cache[1] < rev:
522 523 base = self.cache[1]
523 524 text = self.cache[2]
524 525 else:
525 526 text = self.revision(self.node(base))
526 527
527 528 l = len(text)
528 529 for x in xrange(base + 1, rev + 1):
529 530 l = mdiff.patchedsize(l, self.chunk(x))
530 531 return l
531 532 """
532 533
533 534 def length(self, rev):
534 535 if rev == nullrev:
535 536 return 0
536 537 else:
537 538 return self.index[rev][1]
538 539 def base(self, rev):
539 540 if (rev == nullrev):
540 541 return nullrev
541 542 else:
542 543 return self.index[rev][-5]
543 544
544 545 def reachable(self, node, stop=None):
545 546 """return a hash of all nodes ancestral to a given node, including
546 547 the node itself, stopping when stop is matched"""
547 548 reachable = {}
548 549 visit = [node]
549 550 reachable[node] = 1
550 551 if stop:
551 552 stopn = self.rev(stop)
552 553 else:
553 554 stopn = 0
554 555 while visit:
555 556 n = visit.pop(0)
556 557 if n == stop:
557 558 continue
558 559 if n == nullid:
559 560 continue
560 561 for p in self.parents(n):
561 562 if self.rev(p) < stopn:
562 563 continue
563 564 if p not in reachable:
564 565 reachable[p] = 1
565 566 visit.append(p)
566 567 return reachable
567 568
568 569 def nodesbetween(self, roots=None, heads=None):
569 570 """Return a tuple containing three elements. Elements 1 and 2 contain
570 571 a final list bases and heads after all the unreachable ones have been
571 572 pruned. Element 0 contains a topologically sorted list of all
572 573
573 574 nodes that satisfy these constraints:
574 575 1. All nodes must be descended from a node in roots (the nodes on
575 576 roots are considered descended from themselves).
576 577 2. All nodes must also be ancestors of a node in heads (the nodes in
577 578 heads are considered to be their own ancestors).
578 579
579 580 If roots is unspecified, nullid is assumed as the only root.
580 581 If heads is unspecified, it is taken to be the output of the
581 582 heads method (i.e. a list of all nodes in the repository that
582 583 have no children)."""
583 584 nonodes = ([], [], [])
584 585 if roots is not None:
585 586 roots = list(roots)
586 587 if not roots:
587 588 return nonodes
588 589 lowestrev = min([self.rev(n) for n in roots])
589 590 else:
590 591 roots = [nullid] # Everybody's a descendent of nullid
591 592 lowestrev = nullrev
592 593 if (lowestrev == nullrev) and (heads is None):
593 594 # We want _all_ the nodes!
594 595 return ([self.node(r) for r in xrange(0, self.count())],
595 596 [nullid], list(self.heads()))
596 597 if heads is None:
597 598 # All nodes are ancestors, so the latest ancestor is the last
598 599 # node.
599 600 highestrev = self.count() - 1
600 601 # Set ancestors to None to signal that every node is an ancestor.
601 602 ancestors = None
602 603 # Set heads to an empty dictionary for later discovery of heads
603 604 heads = {}
604 605 else:
605 606 heads = list(heads)
606 607 if not heads:
607 608 return nonodes
608 609 ancestors = {}
609 610 # Turn heads into a dictionary so we can remove 'fake' heads.
610 611 # Also, later we will be using it to filter out the heads we can't
611 612 # find from roots.
612 613 heads = dict.fromkeys(heads, 0)
613 614 # Start at the top and keep marking parents until we're done.
614 615 nodestotag = heads.keys()
615 616 # Remember where the top was so we can use it as a limit later.
616 617 highestrev = max([self.rev(n) for n in nodestotag])
617 618 while nodestotag:
618 619 # grab a node to tag
619 620 n = nodestotag.pop()
620 621 # Never tag nullid
621 622 if n == nullid:
622 623 continue
623 624 # A node's revision number represents its place in a
624 625 # topologically sorted list of nodes.
625 626 r = self.rev(n)
626 627 if r >= lowestrev:
627 628 if n not in ancestors:
628 629 # If we are possibly a descendent of one of the roots
629 630 # and we haven't already been marked as an ancestor
630 631 ancestors[n] = 1 # Mark as ancestor
631 632 # Add non-nullid parents to list of nodes to tag.
632 633 nodestotag.extend([p for p in self.parents(n) if
633 634 p != nullid])
634 635 elif n in heads: # We've seen it before, is it a fake head?
635 636 # So it is, real heads should not be the ancestors of
636 637 # any other heads.
637 638 heads.pop(n)
638 639 if not ancestors:
639 640 return nonodes
640 641 # Now that we have our set of ancestors, we want to remove any
641 642 # roots that are not ancestors.
642 643
643 644 # If one of the roots was nullid, everything is included anyway.
644 645 if lowestrev > nullrev:
645 646 # But, since we weren't, let's recompute the lowest rev to not
646 647 # include roots that aren't ancestors.
647 648
648 649 # Filter out roots that aren't ancestors of heads
649 650 roots = [n for n in roots if n in ancestors]
650 651 # Recompute the lowest revision
651 652 if roots:
652 653 lowestrev = min([self.rev(n) for n in roots])
653 654 else:
654 655 # No more roots? Return empty list
655 656 return nonodes
656 657 else:
657 658 # We are descending from nullid, and don't need to care about
658 659 # any other roots.
659 660 lowestrev = nullrev
660 661 roots = [nullid]
661 662 # Transform our roots list into a 'set' (i.e. a dictionary where the
662 663 # values don't matter.
663 664 descendents = dict.fromkeys(roots, 1)
664 665 # Also, keep the original roots so we can filter out roots that aren't
665 666 # 'real' roots (i.e. are descended from other roots).
666 667 roots = descendents.copy()
667 668 # Our topologically sorted list of output nodes.
668 669 orderedout = []
669 670 # Don't start at nullid since we don't want nullid in our output list,
670 671 # and if nullid shows up in descedents, empty parents will look like
671 672 # they're descendents.
672 673 for r in xrange(max(lowestrev, 0), highestrev + 1):
673 674 n = self.node(r)
674 675 isdescendent = False
675 676 if lowestrev == nullrev: # Everybody is a descendent of nullid
676 677 isdescendent = True
677 678 elif n in descendents:
678 679 # n is already a descendent
679 680 isdescendent = True
680 681 # This check only needs to be done here because all the roots
681 682 # will start being marked is descendents before the loop.
682 683 if n in roots:
683 684 # If n was a root, check if it's a 'real' root.
684 685 p = tuple(self.parents(n))
685 686 # If any of its parents are descendents, it's not a root.
686 687 if (p[0] in descendents) or (p[1] in descendents):
687 688 roots.pop(n)
688 689 else:
689 690 p = tuple(self.parents(n))
690 691 # A node is a descendent if either of its parents are
691 692 # descendents. (We seeded the dependents list with the roots
692 693 # up there, remember?)
693 694 if (p[0] in descendents) or (p[1] in descendents):
694 695 descendents[n] = 1
695 696 isdescendent = True
696 697 if isdescendent and ((ancestors is None) or (n in ancestors)):
697 698 # Only include nodes that are both descendents and ancestors.
698 699 orderedout.append(n)
699 700 if (ancestors is not None) and (n in heads):
700 701 # We're trying to figure out which heads are reachable
701 702 # from roots.
702 703 # Mark this head as having been reached
703 704 heads[n] = 1
704 705 elif ancestors is None:
705 706 # Otherwise, we're trying to discover the heads.
706 707 # Assume this is a head because if it isn't, the next step
707 708 # will eventually remove it.
708 709 heads[n] = 1
709 710 # But, obviously its parents aren't.
710 711 for p in self.parents(n):
711 712 heads.pop(p, None)
712 713 heads = [n for n in heads.iterkeys() if heads[n] != 0]
713 714 roots = roots.keys()
714 715 assert orderedout
715 716 assert roots
716 717 assert heads
717 718 return (orderedout, roots, heads)
718 719
719 720 def heads(self, start=None, stop=None):
720 721 """return the list of all nodes that have no children
721 722
722 723 if start is specified, only heads that are descendants of
723 724 start will be returned
724 725 if stop is specified, it will consider all the revs from stop
725 726 as if they had no children
726 727 """
727 728 if start is None:
728 729 start = nullid
729 730 if stop is None:
730 731 stop = []
731 732 stoprevs = dict.fromkeys([self.rev(n) for n in stop])
732 733 startrev = self.rev(start)
733 734 reachable = {startrev: 1}
734 735 heads = {startrev: 1}
735 736
736 737 parentrevs = self.parentrevs
737 738 for r in xrange(startrev + 1, self.count()):
738 739 for p in parentrevs(r):
739 740 if p in reachable:
740 741 if r not in stoprevs:
741 742 reachable[r] = 1
742 743 heads[r] = 1
743 744 if p in heads and p not in stoprevs:
744 745 del heads[p]
745 746
746 747 return [self.node(r) for r in heads]
747 748
748 749 def children(self, node):
749 750 """find the children of a given node"""
750 751 c = []
751 752 p = self.rev(node)
752 753 for r in range(p + 1, self.count()):
753 754 for pr in self.parentrevs(r):
754 755 if pr == p:
755 756 c.append(self.node(r))
756 757 return c
757 758
758 759 def _match(self, id):
759 760 if isinstance(id, (long, int)):
760 761 # rev
761 762 return self.node(id)
762 763 if len(id) == 20:
763 764 # possibly a binary node
764 765 # odds of a binary node being all hex in ASCII are 1 in 10**25
765 766 try:
766 767 node = id
767 768 r = self.rev(node) # quick search the index
768 769 return node
769 except RevlogError:
770 except LookupError:
770 771 pass # may be partial hex id
771 772 try:
772 773 # str(rev)
773 774 rev = int(id)
774 775 if str(rev) != id: raise ValueError
775 776 if rev < 0: rev = self.count() + rev
776 777 if rev < 0 or rev >= self.count(): raise ValueError
777 778 return self.node(rev)
778 779 except (ValueError, OverflowError):
779 780 pass
780 781 if len(id) == 40:
781 782 try:
782 783 # a full hex nodeid?
783 784 node = bin(id)
784 785 r = self.rev(node)
785 786 return node
786 787 except TypeError:
787 788 pass
788 789
789 790 def _partialmatch(self, id):
790 791 if len(id) < 40:
791 792 try:
792 793 # hex(node)[:...]
793 794 bin_id = bin(id[:len(id) & ~1]) # grab an even number of digits
794 795 node = None
795 796 for n in self.nodemap:
796 797 if n.startswith(bin_id) and hex(n).startswith(id):
797 798 if node is not None:
798 raise RevlogError(_("Ambiguous identifier"))
799 raise LookupError(_("Ambiguous identifier"))
799 800 node = n
800 801 if node is not None:
801 802 return node
802 803 except TypeError:
803 804 pass
804 805
805 806 def lookup(self, id):
806 807 """locate a node based on:
807 808 - revision number or str(revision number)
808 809 - nodeid or subset of hex nodeid
809 810 """
810 811
811 812 n = self._match(id)
812 813 if n is not None:
813 814 return n
814 815 n = self._partialmatch(id)
815 816 if n:
816 817 return n
817 818
818 raise RevlogError(_("No match found"))
819 raise LookupError(_("No match found"))
819 820
820 821 def cmp(self, node, text):
821 822 """compare text with a given file revision"""
822 823 p1, p2 = self.parents(node)
823 824 return hash(text, p1, p2) != node
824 825
825 826 def makenode(self, node, text):
826 827 """calculate a file nodeid for text, descended or possibly
827 828 unchanged from node"""
828 829
829 830 if self.cmp(node, text):
830 831 return hash(text, node, nullid)
831 832 return node
832 833
833 834 def diff(self, a, b):
834 835 """return a delta between two revisions"""
835 836 return mdiff.textdiff(a, b)
836 837
837 838 def patches(self, t, pl):
838 839 """apply a list of patches to a string"""
839 840 return mdiff.patches(t, pl)
840 841
841 842 def chunk(self, rev, df=None, cachelen=4096):
842 843 start, length = self.start(rev), self.length(rev)
843 844 inline = self.inlinedata()
844 845 if inline:
845 846 start += (rev + 1) * struct.calcsize(self.indexformat)
846 847 end = start + length
847 848 def loadcache(df):
848 849 cache_length = max(cachelen, length) # 4k
849 850 if not df:
850 851 if inline:
851 852 df = self.opener(self.indexfile)
852 853 else:
853 854 df = self.opener(self.datafile)
854 855 df.seek(start)
855 856 self.chunkcache = (start, df.read(cache_length))
856 857
857 858 if not self.chunkcache:
858 859 loadcache(df)
859 860
860 861 cache_start = self.chunkcache[0]
861 862 cache_end = cache_start + len(self.chunkcache[1])
862 863 if start >= cache_start and end <= cache_end:
863 864 # it is cached
864 865 offset = start - cache_start
865 866 else:
866 867 loadcache(df)
867 868 offset = 0
868 869
869 870 #def checkchunk():
870 871 # df = self.opener(self.datafile)
871 872 # df.seek(start)
872 873 # return df.read(length)
873 874 #assert s == checkchunk()
874 875 return decompress(self.chunkcache[1][offset:offset + length])
875 876
876 877 def delta(self, node):
877 878 """return or calculate a delta between a node and its predecessor"""
878 879 r = self.rev(node)
879 880 return self.revdiff(r - 1, r)
880 881
881 882 def revdiff(self, rev1, rev2):
882 883 """return or calculate a delta between two revisions"""
883 884 b1 = self.base(rev1)
884 885 b2 = self.base(rev2)
885 886 if b1 == b2 and rev1 + 1 == rev2:
886 887 return self.chunk(rev2)
887 888 else:
888 889 return self.diff(self.revision(self.node(rev1)),
889 890 self.revision(self.node(rev2)))
890 891
    def revision(self, node):
        """return an uncompressed revision of a given node"""
        # nullid is the empty revision by definition
        if node == nullid: return ""
        # fast path: the last reconstructed revision is cached as
        # (node, rev, text)
        if self.cache and self.cache[0] == node: return self.cache[2]

        # look up what we need to read
        text = None
        rev = self.rev(node)
        base = self.base(rev)  # start of this rev's delta chain

        if self.inlinedata():
            # we probably have the whole chunk cached
            df = None
        else:
            df = self.opener(self.datafile)

        # do we have useful data cached?
        if self.cache and self.cache[1] >= base and self.cache[1] < rev:
            # the cached text lies inside this delta chain: start patching
            # from it instead of from the chain's full-text base
            base = self.cache[1]
            text = self.cache[2]
            self.loadindex(base, rev + 1)
        else:
            self.loadindex(base, rev + 1)
            text = self.chunk(base, df=df)

        # collect the deltas from base+1 up to rev and apply them in order
        bins = []
        for r in xrange(base + 1, rev + 1):
            bins.append(self.chunk(r, df=df))

        text = self.patches(text, bins)

        # verify the reconstructed text against the node hash (the hash
        # covers the text and both parents)
        p1, p2 = self.parents(node)
        if node != hash(text, p1, p2):
            raise RevlogError(_("integrity check failed on %s:%d")
                              % (self.datafile, rev))

        self.cache = (node, rev, text)
        return text
929 930
    def checkinlinesize(self, tr, fp=None):
        """migrate an inline revlog to separate index and data files once
        it grows past 128k

        tr - current transaction, used to record offsets for rollback
        fp - optional already-open file object for the index file
        """
        if not self.inlinedata():
            return
        if not fp:
            fp = self.opener(self.indexfile, 'r')
        fp.seek(0, 2)
        size = fp.tell()
        # stay inline while the combined file is small (< 128k)
        if size < 131072:
            return
        trinfo = tr.find(self.indexfile)
        if trinfo == None:
            raise RevlogError(_("%s not found in the transaction")
                              % self.indexfile)

        trindex = trinfo[2]
        dataoff = self.start(trindex)

        tr.add(self.datafile, dataoff)
        df = self.opener(self.datafile, 'w')
        calc = struct.calcsize(self.indexformat)
        # copy each revision's data chunk out of the inline file; in the
        # inline layout rev r's data follows its index entry, so its file
        # offset is shifted by (r + 1) index entries
        for r in xrange(self.count()):
            start = self.start(r) + (r + 1) * calc
            length = self.length(r)
            fp.seek(start)
            d = fp.read(length)
            df.write(d)
        fp.close()
        df.close()
        # rewrite the index without the inline flag; atomictemp makes the
        # replacement all-or-nothing
        fp = self.opener(self.indexfile, 'w', atomictemp=True)
        self.version &= ~(REVLOGNGINLINEDATA)
        if self.count():
            # the first entry carries the version number in its first
            # four bytes
            x = self.index[0]
            e = struct.pack(self.indexformat, *x)[4:]
            l = struct.pack(versionformat, self.version)
            fp.write(l)
            fp.write(e)

        for i in xrange(1, self.count()):
            x = self.index[i]
            e = struct.pack(self.indexformat, *x)
            fp.write(e)

        # if we don't call rename, the temp file will never replace the
        # real index
        fp.rename()

        tr.replace(self.indexfile, trindex * calc)
        # the chunk cache holds offsets into the old inline file; drop it
        self.chunkcache = None
978 979
    def addrevision(self, text, transaction, link, p1=None, p2=None, d=None):
        """add a revision to the log

        text - the revision data to add
        transaction - the transaction object used for rollback
        link - the linkrev data to add
        p1, p2 - the parent nodeids of the revision
        d - an optional precomputed delta
        """
        if not self.inlinedata():
            # separate data file: new chunks are appended there
            dfh = self.opener(self.datafile, "a")
        else:
            # inline revlog: data is interleaved into the index file itself
            dfh = None
        ifh = self.opener(self.indexfile, "a+")
        return self._addrevision(text, transaction, link, p1, p2, d, ifh, dfh)
994 995
    def _addrevision(self, text, transaction, link, p1, p2, d, ifh, dfh):
        """internal worker shared by addrevision and addgroup

        text - revision data (None means the empty string)
        transaction - transaction used to record offsets for rollback
        link - linkrev for the new revision
        p1, p2 - parent nodes (defaults: tip and nullid)
        d - optional precomputed delta against the tip
        ifh, dfh - open index and data file handles (dfh is None when the
                   revlog stores data inline)
        """
        if text is None: text = ""
        if p1 is None: p1 = self.tip()
        if p2 is None: p2 = nullid

        node = hash(text, p1, p2)

        # the revision already exists; nothing to write
        if node in self.nodemap:
            return node

        n = self.count()
        t = n - 1  # current tip rev

        if n:
            base = self.base(t)
            start = self.start(base)
            end = self.end(t)
            if not d:
                prev = self.revision(self.tip())
                d = self.diff(prev, text)
            data = compress(d)
            l = len(data[1]) + len(data[0])
            dist = end - start + l  # size of the delta chain plus this delta

        # full versions are inserted when the needed deltas
        # become comparable to the uncompressed text
        if not n or dist > len(text) * 2:
            data = compress(text)
            l = len(data[1]) + len(data[0])
            base = n  # this rev starts a new delta chain
        else:
            base = self.base(t)

        offset = 0
        if t >= 0:
            offset = self.end(t)

        if self.version == REVLOGV0:
            e = (offset, l, base, link, p1, p2, node)
        else:
            # revlogng packs offset and flags together and records parent
            # revs rather than parent nodes
            e = (self.offset_type(offset, 0), l, len(text),
                 base, link, self.rev(p1), self.rev(p2), node)

        self.index.append(e)
        self.nodemap[node] = n
        entry = struct.pack(self.indexformat, *e)

        if not self.inlinedata():
            transaction.add(self.datafile, offset)
            transaction.add(self.indexfile, n * len(entry))
            if data[0]:
                dfh.write(data[0])
            dfh.write(data[1])
            # flush so the data is readable before the index references it
            dfh.flush()
        else:
            ifh.seek(0, 2)
            transaction.add(self.indexfile, ifh.tell(), self.count() - 1)

        if len(self.index) == 1 and self.version != REVLOGV0:
            # very first entry: the version header replaces the entry's
            # first four bytes
            l = struct.pack(versionformat, self.version)
            ifh.write(l)
            entry = entry[4:]

        ifh.write(entry)

        if self.inlinedata():
            # inline layout: data chunk follows its index entry directly
            ifh.write(data[0])
            ifh.write(data[1])
            self.checkinlinesize(transaction, ifh)

        self.cache = (node, n, text)
        return node
1067 1068
1068 1069 def ancestor(self, a, b):
1069 1070 """calculate the least common ancestor of nodes a and b"""
1070 1071
1071 1072 def parents(rev):
1072 1073 return [p for p in self.parentrevs(rev) if p != nullrev]
1073 1074
1074 1075 c = ancestor.ancestor(self.rev(a), self.rev(b), parents)
1075 1076 if c is None:
1076 1077 return nullid
1077 1078
1078 1079 return self.node(c)
1079 1080
    def group(self, nodelist, lookup, infocollect=None):
        """calculate a delta group

        Given a list of changeset revs, return a set of deltas and
        metadata corresponding to nodes. the first delta is
        parent(nodes[0]) -> nodes[0] the receiver is guaranteed to
        have this parent as it has all history before these
        changesets. parent is parent[0]

        lookup - maps a node to the metadata appended after the parents
        infocollect - optional callback invoked with each node before
                      its delta is generated
        """
        revs = [self.rev(n) for n in nodelist]

        # if we don't have any revisions touched by these changesets, bail
        if not revs:
            yield changegroup.closechunk()
            return

        # add the parent of the first rev
        p = self.parents(self.node(revs[0]))[0]
        revs.insert(0, self.rev(p))

        # build deltas
        for d in xrange(0, len(revs) - 1):
            a, b = revs[d], revs[d + 1]
            nb = self.node(b)

            if infocollect is not None:
                infocollect(nb)

            d = self.revdiff(a, b)
            p = self.parents(nb)
            # chunk layout: node, both parents, lookup metadata, delta
            meta = nb + p[0] + p[1] + lookup(nb)
            yield changegroup.genchunk("%s%s" % (meta, d))

        yield changegroup.closechunk()
1114 1115
    def addgroup(self, revs, linkmapper, transaction, unique=0):
        """
        add a delta group

        given a set of deltas, add them to the revision log. the
        first delta is against its parent, which should be in our
        log, the rest are against the previous delta.

        revs - iterable of changegroup chunks
        linkmapper - maps a changeset node to the linkrev to record
        transaction - transaction used to record offsets for rollback
        unique - currently unused (duplicate check is commented out)
        """

        #track the base of the current delta log
        r = self.count()
        t = r - 1
        node = None

        base = prev = nullrev
        start = end = textlen = 0
        if r:
            end = self.end(t)

        ifh = self.opener(self.indexfile, "a+")
        ifh.seek(0, 2)
        transaction.add(self.indexfile, ifh.tell(), self.count())
        if self.inlinedata():
            dfh = None
        else:
            transaction.add(self.datafile, end)
            dfh = self.opener(self.datafile, "a")

        # loop through our set of deltas
        chain = None
        for chunk in revs:
            # each chunk starts with the node, its two parents and the
            # changeset node that introduced it (4 x 20 bytes)
            node, p1, p2, cs = struct.unpack("20s20s20s20s", chunk[:80])
            link = linkmapper(cs)
            if node in self.nodemap:
                # this can happen if two branches make the same change
                # if unique:
                #    raise RevlogError(_("already have %s") % hex(node[:4]))
                chain = node
                continue
            delta = chunk[80:]

            for p in (p1, p2):
                if not p in self.nodemap:
                    raise LookupError(_("unknown parent %s") % short(p))

            if not chain:
                # retrieve the parent revision of the delta chain
                chain = p1
                if not chain in self.nodemap:
                    raise LookupError(_("unknown base %s") % short(chain[:4]))

            # full versions are inserted when the needed deltas become
            # comparable to the uncompressed text or when the previous
            # version is not the one we have a delta against. We use
            # the size of the previous full rev as a proxy for the
            # current size.

            if chain == prev:
                tempd = compress(delta)
                cdelta = tempd[0] + tempd[1]
                textlen = mdiff.patchedsize(textlen, delta)

            if chain != prev or (end - start + len(cdelta)) > textlen * 2:
                # flush our writes here so we can read it in revision
                if dfh:
                    dfh.flush()
                ifh.flush()
                # reconstruct the full text and store it as a new base
                text = self.revision(chain)
                text = self.patches(text, [delta])
                chk = self._addrevision(text, transaction, link, p1, p2, None,
                                        ifh, dfh)
                if not dfh and not self.inlinedata():
                    # addrevision switched from inline to conventional
                    # reopen the index
                    dfh = self.opener(self.datafile, "a")
                    ifh = self.opener(self.indexfile, "a")
                if chk != node:
                    raise RevlogError(_("consistency error adding group"))
                textlen = len(text)
            else:
                # append the compressed delta directly
                if self.version == REVLOGV0:
                    e = (end, len(cdelta), base, link, p1, p2, node)
                else:
                    e = (self.offset_type(end, 0), len(cdelta), textlen, base,
                         link, self.rev(p1), self.rev(p2), node)
                self.index.append(e)
                self.nodemap[node] = r
                if self.inlinedata():
                    ifh.write(struct.pack(self.indexformat, *e))
                    ifh.write(cdelta)
                    self.checkinlinesize(transaction, ifh)
                    if not self.inlinedata():
                        # checkinlinesize migrated us off inline storage;
                        # reopen both files
                        dfh = self.opener(self.datafile, "a")
                        ifh = self.opener(self.indexfile, "a")
                else:
                    dfh.write(cdelta)
                    ifh.write(struct.pack(self.indexformat, *e))

            # advance the chain state for the next chunk
            t, r, chain, prev = r, r + 1, node, node
            base = self.base(t)
            start = self.start(base)
            end = self.end(t)

        return node
1219 1220
    def strip(self, rev, minlink):
        """truncate the revlog, removing revision rev and all that follow

        rev - first revision to remove
        minlink - the oldest linkrev we are allowed to strip; rev is
                  advanced past entries with an older linkrev
        """
        if self.count() == 0 or rev >= self.count():
            return

        if isinstance(self.index, lazyindex):
            # a fully loaded index/nodemap is needed to mutate them below
            self.loadindexmap()

        # When stripping away a revision, we need to make sure it
        # does not actually belong to an older changeset.
        # The minlink parameter defines the oldest revision
        # we're allowed to strip away.
        # index[rev][-4] is the entry's linkrev
        while minlink > self.index[rev][-4]:
            rev += 1
            if rev >= self.count():
                return

        # first truncate the files on disk
        end = self.start(rev)
        if not self.inlinedata():
            df = self.opener(self.datafile, "a")
            df.truncate(end)
            end = rev * struct.calcsize(self.indexformat)
        else:
            # inline layout: each surviving rev contributes one index
            # entry on top of its data
            end += rev * struct.calcsize(self.indexformat)

        indexf = self.opener(self.indexfile, "a")
        indexf.truncate(end)

        # then reset internal state in memory to forget those revisions
        self.cache = None
        self.chunkcache = None
        for x in xrange(rev, self.count()):
            del self.nodemap[self.node(x)]

        del self.index[rev:]
1255 1256
    def checksize(self):
        """compare the on-disk file sizes against what the index expects

        Returns a tuple (dd, di):
        dd - difference between the data file's actual size and the end
             offset of the last revision (0 for a missing file, and forced
             to 0 for inline revlogs)
        di - bytes in the index file beyond whole index entries (for
             inline revlogs, beyond the entries plus recorded data lengths)
        """
        expected = 0
        if self.count():
            expected = self.end(self.count() - 1)

        try:
            f = self.opener(self.datafile)
            f.seek(0, 2)
            actual = f.tell()
            dd = actual - expected
        except IOError, inst:
            # a missing data file is acceptable (e.g. inline revlogs)
            if inst.errno != errno.ENOENT:
                raise
            dd = 0

        try:
            f = self.opener(self.indexfile)
            f.seek(0, 2)
            actual = f.tell()
            s = struct.calcsize(self.indexformat)
            i = actual / s
            di = actual - (i * s)
            if self.inlinedata():
                # inline: data chunks are interleaved with index entries,
                # so account for their recorded lengths too
                databytes = 0
                for r in xrange(self.count()):
                    databytes += self.length(r)
                dd = 0
                di = actual - self.count() * s - databytes
        except IOError, inst:
            if inst.errno != errno.ENOENT:
                raise
            di = 0

        return (dd, di)
1290 1291
1291 1292
General Comments 0
You need to be logged in to leave comments. Login now