##// END OF EJS Templates
revlog: simplify revlog version handling...
Matt Mackall -
r4258:b11a2fb5 default
parent child Browse files
Show More
@@ -1,161 +1,161
1 1 # appendfile.py - special classes to make repo updates atomic
2 2 #
3 3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 import cStringIO, changelog, errno, manifest, os, tempfile, util
9 9
10 10 # writes to metadata files are ordered. reads: changelog, manifest,
11 11 # normal files. writes: normal files, manifest, changelog.
12 12
13 13 # manifest contains pointers to offsets in normal files. changelog
14 14 # contains pointers to offsets in manifest. if reader reads old
15 15 # changelog while manifest or normal files are written, it has no
16 16 # pointers into new parts of those files that are maybe not consistent
17 17 # yet, so will not read them.
18 18
19 19 # localrepo.addchangegroup thinks it writes changelog first, then
20 20 # manifest, then normal files (this is order they are available, and
21 21 # needed for computing linkrev fields), but uses appendfile to hide
22 22 # updates from readers. data not written to manifest or changelog
23 23 # until all normal files updated. write manifest first, then
24 24 # changelog.
25 25
26 26 # with this write ordering, readers cannot see inconsistent view of
27 27 # repo during update.
28 28
class appendfile(object):
    '''implement enough of file protocol to append to revlog file.
    appended data is written to temp file. reads and seeks span real
    file and temp file. readers cannot see appended data until
    writedata called.'''

    def __init__(self, fp, tmpname):
        # reuse an existing temp file when one is handed in, otherwise
        # create a fresh one (mkstemp's fd is closed; we reopen via
        # posixfile so the handle behaves like our other file objects)
        if tmpname:
            self.tmpname = tmpname
            self.tmpfp = util.posixfile(self.tmpname, 'ab+')
        else:
            fd, self.tmpname = tempfile.mkstemp(prefix="hg-appendfile-")
            os.close(fd)
            self.tmpfp = util.posixfile(self.tmpname, 'ab+')
        self.realfp = fp
        self.offset = fp.tell()
        # nobody else writes the real file, so its size can be cached
        # once to keep seek and read fast.
        self.realsize = util.fstat(fp).st_size
        self.name = fp.name

    def end(self):
        self.tmpfp.flush() # make sure the stat is correct
        return self.realsize + util.fstat(self.tmpfp).st_size

    def tell(self):
        return self.offset

    def flush(self):
        self.tmpfp.flush()

    def close(self):
        self.realfp.close()
        self.tmpfp.close()

    def seek(self, offset, whence=0):
        '''virtual file offset spans real file and temp file.'''
        if whence == 0:
            self.offset = offset
        elif whence == 1:
            self.offset += offset
        elif whence == 2:
            self.offset = self.end() + offset

        # position whichever underlying file the virtual offset lands in
        if self.offset < self.realsize:
            self.realfp.seek(self.offset)
        else:
            self.tmpfp.seek(self.offset - self.realsize)

    def read(self, count=-1):
        '''only trick here is reads that span real file and temp file.'''
        buf = cStringIO.StringIO()
        start = self.offset
        if self.offset < self.realsize:
            data = self.realfp.read(count)
            buf.write(data)
            self.offset += len(data)
            if count > 0:
                count -= len(data)
        if count != 0:
            # continue into the temp file; reseek it only if the real
            # file was consumed above (otherwise seek() positioned it)
            if start != self.offset:
                self.tmpfp.seek(self.offset - self.realsize)
            data = self.tmpfp.read(count)
            buf.write(data)
            self.offset += len(data)
        return buf.getvalue()

    def write(self, s):
        '''append to temp file.'''
        self.tmpfp.seek(0, 2)
        self.tmpfp.write(s)
        # all writes are appends, so offset must go to end of file.
        self.offset = self.realsize + self.tmpfp.tell()
102 102
103 103 class appendopener(object):
104 104 '''special opener for files that only read or append.'''
105 105
106 106 def __init__(self, opener):
107 107 self.realopener = opener
108 108 # key: file name, value: appendfile name
109 109 self.tmpnames = {}
110 110
111 111 def __call__(self, name, mode='r'):
112 112 '''open file.'''
113 113
114 114 assert mode in 'ra+'
115 115 try:
116 116 realfp = self.realopener(name, 'r')
117 117 except IOError, err:
118 118 if err.errno != errno.ENOENT: raise
119 119 realfp = self.realopener(name, 'w+')
120 120 tmpname = self.tmpnames.get(name)
121 121 fp = appendfile(realfp, tmpname)
122 122 if tmpname is None:
123 123 self.tmpnames[name] = fp.tmpname
124 124 return fp
125 125
126 126 def writedata(self):
127 127 '''copy data from temp files to real files.'''
128 128 # write .d file before .i file.
129 129 tmpnames = self.tmpnames.items()
130 130 tmpnames.sort()
131 131 for name, tmpname in tmpnames:
132 132 ifp = open(tmpname, 'rb')
133 133 ofp = self.realopener(name, 'a')
134 134 for chunk in util.filechunkiter(ifp):
135 135 ofp.write(chunk)
136 136 ifp.close()
137 137 os.unlink(tmpname)
138 138 del self.tmpnames[name]
139 139 ofp.close()
140 140
141 141 def cleanup(self):
142 142 '''delete temp files (this discards unwritten data!)'''
143 143 for tmpname in self.tmpnames.values():
144 144 os.unlink(tmpname)
145 145
146 146 # files for changelog and manifest are in different appendopeners, so
147 147 # not mixed up together.
148 148
class appendchangelog(changelog.changelog, appendopener):
    '''changelog that buffers its appends through an appendopener.'''
    def __init__(self, opener):
        # the instance itself acts as the opener handed to the revlog
        appendopener.__init__(self, opener)
        changelog.changelog.__init__(self, self)
    def checkinlinesize(self, fp, tr):
        # never migrate to out-of-line storage while appending
        return
155 155
class appendmanifest(manifest.manifest, appendopener):
    '''manifest that buffers its appends through an appendopener.'''
    def __init__(self, opener):
        # the instance itself acts as the opener handed to the revlog
        appendopener.__init__(self, opener)
        manifest.manifest.__init__(self, self)
    def checkinlinesize(self, fp, tr):
        # never migrate to out-of-line storage while appending
        return
@@ -1,105 +1,105
1 1 # changelog.py - changelog class for mercurial
2 2 #
3 3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from revlog import *
9 9 from i18n import _
10 10 import os, time, util
11 11
12 12 def _string_escape(text):
13 13 """
14 14 >>> d = {'nl': chr(10), 'bs': chr(92), 'cr': chr(13), 'nul': chr(0)}
15 15 >>> s = "ab%(nl)scd%(bs)s%(bs)sn%(nul)sab%(cr)scd%(bs)s%(nl)s" % d
16 16 >>> s
17 17 'ab\\ncd\\\\\\\\n\\x00ab\\rcd\\\\\\n'
18 18 >>> res = _string_escape(s)
19 19 >>> s == _string_unescape(res)
20 20 True
21 21 """
22 22 # subset of the string_escape codec
23 23 text = text.replace('\\', '\\\\').replace('\n', '\\n').replace('\r', '\\r')
24 24 return text.replace('\0', '\\0')
25 25
def _string_unescape(text):
    # inverse of _string_escape: the Python 2 string_escape codec
    # understands the same \\, \n, \r and \0 escapes
    return text.decode('string_escape')
28 28
class changelog(revlog):
    '''revlog subclass storing changeset metadata (00changelog.i).'''

    def __init__(self, opener):
        revlog.__init__(self, opener, "00changelog.i")

    def decode_extra(self, text):
        '''decode a \0-separated, escaped string into an extra dict.'''
        extra = {}
        for l in text.split('\0'):
            if not l:
                continue
            k, v = _string_unescape(l).split(':', 1)
            extra[k] = v
        return extra

    def encode_extra(self, d):
        '''encode an extra dict as \0-separated, escaped key:value pairs.'''
        items = [_string_escape(":".join(t)) for t in d.iteritems()]
        return "\0".join(items)

    def extract(self, text):
        """
        format used:
        nodeid\n : manifest node in ascii
        user\n : user, no \n or \r allowed
        time tz extra\n : date (time is int or float, timezone is int)
        : extra is metadatas, encoded and separated by '\0'
        : older versions ignore it
        files\n\n : files modified by the cset, no \n or \r allowed
        (.*) : comment (free text, ideally utf-8)

        changelog v0 doesn't use extra
        """
        if not text:
            # empty revision: synthesize a null changeset
            return (nullid, "", (0, 0), [], "", {'branch': 'default'})
        last = text.index("\n\n")
        desc = util.tolocal(text[last + 2:])
        l = text[:last].split('\n')
        manifest = bin(l[0])
        user = util.tolocal(l[1])

        extra_data = l[2].split(' ', 2)
        if len(extra_data) != 3:
            # old-style date line: "time [tz]", no extra field
            time = float(extra_data.pop(0))
            try:
                # various tools did silly things with the time zone field.
                # NOTE: was a bare except; narrowed to the two errors a
                # garbled or missing tz field can actually raise.
                timezone = int(extra_data[0])
            except (ValueError, IndexError):
                timezone = 0
            extra = {}
        else:
            time, timezone, extra = extra_data
            time, timezone = float(time), int(timezone)
            extra = self.decode_extra(extra)
        if not extra.get('branch'):
            extra['branch'] = 'default'
        files = l[3:]
        return (manifest, user, (time, timezone), files, desc, extra)

    def read(self, node):
        '''return the extracted tuple for the changeset at node.'''
        return self.extract(self.revision(node))

    def add(self, manifest, list, desc, transaction, p1=None, p2=None,
            user=None, date=None, extra=None):
        '''add a changeset revision; returns the new node.

        extra defaults to None instead of a shared mutable {} (the old
        default was deleted from in place, leaking state across calls
        and mutating callers' dicts); it is copied before modification.
        '''
        user, desc = util.fromlocal(user), util.fromlocal(desc)

        if date:
            parseddate = "%d %d" % util.parsedate(date)
        else:
            parseddate = "%d %d" % util.makedate()
        if extra and extra.get("branch") in ("default", ""):
            # don't store the default branch name; copy first so the
            # caller's dict is never mutated
            extra = extra.copy()
            del extra["branch"]
        if extra:
            extra = self.encode_extra(extra)
            parseddate = "%s %s" % (parseddate, extra)
        list.sort()
        l = [hex(manifest), user, parseddate] + list + ["", desc]
        text = "\n".join(l)
        return self.addrevision(text, transaction, self.count(), p1, p2)
@@ -1,3342 +1,3341
1 1 # commands.py - command processing for mercurial
2 2 #
3 3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 import demandimport; demandimport.enable()
9 9 from node import *
10 10 from i18n import _
11 11 import bisect, os, re, sys, signal, imp, urllib, pdb, shlex, stat
12 12 import fancyopts, ui, hg, util, lock, revlog, bundlerepo
13 13 import difflib, patch, time, help, mdiff, tempfile
14 14 import traceback, errno, version, atexit, socket
15 15 import archival, changegroup, cmdutil, hgweb.server, sshserver
16 16
class UnknownCommand(Exception):
    """Exception raised if command is not in the command table."""

class AmbiguousCommand(Exception):
    """Exception raised if command shortcut matches more than one command."""
21 21
def bail_if_changed(repo):
    '''abort if the working directory contains uncommitted changes.'''
    m, a, r, d = repo.status()[:4]
    if m or a or r or d:
        raise util.Abort(_("outstanding uncommitted changes"))
26 26
27 27 def logmessage(opts):
28 28 """ get the log message according to -m and -l option """
29 29 message = opts['message']
30 30 logfile = opts['logfile']
31 31
32 32 if message and logfile:
33 33 raise util.Abort(_('options --message and --logfile are mutually '
34 34 'exclusive'))
35 35 if not message and logfile:
36 36 try:
37 37 if logfile == '-':
38 38 message = sys.stdin.read()
39 39 else:
40 40 message = open(logfile).read()
41 41 except IOError, inst:
42 42 raise util.Abort(_("can't read commit message '%s': %s") %
43 43 (logfile, inst.strerror))
44 44 return message
45 45
def setremoteconfig(ui, opts):
    "copy remote options to ui tree"
    # only propagate options that were actually given
    for opt in ('ssh', 'remotecmd'):
        value = opts.get(opt)
        if value:
            ui.setconfig("ui", opt, value)
52 52
53 53 # Commands start here, listed alphabetically
54 54
def add(ui, repo, *pats, **opts):
    """add the specified files on the next commit

    Schedule files to be version controlled and added to the repository.

    The files will be added to the repository at the next commit. To
    undo an add before that, see hg revert.

    If no names are given, add all files in the repository.
    """

    names = []
    for src, abspath, relpath, exact in cmdutil.walk(repo, pats, opts):
        if exact:
            # explicitly named files are added quietly unless verbose
            if ui.verbose:
                ui.status(_('adding %s\n') % relpath)
            names.append(abspath)
        elif repo.dirstate.state(abspath) == '?':
            # unknown file matched by a pattern: always report it
            ui.status(_('adding %s\n') % relpath)
            names.append(abspath)
    if not opts.get('dry_run'):
        repo.add(names)
77 77
def addremove(ui, repo, *pats, **opts):
    """add all new files, delete all missing files

    Add all new files and remove all missing files from the repository.

    New files are ignored if they match any of the patterns in .hgignore. As
    with add, these changes take effect at the next commit.

    Use the -s option to detect renamed files. With a parameter > 0,
    this compares every removed file with every added file and records
    those similar enough as renames. This option takes a percentage
    between 0 (disabled) and 100 (files must be identical) as its
    parameter. Detecting renamed files this way can be expensive.
    """
    similarity = float(opts.get('similarity') or 0)
    if similarity < 0 or similarity > 100:
        raise util.Abort(_('similarity must be between 0 and 100'))
    # cmdutil expects the ratio as a 0..1 fraction
    return cmdutil.addremove(repo, pats, opts, similarity=similarity/100.)
96 96
def annotate(ui, repo, *pats, **opts):
    """show changeset information per file line

    List changes in files, showing the revision id responsible for each line

    This command is useful to discover who did a change or when a change took
    place.

    Without the -a option, annotate will avoid processing files it
    detects as binary. With -a, annotate will generate an annotation
    anyway, probably with undesirable results.
    """
    getdate = util.cachefunc(lambda x: util.datestr(x.date()))

    if not pats:
        raise util.Abort(_('at least one file name or pattern required'))

    # option name -> formatter for one annotated line's context
    opmap = [['user', lambda x: ui.shortuser(x.user())],
             ['number', lambda x: str(x.rev())],
             ['changeset', lambda x: short(x.node())],
             ['date', getdate], ['follow', lambda x: x.path()]]
    if (not opts['user'] and not opts['changeset'] and not opts['date']
        and not opts['follow']):
        # no column requested: default to the revision number
        opts['number'] = 1

    ctx = repo.changectx(opts['rev'])

    for src, abspath, relpath, exact in cmdutil.walk(repo, pats, opts,
                                                     node=ctx.node()):
        fctx = ctx.filectx(abspath)
        if not opts['text'] and util.binary(fctx.data()):
            ui.write(_("%s: binary file\n") % ((pats and relpath) or abspath))
            continue

        lines = fctx.annotate(follow=opts.get('follow'))
        pieces = []

        for opt, fmt in opmap:
            if opts[opt]:
                vals = [fmt(n) for n, dummy in lines]
                if vals:
                    # right-align each column to its widest entry
                    width = max(map(len, vals))
                    pieces.append(["%*s" % (width, v) for v in vals])

        if pieces:
            for cols, line in zip(zip(*pieces), lines):
                ui.write("%s: %s" % (" ".join(cols), line[1]))
144 144
def archive(ui, repo, dest, **opts):
    '''create unversioned archive of a repository revision

    By default, the revision used is the parent of the working
    directory; use "-r" to specify a different revision.

    To specify the type of archive to create, use "-t". Valid
    types are:

    "files" (default): a directory full of files
    "tar": tar archive, uncompressed
    "tbz2": tar archive, compressed using bzip2
    "tgz": tar archive, compressed using gzip
    "uzip": zip archive, uncompressed
    "zip": zip archive, compressed using deflate

    The exact name of the destination archive or directory is given
    using a format string; see "hg help export" for details.

    Each member added to an archive file has a directory prefix
    prepended. Use "-p" to specify a format string for the prefix.
    The default is the basename of the archive, with suffixes removed.
    '''

    node = repo.changectx(opts['rev']).node()
    dest = cmdutil.make_filename(repo, dest, node)
    if os.path.realpath(dest) == repo.root:
        raise util.Abort(_('repository root cannot be destination'))
    dummy, matchfn, dummy = cmdutil.matchpats(repo, [], opts)
    kind = opts.get('type') or 'files'
    prefix = opts['prefix']
    if dest == '-':
        # archive to stdout: only container formats make sense
        if kind == 'files':
            raise util.Abort(_('cannot archive plain files to stdout'))
        dest = sys.stdout
        if not prefix:
            prefix = os.path.basename(repo.root) + '-%h'
    prefix = cmdutil.make_filename(repo, prefix, node)
    archival.archive(repo, dest, node, kind, not opts['no_decode'],
                     matchfn, prefix)
184 184
def backout(ui, repo, rev, **opts):
    '''reverse effect of earlier changeset

    Commit the backed out changes as a new changeset. The new
    changeset is a child of the backed out changeset.

    If you back out a changeset other than the tip, a new head is
    created. This head is the parent of the working directory. If
    you back out an old changeset, your working directory will appear
    old after the backout. You should merge the backout changeset
    with another head.

    The --merge option remembers the parent of the working directory
    before starting the backout, then merges the new head with that
    changeset afterwards. This saves you from doing the merge by
    hand. The result of this merge is not committed, as for a normal
    merge.'''

    bail_if_changed(repo)
    op1, op2 = repo.dirstate.parents()
    if op2 != nullid:
        raise util.Abort(_('outstanding uncommitted merge'))
    node = repo.lookup(rev)
    p1, p2 = repo.changelog.parents(node)
    if p1 == nullid:
        raise util.Abort(_('cannot back out a change with no parents'))
    if p2 != nullid:
        # backing out a merge: the user must name which parent to keep
        if not opts['parent']:
            raise util.Abort(_('cannot back out a merge changeset without '
                               '--parent'))
        candidate = repo.lookup(opts['parent'])
        if candidate not in (p1, p2):
            raise util.Abort(_('%s is not a parent of %s') %
                             (short(candidate), short(node)))
        parent = candidate
    else:
        if opts['parent']:
            raise util.Abort(_('cannot use --parent on non-merge changeset'))
        parent = p1
    # check out the backed-out rev, then revert everything to its parent
    hg.clean(repo, node, show_stats=False)
    ropts = opts.copy()
    ropts['date'] = None
    ropts['all'] = True
    ropts['rev'] = hex(parent)
    revert(ui, repo, **ropts)
    copts = opts.copy()
    copts['addremove'] = False
    if not copts['message'] and not copts['logfile']:
        copts['message'] = _("Backed out changeset %s") % (hex(node))
        copts['force_editor'] = True
    commit(ui, repo, **copts)
    def nice(node):
        return '%d:%s' % (repo.changelog.rev(node), short(node))
    ui.status(_('changeset %s backs out changeset %s\n') %
              (nice(repo.changelog.tip()), nice(node)))
    if op1 != node:
        if opts['merge']:
            ui.status(_('merging with changeset %s\n') % nice(op1))
            hg.merge(repo, hex(op1))
        else:
            ui.status(_('the backout changeset is a new head - '
                        'do not forget to merge\n'))
            ui.status(_('(use "backout --merge" '
                        'if you want to auto-merge)\n'))
249 249
def branch(ui, repo, label=None, **opts):
    """set or show the current branch name

    With <name>, set the current branch name. Otherwise, show the
    current branch name.

    Unless --force is specified, branch will not let you set a
    branch name that shadows an existing branch.
    """

    if not label:
        # no name given: just report the current branch
        ui.write("%s\n" % util.tolocal(repo.dirstate.branch()))
        return
    if not opts.get('force') and label in repo.branchtags():
        if label not in [p.branch() for p in repo.workingctx().parents()]:
            raise util.Abort(_('a branch of the same name already exists'
                               ' (use --force to override)'))
    repo.dirstate.setbranch(util.fromlocal(label))
268 268
def branches(ui, repo):
    """list repository named branches

    List the repository's named branches.
    """
    # negate the revision so a plain sort yields newest-first order
    entries = [(-repo.changelog.rev(node), node, tag)
               for tag, node in repo.branchtags().items()]
    entries.sort()
    hexfunc = ui.debugflag and hex or short
    for negrev, node, tag in entries:
        if ui.quiet:
            ui.write("%s\n" % tag)
        else:
            spaces = " " * (30 - util.locallen(tag))
            ui.write("%s%s %s:%s\n" % (tag, spaces, -negrev, hexfunc(node)))
284 284
def bundle(ui, repo, fname, dest=None, **opts):
    """create a changegroup file

    Generate a compressed changegroup file collecting changesets not
    found in the other repository.

    If no destination repository is specified the destination is assumed
    to have all the nodes specified by one or more --base parameters.

    The bundle file can then be transferred using conventional means and
    applied to another repository with the unbundle or pull command.
    This is useful when direct push and pull are not available or when
    exporting an entire repository is undesirable.

    Applying bundles preserves all changeset contents including
    permissions, copy/rename information, and revision history.
    """
    revs = opts.get('rev') or None
    if revs:
        revs = [repo.lookup(rev) for rev in revs]
    base = opts.get('base')
    if base:
        if dest:
            # fix: error message previously misspelled "specifiying"
            raise util.Abort(_("--base is incompatible with specifying "
                               "a destination"))
        base = [repo.lookup(rev) for rev in base]
        # create the right base
        # XXX: nodesbetween / changegroup* should be "fixed" instead
        o = []
        # everything reachable from a --base node is assumed present
        has = {nullid: None}
        for n in base:
            has.update(repo.changelog.reachable(n))
        if revs:
            visit = list(revs)
        else:
            visit = repo.changelog.heads()
        # walk ancestors of the requested heads, collecting (in o) the
        # roots whose parents are all already present remotely
        seen = {}
        while visit:
            n = visit.pop(0)
            parents = [p for p in repo.changelog.parents(n) if p not in has]
            if not parents:
                o.insert(0, n)
            else:
                for p in parents:
                    if p not in seen:
                        seen[p] = 1
                        visit.append(p)
    else:
        setremoteconfig(ui, opts)
        dest = ui.expandpath(dest or 'default-push', dest or 'default')
        other = hg.repository(ui, dest)
        o = repo.findoutgoing(other, force=opts['force'])

    if revs:
        cg = repo.changegroupsubset(o, revs, 'bundle')
    else:
        cg = repo.changegroup(o, 'bundle')
    changegroup.writebundle(cg, fname, "HG10BZ")
343 343
def cat(ui, repo, file1, *pats, **opts):
    """output the current or given revision of files

    Print the specified files as they were at the given revision.
    If no revision is given, the parent of the working directory is used,
    or tip if no revision is checked out.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are the same as
    for the export command, with the following additions:

    %s basename of file being printed
    %d dirname of file being printed, or '.' if in repo root
    %p root-relative path name of file being printed
    """
    ctx = repo.changectx(opts['rev'])
    allpats = (file1,) + pats
    for src, abspath, relpath, exact in cmdutil.walk(repo, allpats, opts,
                                                     ctx.node()):
        fp = cmdutil.make_file(repo, opts['output'], ctx.node(),
                               pathname=abspath)
        fp.write(ctx.filectx(abspath).data())
364 364
def clone(ui, source, dest=None, **opts):
    """make a copy of an existing repository

    Create a copy of an existing repository in a new directory.

    If no destination directory name is specified, it defaults to the
    basename of the source.

    The location of the source is added to the new repository's
    .hg/hgrc file, as the default to be used for future pulls.

    For efficiency, hardlinks are used for cloning whenever the source
    and destination are on the same filesystem (note this applies only
    to the repository data, not to the checked out files). Some
    filesystems, such as AFS, implement hardlinking incorrectly, but
    do not report errors. In these cases, use the --pull option to
    avoid hardlinking.

    You can safely clone repositories and checked out files using full
    hardlinks with

    $ cp -al REPO REPOCLONE

    which is the fastest way to clone. However, the operation is not
    atomic (making sure REPO is not modified during the operation is
    up to you) and you have to make sure your editor breaks hardlinks
    (Emacs and most Linux Kernel tools do so).

    If you use the -r option to clone up to a specific revision, no
    subsequent revisions will be present in the cloned repository.
    This option implies --pull, even on local repositories.

    See pull for valid source format details.

    It is possible to specify an ssh:// URL as the destination, but no
    .hg/hgrc and working directory will be created on the remote side.
    Look at the help text for the pull command for important details
    about ssh:// URLs.
    """
    setremoteconfig(ui, opts)
    source = ui.expandpath(source)
    hg.clone(ui, source, dest, pull=opts['pull'],
             stream=opts['uncompressed'], rev=opts['rev'],
             update=not opts['noupdate'])
410 410
411 411 def commit(ui, repo, *pats, **opts):
412 412 """commit the specified files or all outstanding changes
413 413
414 414 Commit changes to the given files into the repository.
415 415
416 416 If a list of files is omitted, all changes reported by "hg status"
417 417 will be committed.
418 418
419 419 If no commit message is specified, the editor configured in your hgrc
420 420 or in the EDITOR environment variable is started to enter a message.
421 421 """
422 422 message = logmessage(opts)
423 423
424 424 if opts['addremove']:
425 425 cmdutil.addremove(repo, pats, opts)
426 426 fns, match, anypats = cmdutil.matchpats(repo, pats, opts)
427 427 if pats:
428 428 status = repo.status(files=fns, match=match)
429 429 modified, added, removed, deleted, unknown = status[:5]
430 430 files = modified + added + removed
431 431 slist = None
432 432 for f in fns:
433 433 if f == '.':
434 434 continue
435 435 if f not in files:
436 436 rf = repo.wjoin(f)
437 437 if f in unknown:
438 438 raise util.Abort(_("file %s not tracked!") % rf)
439 439 try:
440 440 mode = os.lstat(rf)[stat.ST_MODE]
441 441 except OSError:
442 442 raise util.Abort(_("file %s not found!") % rf)
443 443 if stat.S_ISDIR(mode):
444 444 name = f + '/'
445 445 if slist is None:
446 446 slist = list(files)
447 447 slist.sort()
448 448 i = bisect.bisect(slist, name)
449 449 if i >= len(slist) or not slist[i].startswith(name):
450 450 raise util.Abort(_("no match under directory %s!")
451 451 % rf)
452 452 elif not stat.S_ISREG(mode):
453 453 raise util.Abort(_("can't commit %s: "
454 454 "unsupported file type!") % rf)
455 455 else:
456 456 files = []
457 457 try:
458 458 repo.commit(files, message, opts['user'], opts['date'], match,
459 459 force_editor=opts.get('force_editor'))
460 460 except ValueError, inst:
461 461 raise util.Abort(str(inst))
462 462
463 463 def docopy(ui, repo, pats, opts, wlock):
464 464 # called with the repo lock held
465 465 #
466 466 # hgsep => pathname that uses "/" to separate directories
467 467 # ossep => pathname that uses os.sep to separate directories
468 468 cwd = repo.getcwd()
469 469 errors = 0
470 470 copied = []
471 471 targets = {}
472 472
473 473 # abs: hgsep
474 474 # rel: ossep
475 475 # return: hgsep
476 476 def okaytocopy(abs, rel, exact):
477 477 reasons = {'?': _('is not managed'),
478 478 'a': _('has been marked for add'),
479 479 'r': _('has been marked for remove')}
480 480 state = repo.dirstate.state(abs)
481 481 reason = reasons.get(state)
482 482 if reason:
483 483 if state == 'a':
484 484 origsrc = repo.dirstate.copied(abs)
485 485 if origsrc is not None:
486 486 return origsrc
487 487 if exact:
488 488 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
489 489 else:
490 490 return abs
491 491
492 492 # origsrc: hgsep
493 493 # abssrc: hgsep
494 494 # relsrc: ossep
495 495 # target: ossep
496 496 def copy(origsrc, abssrc, relsrc, target, exact):
497 497 abstarget = util.canonpath(repo.root, cwd, target)
498 498 reltarget = util.pathto(repo.root, cwd, abstarget)
499 499 prevsrc = targets.get(abstarget)
500 500 if prevsrc is not None:
501 501 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
502 502 (reltarget, util.localpath(abssrc),
503 503 util.localpath(prevsrc)))
504 504 return
505 505 if (not opts['after'] and os.path.exists(reltarget) or
506 506 opts['after'] and repo.dirstate.state(abstarget) not in '?r'):
507 507 if not opts['force']:
508 508 ui.warn(_('%s: not overwriting - file exists\n') %
509 509 reltarget)
510 510 return
511 511 if not opts['after'] and not opts.get('dry_run'):
512 512 os.unlink(reltarget)
513 513 if opts['after']:
514 514 if not os.path.exists(reltarget):
515 515 return
516 516 else:
517 517 targetdir = os.path.dirname(reltarget) or '.'
518 518 if not os.path.isdir(targetdir) and not opts.get('dry_run'):
519 519 os.makedirs(targetdir)
520 520 try:
521 521 restore = repo.dirstate.state(abstarget) == 'r'
522 522 if restore and not opts.get('dry_run'):
523 523 repo.undelete([abstarget], wlock)
524 524 try:
525 525 if not opts.get('dry_run'):
526 526 util.copyfile(relsrc, reltarget)
527 527 restore = False
528 528 finally:
529 529 if restore:
530 530 repo.remove([abstarget], wlock)
531 531 except IOError, inst:
532 532 if inst.errno == errno.ENOENT:
533 533 ui.warn(_('%s: deleted in working copy\n') % relsrc)
534 534 else:
535 535 ui.warn(_('%s: cannot copy - %s\n') %
536 536 (relsrc, inst.strerror))
537 537 errors += 1
538 538 return
539 539 if ui.verbose or not exact:
540 540 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
541 541 targets[abstarget] = abssrc
542 542 if abstarget != origsrc and not opts.get('dry_run'):
543 543 repo.copy(origsrc, abstarget, wlock)
544 544 copied.append((abssrc, relsrc, exact))
545 545
546 546 # pat: ossep
547 547 # dest ossep
548 548 # srcs: list of (hgsep, hgsep, ossep, bool)
549 549 # return: function that takes hgsep and returns ossep
550 550 def targetpathfn(pat, dest, srcs):
551 551 if os.path.isdir(pat):
552 552 abspfx = util.canonpath(repo.root, cwd, pat)
553 553 abspfx = util.localpath(abspfx)
554 554 if destdirexists:
555 555 striplen = len(os.path.split(abspfx)[0])
556 556 else:
557 557 striplen = len(abspfx)
558 558 if striplen:
559 559 striplen += len(os.sep)
560 560 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
561 561 elif destdirexists:
562 562 res = lambda p: os.path.join(dest,
563 563 os.path.basename(util.localpath(p)))
564 564 else:
565 565 res = lambda p: dest
566 566 return res
567 567
568 568 # pat: ossep
569 569 # dest ossep
570 570 # srcs: list of (hgsep, hgsep, ossep, bool)
571 571 # return: function that takes hgsep and returns ossep
def targetpathafterfn(pat, dest, srcs):
    """Return a function mapping a source path (hgsep) to its
    destination path (ossep), for the --after case where sources may
    no longer exist on disk.

    pat and dest use the OS separator; srcs is a list of
    (hgsep, hgsep, ossep, bool) tuples.
    """
    if util.patkind(pat, None)[0]:
        # a mercurial pattern
        res = lambda p: os.path.join(dest,
                                     os.path.basename(util.localpath(p)))
    else:
        abspfx = util.canonpath(repo.root, cwd, pat)
        if len(abspfx) < len(srcs[0][0]):
            # A directory. Either the target path contains the last
            # component of the source path or it does not.
            def evalpath(striplen):
                # count how many sources already have a matching target
                # when stripped at striplen -- used to pick the more
                # plausible of the two interpretations below
                score = 0
                for s in srcs:
                    t = os.path.join(dest, util.localpath(s[0])[striplen:])
                    if os.path.exists(t):
                        score += 1
                return score

            abspfx = util.localpath(abspfx)
            striplen = len(abspfx)
            if striplen:
                striplen += len(os.sep)
            if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
                # dest contains the last source component: compare both
                # strip depths and keep whichever matches more targets
                score = evalpath(striplen)
                striplen1 = len(os.path.split(abspfx)[0])
                if striplen1:
                    striplen1 += len(os.sep)
                if evalpath(striplen1) > score:
                    striplen = striplen1
            res = lambda p: os.path.join(dest,
                                         util.localpath(p)[striplen:])
        else:
            # a file
            if destdirexists:
                res = lambda p: os.path.join(dest,
                                             os.path.basename(util.localpath(p)))
            else:
                res = lambda p: dest
    return res
611 611
612 612
613 613 pats = util.expand_glob(pats)
614 614 if not pats:
615 615 raise util.Abort(_('no source or destination specified'))
616 616 if len(pats) == 1:
617 617 raise util.Abort(_('no destination specified'))
618 618 dest = pats.pop()
619 619 destdirexists = os.path.isdir(dest)
620 620 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
621 621 raise util.Abort(_('with multiple sources, destination must be an '
622 622 'existing directory'))
623 623 if opts['after']:
624 624 tfn = targetpathafterfn
625 625 else:
626 626 tfn = targetpathfn
627 627 copylist = []
628 628 for pat in pats:
629 629 srcs = []
630 630 for tag, abssrc, relsrc, exact in cmdutil.walk(repo, [pat], opts,
631 631 globbed=True):
632 632 origsrc = okaytocopy(abssrc, relsrc, exact)
633 633 if origsrc:
634 634 srcs.append((origsrc, abssrc, relsrc, exact))
635 635 if not srcs:
636 636 continue
637 637 copylist.append((tfn(pat, dest, srcs), srcs))
638 638 if not copylist:
639 639 raise util.Abort(_('no files to copy'))
640 640
641 641 for targetpath, srcs in copylist:
642 642 for origsrc, abssrc, relsrc, exact in srcs:
643 643 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
644 644
645 645 if errors:
646 646 ui.warn(_('(consider using --after)\n'))
647 647 return errors, copied
648 648
def copy(ui, repo, *pats, **opts):
    """mark files as copied for the next commit

    Mark dest as having copies of source files.  If dest is a
    directory, copies are put in that directory.  If dest is a file,
    there can only be one source.

    By default, this command copies the contents of files as they
    stand in the working directory.  If invoked with --after, the
    operation is recorded, but no copying is performed.

    This command takes effect in the next commit. To undo a copy
    before that, see hg revert.
    """
    # hold the working-dir lock across the whole copy operation
    wlock = repo.wlock(0)
    result = docopy(ui, repo, pats, opts, wlock)
    # docopy returns (error count, copied list); callers want errors only
    return result[0]
666 666
def debugancestor(ui, index, rev1, rev2):
    """find the ancestor revision of two revisions in a given index"""
    # open the index relative to the current directory, unaudited
    opener = util.opener(os.getcwd(), audit=False)
    rlog = revlog.revlog(opener, index)
    anc = rlog.ancestor(rlog.lookup(rev1), rlog.lookup(rev2))
    ui.write("%d:%s\n" % (rlog.rev(anc), hex(anc)))
672 672
def debugcomplete(ui, cmd='', **opts):
    """returns the completion list associated with the given command"""

    if opts['options']:
        # collect option tables: the global ones plus, when a command
        # is named, that command's own table
        tables = [globalopts]
        if cmd:
            aliases, entry = findcmd(ui, cmd)
            tables.append(entry[1])
        flags = []
        for tbl in tables:
            for opt in tbl:
                if opt[0]:
                    flags.append('-%s' % opt[0])
                flags.append('--%s' % opt[1])
        ui.write("%s\n" % "\n".join(flags))
        return

    # without --options, complete command names instead
    names = findpossible(ui, cmd).keys()
    names.sort()
    ui.write("%s\n" % "\n".join(names))
693 693
def debugrebuildstate(ui, repo, rev=""):
    """rebuild the dirstate as it would look like for the given revision"""
    if rev == "":
        # default to the tip when no revision is given
        rev = repo.changelog.tip()
    manifest = repo.changectx(rev).manifest()
    # hold the working-dir lock while rewriting the dirstate
    wlock = repo.wlock()
    repo.dirstate.rebuild(rev, manifest)
702 702
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate

    Cross-checks every dirstate entry against the manifests of the two
    dirstate parents and warns on each inconsistency; raises util.Abort
    if any were found.
    """
    parent1, parent2 = repo.dirstate.parents()
    repo.dirstate.read()
    dc = repo.dirstate.map
    # NOTE: the old code also built and sorted dc.keys() here, but never
    # used the result -- dead code, removed.
    m1 = repo.changectx(parent1).manifest()
    m2 = repo.changectx(parent2).manifest()
    errors = 0
    # every tracked file must be consistent with the parent manifests:
    # n/r (normal/removed) entries must exist in manifest1, a (added)
    # must not, and m (merged) must be in at least one parent
    for f in dc:
        state = repo.dirstate.state(f)
        if state in "nr" and f not in m1:
            ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        if state in "a" and f in m1:
            ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        if state in "m" and f not in m1 and f not in m2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (f, state))
            errors += 1
    # conversely, every manifest1 entry must be tracked by the dirstate
    for f in m1:
        state = repo.dirstate.state(f)
        if state not in "nrm":
            ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        error = _(".hg/dirstate inconsistent with current parent's manifest")
        raise util.Abort(error)
733 733
def showconfig(ui, repo, *values, **opts):
    """show combined config settings from all hgrc files

    With no args, print names and values of all config items.

    With one arg of the form section.name, print just the value of
    that config item.

    With multiple args, print names and values of all config items
    with matching section names."""

    untrusted = bool(opts.get('untrusted'))
    if values:
        # at most one fully-qualified section.name item may be asked for
        dotted = [v for v in values if '.' in v]
        if len(dotted) > 1:
            raise util.Abort(_('only one config item permitted'))
    for section, name, value in ui.walkconfig(untrusted=untrusted):
        sectname = section + '.' + name
        if not values:
            ui.write('%s=%s\n' % (sectname, value))
            continue
        for sel in values:
            if sel == section:
                # bare section name: print name=value pairs
                ui.write('%s=%s\n' % (sectname, value))
            elif sel == sectname:
                # exact item: print only its value
                ui.write(value, '\n')
759 759
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care.
    """

    # an omitted second parent means the null revision
    second = rev2 or hex(nullid)

    wlock = repo.wlock()
    try:
        repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(second))
    finally:
        wlock.release()
775 775
def debugstate(ui, repo):
    """show the contents of the current dirstate"""
    repo.dirstate.read()
    # dirstate map entry layout: (state, mode, size, mtime)
    dc = repo.dirstate.map
    keys = dc.keys()
    keys.sort()
    for file_ in keys:
        # mtime of -1 marks an unset timestamp
        if dc[file_][3] == -1:
            # Pad or slice to locale representation
            locale_len = len(time.strftime("%x %X", time.localtime(0)))
            timestr = 'unset'
            timestr = timestr[:locale_len] + ' '*(locale_len - len(timestr))
        else:
            timestr = time.strftime("%x %X", time.localtime(dc[file_][3]))
        # state char, octal mode, size, timestamp, filename
        ui.write("%c %3o %10d %s %s\n"
                 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
                    timestr, file_))
    # finally, list recorded copy sources
    for f in repo.dirstate.copies():
        ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
795 795
def debugdata(ui, file_, rev):
    """dump the contents of a data file revision"""
    # map the ".d" data file name to its ".i" index companion
    rlog = revlog.revlog(util.opener(os.getcwd(), audit=False),
                         file_[:-2] + ".i")
    try:
        ui.write(rlog.revision(rlog.lookup(rev)))
    except KeyError:
        raise util.Abort(_('invalid revision identifier %s') % rev)
804 803
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    # --extended allows the additional, less common date formats
    if opts["extended"]:
        parsed = util.parsedate(date, util.extendeddateformats)
    else:
        parsed = util.parsedate(date)
    ui.write("internal: %s %s\n" % parsed)
    ui.write("standard: %s\n" % util.datestr(parsed))
    if range:
        # report whether the parsed timestamp falls inside the range
        matcher = util.matchdate(range)
        ui.write("match: %s\n" % matcher(parsed[0]))
816 815
def debugindex(ui, file_):
    """dump the contents of an index file"""
    rlog = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
    ui.write(" rev offset length base linkrev" +
             " nodeid p1 p2\n")
    # one row per revision: position, extent, delta base, link, nodes
    for rev in xrange(rlog.count()):
        node = rlog.node(rev)
        p1, p2 = rlog.parents(node)
        ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
                rev, rlog.start(rev), rlog.length(rev), rlog.base(rev),
                rlog.linkrev(node), short(node), short(p1), short(p2)))
828 827
def debugindexdot(ui, file_):
    """dump an index DAG as a .dot file"""
    rlog = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
    ui.write("digraph G {\n")
    for rev in xrange(rlog.count()):
        node = rlog.node(rev)
        p1, p2 = rlog.parents(node)
        # the first parent always gets an edge; a null second parent
        # is elided from the graph
        ui.write("\t%d -> %d\n" % (rlog.rev(p1), rev))
        if p2 != nullid:
            ui.write("\t%d -> %d\n" % (rlog.rev(p2), rev))
    ui.write("}\n")
840 839
841 840 def debuginstall(ui):
842 841 '''test Mercurial installation'''
843 842
844 843 def writetemp(contents):
845 844 (fd, name) = tempfile.mkstemp()
846 845 f = os.fdopen(fd, "wb")
847 846 f.write(contents)
848 847 f.close()
849 848 return name
850 849
851 850 problems = 0
852 851
853 852 # encoding
854 853 ui.status(_("Checking encoding (%s)...\n") % util._encoding)
855 854 try:
856 855 util.fromlocal("test")
857 856 except util.Abort, inst:
858 857 ui.write(" %s\n" % inst)
859 858 ui.write(_(" (check that your locale is properly set)\n"))
860 859 problems += 1
861 860
862 861 # compiled modules
863 862 ui.status(_("Checking extensions...\n"))
864 863 try:
865 864 import bdiff, mpatch, base85
866 865 except Exception, inst:
867 866 ui.write(" %s\n" % inst)
868 867 ui.write(_(" One or more extensions could not be found"))
869 868 ui.write(_(" (check that you compiled the extensions)\n"))
870 869 problems += 1
871 870
872 871 # templates
873 872 ui.status(_("Checking templates...\n"))
874 873 try:
875 874 import templater
876 875 t = templater.templater(templater.templatepath("map-cmdline.default"))
877 876 except Exception, inst:
878 877 ui.write(" %s\n" % inst)
879 878 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
880 879 problems += 1
881 880
882 881 # patch
883 882 ui.status(_("Checking patch...\n"))
884 883 path = os.environ.get('PATH', '')
885 884 patcher = util.find_in_path('gpatch', path,
886 885 util.find_in_path('patch', path, None))
887 886 if not patcher:
888 887 ui.write(_(" Can't find patch or gpatch in PATH\n"))
889 888 ui.write(_(" (specify a patch utility in your .hgrc file)\n"))
890 889 problems += 1
891 890 else:
892 891 # actually attempt a patch here
893 892 a = "1\n2\n3\n4\n"
894 893 b = "1\n2\n3\ninsert\n4\n"
895 894 d = mdiff.unidiff(a, None, b, None, "a")
896 895 fa = writetemp(a)
897 896 fd = writetemp(d)
898 897 fp = os.popen('%s %s %s' % (patcher, fa, fd))
899 898 files = []
900 899 output = ""
901 900 for line in fp:
902 901 output += line
903 902 if line.startswith('patching file '):
904 903 pf = util.parse_patch_output(line.rstrip())
905 904 files.append(pf)
906 905 if files != [fa]:
907 906 ui.write(_(" unexpected patch output!"))
908 907 ui.write(_(" (you may have an incompatible version of patch)\n"))
909 908 ui.write(output)
910 909 problems += 1
911 910 a = file(fa).read()
912 911 if a != b:
913 912 ui.write(_(" patch test failed!"))
914 913 ui.write(_(" (you may have an incompatible version of patch)\n"))
915 914 problems += 1
916 915 os.unlink(fa)
917 916 os.unlink(fd)
918 917
919 918 # merge helper
920 919 ui.status(_("Checking merge helper...\n"))
921 920 cmd = (os.environ.get("HGMERGE") or ui.config("ui", "merge")
922 921 or "hgmerge")
923 922 cmdpath = util.find_in_path(cmd, path)
924 923 if not cmdpath:
925 924 cmdpath = util.find_in_path(cmd.split()[0], path)
926 925 if not cmdpath:
927 926 if cmd == 'hgmerge':
928 927 ui.write(_(" No merge helper set and can't find default"
929 928 " hgmerge script in PATH\n"))
930 929 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
931 930 else:
932 931 ui.write(_(" Can't find merge helper '%s' in PATH\n") % cmd)
933 932 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
934 933 problems += 1
935 934 else:
936 935 # actually attempt a patch here
937 936 fa = writetemp("1\n2\n3\n4\n")
938 937 fl = writetemp("1\n2\n3\ninsert\n4\n")
939 938 fr = writetemp("begin\n1\n2\n3\n4\n")
940 939 r = os.system('%s %s %s %s' % (cmd, fl, fa, fr))
941 940 if r:
942 941 ui.write(_(" got unexpected merge error %d!") % r)
943 942 problems += 1
944 943 m = file(fl).read()
945 944 if m != "begin\n1\n2\n3\ninsert\n4\n":
946 945 ui.write(_(" got unexpected merge results!") % r)
947 946 ui.write(_(" (your merge helper may have the"
948 947 " wrong argument order)\n"))
949 948 ui.write(m)
950 949 os.unlink(fa)
951 950 os.unlink(fl)
952 951 os.unlink(fr)
953 952
954 953 # editor
955 954 ui.status(_("Checking commit editor...\n"))
956 955 editor = (os.environ.get("HGEDITOR") or
957 956 ui.config("ui", "editor") or
958 957 os.environ.get("EDITOR", "vi"))
959 958 cmdpath = util.find_in_path(editor, path)
960 959 if not cmdpath:
961 960 cmdpath = util.find_in_path(editor.split()[0], path)
962 961 if not cmdpath:
963 962 if editor == 'vi':
964 963 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
965 964 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
966 965 else:
967 966 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
968 967 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
969 968 problems += 1
970 969
971 970 # check username
972 971 ui.status(_("Checking username...\n"))
973 972 user = os.environ.get("HGUSER")
974 973 if user is None:
975 974 user = ui.config("ui", "username")
976 975 if user is None:
977 976 user = os.environ.get("EMAIL")
978 977 if not user:
979 978 ui.warn(" ")
980 979 ui.username()
981 980 ui.write(_(" (specify a username in your .hgrc file)\n"))
982 981
983 982 if not problems:
984 983 ui.status(_("No problems detected\n"))
985 984 else:
986 985 ui.write(_("%s problems detected,"
987 986 " please check your install!\n") % problems)
988 987
989 988 return problems
990 989
def debugrename(ui, repo, file1, *pats, **opts):
    """dump rename information"""

    ctx = repo.changectx(opts.get('rev', 'tip'))
    walker = cmdutil.walk(repo, (file1,) + pats, opts, ctx.node())
    for src, abspath, relpath, exact in walker:
        rename = ctx.filectx(abspath).renamed()
        if rename:
            ui.write(_("%s renamed from %s:%s\n") %
                     (relpath, rename[0], hex(rename[1])))
        else:
            ui.write(_("%s not renamed\n") % relpath)
1002 1001
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    matched = list(cmdutil.walk(repo, pats, opts))
    if not matched:
        return
    # size the columns to the longest absolute and relative paths
    abswidth = max([len(a) for (s, a, r, e) in matched])
    relwidth = max([len(r) for (s, a, r, e) in matched])
    fmt = '%%s %%-%ds %%-%ds %%s' % (abswidth, relwidth)
    for src, abspath, relpath, exact in matched:
        line = fmt % (src, abspath, relpath, exact and 'exact' or '')
        ui.write("%s\n" % line.rstrip())
1014 1013
def diff(ui, repo, *pats, **opts):
    """diff repository (or selected files)

    Show differences between revisions for the specified files.

    Differences between files are shown using the unified diff format.

    NOTE: diff may generate unexpected results for merges, as it will
    default to comparing against the working directory's first parent
    changeset if no revisions are specified.

    When two revision arguments are given, then changes are shown
    between those revisions. If only one revision is specified then
    that revision is compared to the working directory, and, when no
    revisions are specified, the working directory files are compared
    to its parent.

    Without the -a option, diff will avoid generating diffs of files
    it detects as binary. With -a, diff will generate a diff anyway,
    probably with undesirable results.
    """
    # resolve the requested revision pair; either side may be None,
    # meaning the working directory
    node1, node2 = cmdutil.revpair(repo, opts['rev'])
    files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
    diffopts = patch.diffopts(ui, opts)
    patch.diff(repo, node1, node2, files, match=matchfn, opts=diffopts)
1042 1041
def export(ui, repo, *changesets, **opts):
    """dump the header and diffs for one or more changesets

    Print the changeset header and diffs for one or more revisions.

    The information shown in the changeset header is: author,
    changeset hash, parent(s) and commit comment.

    NOTE: export may generate unexpected diff output for merge changesets,
    as it will compare the merge changeset against its first parent only.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are as follows:

    %%   literal "%" character
    %H   changeset hash (40 bytes of hexadecimal)
    %N   number of patches being generated
    %R   changeset revision number
    %b   basename of the exporting repository
    %h   short-form changeset hash (12 bytes of hexadecimal)
    %n   zero-padded sequence number, starting at 1
    %r   zero-padded changeset revision number

    Without the -a option, export will avoid generating diffs of files
    it detects as binary. With -a, export will generate a diff anyway,
    probably with undesirable results.

    With the --switch-parent option, the diff will be against the second
    parent. It can be useful to review a merge.
    """
    if not changesets:
        raise util.Abort(_("export requires at least one changeset"))
    revs = cmdutil.revrange(repo, changesets)
    # pluralize the progress note depending on how many patches we emit
    if len(revs) > 1:
        ui.note(_('exporting patches:\n'))
    else:
        ui.note(_('exporting patch:\n'))
    # patch.export does the actual header + diff generation
    patch.export(repo, revs, template=opts['output'],
                 switch_parent=opts['switch_parent'],
                 opts=patch.diffopts(ui, opts))
1083 1082
def grep(ui, repo, pattern, *pats, **opts):
    """search for a pattern in specified files and revisions

    Search revisions of files for a regular expression.

    This command behaves differently than Unix grep. It only accepts
    Python/Perl regexps. It searches repository history, not the
    working directory. It always prints the revision number in which
    a match appears.

    By default, grep only prints output for the first revision of a
    file in which it finds a match. To get it to print every revision
    that contains a change in match status ("-" for a match that
    becomes a non-match, or "+" for a non-match that becomes a match),
    use the --all flag.
    """
    reflags = 0
    if opts['ignore_case']:
        reflags |= re.I
    regexp = re.compile(pattern, reflags)
    # with --print0 both field separator and line terminator become NUL
    sep, eol = ':', '\n'
    if opts['print0']:
        sep = eol = '\0'

    # cache filelog objects so each file is opened at most once
    fcache = {}
    def getfile(fn):
        if fn not in fcache:
            fcache[fn] = repo.file(fn)
        return fcache[fn]

    def matchlines(body):
        # yield (line number, match start col, match end col, line text)
        # for every regexp match in body
        begin = 0
        linenum = 0
        while True:
            match = regexp.search(body, begin)
            if not match:
                break
            mstart, mend = match.span()
            linenum += body.count('\n', begin, mstart) + 1
            lstart = body.rfind('\n', begin, mstart) + 1 or begin
            lend = body.find('\n', mend)
            yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
            begin = lend + 1

    class linestate(object):
        # one matched line; equality is on line text only, so that
        # difflinestates can diff match sets across revisions
        def __init__(self, line, linenum, colstart, colend):
            self.line = line
            self.linenum = linenum
            self.colstart = colstart
            self.colend = colend

        def __eq__(self, other):
            return self.line == other.line

    # matches: rev -> {filename -> [linestate, ...]}, per walk window
    matches = {}
    # copies: rev -> {filename -> copy source}, tracked under --follow
    copies = {}
    def grepbody(fn, rev, body):
        matches[rev].setdefault(fn, [])
        m = matches[rev][fn]
        for lnum, cstart, cend, line in matchlines(body):
            s = linestate(line, lnum, cstart, cend)
            m.append(s)

    def difflinestates(a, b):
        # yield ('+'/'-', linestate) transitions between two match sets
        sm = difflib.SequenceMatcher(None, a, b)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag == 'insert':
                for i in xrange(blo, bhi):
                    yield ('+', b[i])
            elif tag == 'delete':
                for i in xrange(alo, ahi):
                    yield ('-', a[i])
            elif tag == 'replace':
                for i in xrange(alo, ahi):
                    yield ('-', a[i])
                for i in xrange(blo, bhi):
                    yield ('+', b[i])

    # prev: filename -> last revision already displayed for it
    prev = {}
    def display(fn, rev, states, prevstates):
        # print the matches for one file at one revision; returns
        # whether anything was printed
        found = False
        filerevmatches = {}
        r = prev.get(fn, -1)
        if opts['all']:
            iter = difflinestates(states, prevstates)
        else:
            iter = [('', l) for l in prevstates]
        for change, l in iter:
            cols = [fn, str(r)]
            if opts['line_number']:
                cols.append(str(l.linenum))
            if opts['all']:
                cols.append(change)
            if opts['user']:
                cols.append(ui.shortuser(get(r)[1]))
            if opts['files_with_matches']:
                # emit each (file, rev) pair at most once
                c = (fn, r)
                if c in filerevmatches:
                    continue
                filerevmatches[c] = 1
            else:
                cols.append(l.line)
            ui.write(sep.join(cols), eol)
            found = True
        return found

    fstate = {}
    skip = {}
    get = util.cachefunc(lambda r: repo.changectx(r).changeset())
    changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
    found = False
    follow = opts.get('follow')
    # walkchangerevs feeds us 'window'/'add'/'iter' events; collect
    # matches on 'add', display on 'iter' (reverse order within window)
    for st, rev, fns in changeiter:
        if st == 'window':
            matches.clear()
        elif st == 'add':
            mf = repo.changectx(rev).manifest()
            matches[rev] = {}
            for fn in fns:
                if fn in skip:
                    continue
                fstate.setdefault(fn, {})
                try:
                    grepbody(fn, rev, getfile(fn).read(mf[fn]))
                    if follow:
                        copied = getfile(fn).renamed(mf[fn])
                        if copied:
                            copies.setdefault(rev, {})[fn] = copied[0]
                except KeyError:
                    # file not in this revision's manifest
                    pass
        elif st == 'iter':
            states = matches[rev].items()
            states.sort()
            for fn, m in states:
                copy = copies.get(rev, {}).get(fn)
                if fn in skip:
                    # once a file is done, its copy source is done too
                    if copy:
                        skip[copy] = True
                    continue
                if fn in prev or fstate[fn]:
                    r = display(fn, rev, m, fstate[fn])
                    found = found or r
                    if r and not opts['all']:
                        skip[fn] = True
                        if copy:
                            skip[copy] = True
                fstate[fn] = m
                if copy:
                    fstate[copy] = m
                prev[fn] = rev

    # flush any pending state for files whose first match was never shown
    fstate = fstate.items()
    fstate.sort()
    for fn, state in fstate:
        if fn in skip:
            continue
        if fn not in copies.get(prev[fn], {}):
            found = display(fn, rev, {}, state) or found
    return (not found and 1) or 0
1243 1242
def heads(ui, repo, **opts):
    """show current repository heads

    Show all repository head changesets.

    Repository "heads" are changesets that don't have children
    changesets. They are where development generally takes place and
    are the usual targets for update and merge operations.
    """
    # restrict to heads reachable from --rev when one is given
    if opts['rev']:
        nodes = repo.heads(repo.lookup(opts['rev']))
    else:
        nodes = repo.heads()
    displayer = cmdutil.show_changeset(ui, repo, opts)
    for node in nodes:
        displayer.show(changenode=node)
1260 1259
def help_(ui, name=None, with_version=False):
    """show help for a command, extension, or list of commands

    With no arguments, print a list of commands and short help.

    Given a command name, print help for that command.

    Given an extension name, print help for that extension, and the
    commands it provides."""
    # option tables gathered by the helpers below; printed at the end
    option_lists = []

    def helpcmd(name):
        # show help for a single command; raises UnknownCommand through
        # findcmd if the name does not resolve
        if with_version:
            version_(ui)
            ui.write('\n')
        aliases, i = findcmd(ui, name)
        # synopsis
        ui.write("%s\n\n" % i[2])

        # description
        doc = i[0].__doc__
        if not doc:
            doc = _("(No help text available)")
        if ui.quiet:
            # quiet mode: first line of the docstring only
            doc = doc.splitlines(0)[0]
        ui.write("%s\n" % doc.rstrip())

        if not ui.quiet:
            # aliases
            if len(aliases) > 1:
                ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))

            # options
            if i[1]:
                option_lists.append(("options", i[1]))

    def helplist(select=None):
        # print the one-line summaries of all (selected) commands
        h = {}
        cmds = {}
        for c, e in table.items():
            f = c.split("|", 1)[0]
            if select and not select(f):
                continue
            if name == "shortlist" and not f.startswith("^"):
                continue
            f = f.lstrip("^")
            if not ui.debugflag and f.startswith("debug"):
                # debug commands are hidden unless --debug
                continue
            doc = e[0].__doc__
            if not doc:
                doc = _("(No help text available)")
            h[f] = doc.splitlines(0)[0].rstrip()
            cmds[f] = c.lstrip("^")

        fns = h.keys()
        fns.sort()
        m = max(map(len, fns))
        for f in fns:
            if ui.verbose:
                commands = cmds[f].replace("|",", ")
                ui.write(" %s:\n %s\n"%(commands, h[f]))
            else:
                ui.write(' %-*s %s\n' % (m, f, h[f]))

    def helptopic(name):
        # show help for a named topic from help.helptable
        v = None
        for i in help.helptable:
            l = i.split('|')
            if name in l:
                v = i
                header = l[-1]
        if not v:
            raise UnknownCommand(name)

        # description
        doc = help.helptable[v]
        if not doc:
            doc = _("(No help text available)")
        if callable(doc):
            # topics may be lazily generated
            doc = doc()

        ui.write("%s\n" % header)
        ui.write("%s\n" % doc.rstrip())

    def helpext(name):
        # show help for an extension and list the commands it defines
        try:
            mod = findext(name)
        except KeyError:
            raise UnknownCommand(name)

        doc = (mod.__doc__ or _('No help text available')).splitlines(0)
        ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
        for d in doc[1:]:
            ui.write(d, '\n')

        ui.status('\n')

        try:
            ct = mod.cmdtable
        except AttributeError:
            ui.status(_('no commands defined\n'))
            return

        if ui.verbose:
            ui.status(_('list of commands:\n\n'))
        else:
            ui.status(_('list of commands (use "hg help -v %s" '
                        'to show aliases and global options):\n\n') % name)

        modcmds = dict.fromkeys([c.split('|', 1)[0] for c in ct])
        helplist(modcmds.has_key)

    if name and name != 'shortlist':
        # try each interpretation of the name in turn; remember the last
        # UnknownCommand and re-raise it if none succeeded
        i = None
        for f in (helpcmd, helptopic, helpext):
            try:
                f(name)
                i = None
                break
            except UnknownCommand, inst:
                i = inst
        if i:
            raise i

    else:
        # program name
        if ui.verbose or with_version:
            version_(ui)
        else:
            ui.status(_("Mercurial Distributed SCM\n"))
        ui.status('\n')

        # list of commands
        if name == "shortlist":
            ui.status(_('basic commands (use "hg help" '
                        'for the full list or option "-v" for details):\n\n'))
        elif ui.verbose:
            ui.status(_('list of commands:\n\n'))
        else:
            ui.status(_('list of commands (use "hg help -v" '
                        'to show aliases and global options):\n\n'))

        helplist()

    # global options
    if ui.verbose:
        option_lists.append(("global options", globalopts))

    # list all option lists
    opt_output = []
    for title, options in option_lists:
        opt_output.append(("\n%s:\n" % title, None))
        for shortopt, longopt, default, desc in options:
            if "DEPRECATED" in desc and not ui.verbose: continue
            opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
                                          longopt and " --%s" % longopt),
                              "%s%s" % (desc,
                                        default
                                        and _(" (default: %s)") % default
                                        or "")))

    if opt_output:
        # align descriptions on the widest option column
        opts_len = max([len(line[0]) for line in opt_output if line[1]])
        for first, second in opt_output:
            if second:
                ui.write(" %-*s %s\n" % (opts_len, first, second))
            else:
                ui.write("%s\n" % first)
1429 1428
def identify(ui, repo):
    """print information about the working copy

    Print a short summary of the current state of the repo.

    This summary identifies the repository state using one or two parent
    hash identifiers, followed by a "+" if there are uncommitted changes
    in the working directory, followed by a list of tags for this revision.
    """
    # null parents are dropped; an empty list means an empty repository
    parents = [p for p in repo.dirstate.parents() if p != nullid]
    if not parents:
        ui.write(_("unknown\n"))
        return

    # full hashes with --debug, short ones otherwise
    hexfunc = ui.debugflag and hex or short
    modified, added, removed, deleted = repo.status()[:4]
    # trailing "+" flags any uncommitted change
    output = ["%s%s" %
              ('+'.join([hexfunc(parent) for parent in parents]),
               (modified or added or removed or deleted) and "+" or "")]

    if not ui.quiet:

        # the default branch name is left implicit
        branch = util.tolocal(repo.workingctx().branch())
        if branch != 'default':
            output.append("(%s)" % branch)

        # multiple tags for a single parent separated by '/'
        parenttags = ['/'.join(tags)
                      for tags in map(repo.nodetags, parents) if tags]
        # tags for multiple parents separated by ' + '
        if parenttags:
            output.append(' + '.join(parenttags))

    ui.write("%s\n" % ' '.join(output))
1464 1463
def import_(ui, repo, patch1, *patches, **opts):
    """import an ordered set of patches

    Import a list of patches and commit them individually.

    If there are outstanding changes in the working directory, import
    will abort unless given the -f flag.

    You can import a patch straight from a mail message. Even patches
    as attachments work (body part must be type text/plain or
    text/x-patch to be used). From and Subject headers of email
    message are used as default committer and commit message. All
    text/plain body parts before first diff are added to commit
    message.

    If imported patch was generated by hg export, user and description
    from patch override values from message headers and body. Values
    given on command line with -m and -u override these.

    To read a patch from standard input, use patch name "-".
    """
    # the signature guarantees at least one patch; fold into one tuple
    patches = (patch1,) + patches

    if not opts['force']:
        # refuse to apply on top of uncommitted changes unless -f
        bail_if_changed(repo)

    d = opts["base"]        # directory prepended to each patch name
    strip = opts["strip"]   # leading path components to strip (patch -p)

    # hold both locks across all patches: each one modifies the working
    # dir (wlock) and creates a changeset (lock)
    wlock = repo.wlock()
    lock = repo.lock()

    for p in patches:
        pf = os.path.join(d, p)

        if pf == '-':
            ui.status(_("applying patch from stdin\n"))
            tmpname, message, user, date = patch.extract(ui, sys.stdin)
        else:
            ui.status(_("applying %s\n") % p)
            tmpname, message, user, date = patch.extract(ui, file(pf))

        # extract() returns a None filename when no diff was found in
        # the input (e.g. a mail with no patch attached)
        if tmpname is None:
            raise util.Abort(_('no diffs found'))

        try:
            # commit message priority: -m/-l from the command line,
            # then the message embedded in the patch, then the editor
            cmdline_message = logmessage(opts)
            if cmdline_message:
                # pickup the cmdline msg
                message = cmdline_message
            elif message:
                # pickup the patch msg
                message = message.strip()
            else:
                # launch the editor
                message = None
            ui.debug(_('message:\n%s\n') % message)

            files = {}
            try:
                fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
                                   files=files)
            finally:
                # record adds/removes in the dirstate even when the
                # patch failed part-way through
                files = patch.updatedir(ui, repo, files, wlock=wlock)
            repo.commit(files, message, user, date, wlock=wlock, lock=lock)
        finally:
            # the extracted diff lives in a temp file; always remove it
            os.unlink(tmpname)
1532 1531
def incoming(ui, repo, source="default", **opts):
    """show new changesets found in source

    Show new changesets found in the specified path/URL or the default
    pull location. These are the changesets that would be pulled if a pull
    was requested.

    For remote repository, using --bundle avoids downloading the changesets
    twice if the incoming is followed by a pull.

    See pull for valid source format details.
    """
    source = ui.expandpath(source)
    setremoteconfig(ui, opts)

    other = hg.repository(ui, source)
    ui.status(_('comparing with %s\n') % source)
    incoming = repo.findincoming(other, force=opts["force"])
    if not incoming:
        # nothing incoming: remove any stale bundle file from a prior
        # run.  (was a bare "except: pass", which also swallowed
        # KeyboardInterrupt and hid a pointless unlink('') when no
        # --bundle was given)
        if opts["bundle"]:
            try:
                os.unlink(opts["bundle"])
            except OSError:
                pass
        ui.status(_("no changes found\n"))
        return 1

    cleanup = None
    try:
        fname = opts["bundle"]
        if fname or not other.local():
            # create a bundle (uncompressed if other repo is not local)
            cg = other.changegroup(incoming, "incoming")
            bundletype = other.local() and "HG10BZ" or "HG10UN"
            fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
        # keep written bundle?
        if opts["bundle"]:
            cleanup = None
        if not other.local():
            # use the created uncompressed bundlerepo
            other = bundlerepo.bundlerepository(ui, repo.root, fname)

        revs = None
        if opts['rev']:
            revs = [other.lookup(rev) for rev in opts['rev']]
        o = other.changelog.nodesbetween(incoming, revs)[0]
        if opts['newest_first']:
            o.reverse()
        displayer = cmdutil.show_changeset(ui, other, opts)
        for n in o:
            # a merge changeset has two non-null parents
            parents = [p for p in other.changelog.parents(n) if p != nullid]
            if opts['no_merges'] and len(parents) == 2:
                continue
            displayer.show(changenode=n)
    finally:
        # close the bundlerepo (if we made one) and remove the
        # temporary bundle file unless the user asked to keep it
        if hasattr(other, 'close'):
            other.close()
        if cleanup:
            os.unlink(cleanup)
1591 1590
def init(ui, dest=".", **opts):
    """create a new repository in the given directory

    Initialize a new repository in the given directory. If the given
    directory does not exist, it is created.

    If no directory is given, the current directory is used.

    It is possible to specify an ssh:// URL as the destination.
    Look at the help text for the pull command for important details
    about ssh:// URLs.
    """
    # apply --ssh/--remotecmd overrides before touching the destination
    setremoteconfig(ui, opts)
    # create=1 also makes the target directory when it is missing
    hg.repository(ui, dest, create=1)
1606 1605
def locate(ui, repo, *pats, **opts):
    """locate files matching specific patterns

    Print all files under Mercurial control whose names match the
    given patterns.

    This command searches the entire repository by default. To search
    just the current directory and its subdirectories, use "--include .".

    If no patterns are given to match, this command prints all file
    names.

    If you want to feed the output of this command into the "xargs"
    command, use the "-0" option to both this command and "xargs".
    This will avoid the problem of "xargs" treating single filenames
    that contain white space as multiple filenames.
    """
    # NUL-terminate entries for "xargs -0" when --print0 was given
    sep = opts['print0'] and '\0' or '\n'
    rev = opts['rev']
    if rev:
        node = repo.lookup(rev)
    else:
        node = None

    found = False
    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
                                             default='relglob'):
        # without a revision, skip files unknown to the dirstate
        if not node and repo.dirstate.state(abs) == '?':
            continue
        if opts['fullpath']:
            name = os.path.join(repo.root, abs)
        else:
            name = (pats and rel) or abs
        ui.write(name, sep)
        found = True

    # exit status 1 when nothing matched
    return found and 0 or 1
1643 1642
def log(ui, repo, *pats, **opts):
    """show revision history of entire repository or files

    Print the revision history of the specified files or the entire
    project.

    File history is shown without following rename or copy history of
    files. Use -f/--follow with a file name to follow history across
    renames and copies. --follow without a file name will only show
    ancestors or descendants of the starting revision. --follow-first
    only follows the first parent of merge revisions.

    If no revision range is specified, the default is tip:0 unless
    --follow is set, in which case the working directory parent is
    used as the starting revision.

    By default this command outputs: changeset id and hash, tags,
    non-trivial parents, user, date and time, and a summary for each
    commit. When the -v/--verbose switch is used, the list of changed
    files and full commit message is shown.

    NOTE: log -p may generate unexpected diff output for merge
    changesets, as it will compare the merge changeset against its
    first parent only. Also, the files: list will only reflect files
    that are different from BOTH parents.

    """

    # memoized changeset accessor shared with the walker below
    get = util.cachefunc(lambda r: repo.changectx(r).changeset())
    changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)

    if opts['limit']:
        try:
            limit = int(opts['limit'])
        except ValueError:
            raise util.Abort(_('limit must be a positive integer'))
        if limit <= 0: raise util.Abort(_('limit must be positive'))
    else:
        # no --limit: effectively unbounded
        limit = sys.maxint
    count = 0

    # rename lookups (--copies) only need to scan filelogs up to the
    # highest displayed revision
    if opts['copies'] and opts['rev']:
        endrev = max(cmdutil.revrange(repo, opts['rev'])) + 1
    else:
        endrev = repo.changelog.count()
    rcache = {}   # fn -> {changerev: rename info}
    ncache = {}   # fn -> {filenode: rename info}
    dcache = []   # one-entry cache: [manifest node, its readdelta()]
    def getrenamed(fn, rev, man):
        '''looks up all renames for a file (up to endrev) the first
        time the file is given. It indexes on the changerev and only
        parses the manifest if linkrev != changerev.
        Returns rename info for fn at changerev rev.'''
        if fn not in rcache:
            # first time we see fn: scan its whole filelog once
            rcache[fn] = {}
            ncache[fn] = {}
            fl = repo.file(fn)
            for i in xrange(fl.count()):
                node = fl.node(i)
                lr = fl.linkrev(node)
                renamed = fl.renamed(node)
                rcache[fn][lr] = renamed
                if renamed:
                    ncache[fn][node] = renamed
                if lr >= endrev:
                    break
        if rev in rcache[fn]:
            return rcache[fn][rev]
        # linkrev != rev: fall back to finding fn's node via the
        # manifest of this changeset
        mr = repo.manifest.rev(man)
        if repo.manifest.parentrevs(mr) != (mr - 1, nullrev):
            # manifest is not a simple linear child: full find()
            return ncache[fn].get(repo.manifest.find(man, fn)[0])
        if not dcache or dcache[0] != man:
            dcache[:] = [man, repo.manifest.readdelta(man)]
        if fn in dcache[1]:
            return ncache[fn].get(dcache[1][fn])
        return None

    df = False
    if opts["date"]:
        df = util.matchdate(opts["date"])

    displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
    # the walker emits ('add', rev, fns) when a rev becomes known and
    # ('iter', rev, None) when it is ready to be flushed in order
    for st, rev, fns in changeiter:
        if st == 'add':
            changenode = repo.changelog.node(rev)
            parents = [p for p in repo.changelog.parentrevs(rev)
                       if p != nullrev]
            if opts['no_merges'] and len(parents) == 2:
                continue
            if opts['only_merges'] and len(parents) != 2:
                continue

            if df:
                # --date filter against the changeset's timestamp
                changes = get(rev)
                if not df(changes[2][0]):
                    continue

            if opts['keyword']:
                # match keywords against user, description and the
                # first 20 changed file names (case-insensitive)
                changes = get(rev)
                miss = 0
                for k in [kw.lower() for kw in opts['keyword']]:
                    if not (k in changes[1].lower() or
                            k in changes[4].lower() or
                            k in " ".join(changes[3][:20]).lower()):
                        miss = 1
                        break
                if miss:
                    continue

            copies = []
            if opts.get('copies') and rev:
                mf = get(rev)[0]
                for fn in get(rev)[3]:
                    rename = getrenamed(fn, rev, mf)
                    if rename:
                        copies.append((fn, rename[0]))
            displayer.show(rev, changenode, copies=copies)
        elif st == 'iter':
            # only revisions actually displayed count toward --limit
            if count == limit: break
            if displayer.flush(rev):
                count += 1
1765 1764
def manifest(ui, repo, rev=None):
    """output the current or given revision of the project manifest

    Print a list of version controlled files for the given revision.
    If no revision is given, the parent of the working directory is used,
    or tip if no revision is checked out.

    The manifest is the list of files being version controlled. If no revision
    is given then the first parent of the working directory is used.

    With -v flag, print file permissions. With --debug flag, print
    file revision hashes.
    """

    mf = repo.changectx(rev).manifest()
    names = list(mf.keys())
    names.sort()

    for name in names:
        if ui.debugflag:
            # file revision hash, padded to full nodeid width
            ui.write("%40s " % hex(mf[name]))
        if ui.verbose:
            # unix-style permission bits derived from the exec flag
            ui.write("%3s " % (mf.execf(name) and "755" or "644"))
        ui.write("%s\n" % name)
1790 1789
def merge(ui, repo, node=None, force=None):
    """merge working directory with another revision

    Merge the contents of the current working directory and the
    requested revision. Files that changed between either parent are
    marked as changed for the next commit and a commit must be
    performed before any further updates are allowed.

    If no revision is specified, the working directory's parent is a
    head revision, and the repository contains exactly one other head,
    the other head is merged with by default. Otherwise, an explicit
    revision to merge with must be provided.
    """

    if not node:
        # no explicit revision: pick the "other" head, which only
        # works when there are exactly two heads
        heads = repo.heads()
        nheads = len(heads)
        if nheads > 2:
            raise util.Abort(_('repo has %d heads - '
                               'please merge with an explicit rev') %
                             nheads)
        if nheads == 1:
            raise util.Abort(_('there is nothing to merge - '
                               'use "hg update" instead'))
        first_parent = repo.dirstate.parents()[0]
        if first_parent not in heads:
            raise util.Abort(_('working dir not at a head rev - '
                               'use "hg update" or merge with an explicit rev'))
        if first_parent == heads[0]:
            node = heads[-1]
        else:
            node = heads[0]
    return hg.merge(repo, node, force=force)
1820 1819
def outgoing(ui, repo, dest=None, **opts):
    """show changesets not found in destination

    Show changesets not found in the specified destination repository or
    the default push location. These are the changesets that would be pushed
    if a push was requested.

    See pull for valid destination format details.
    """
    dest = ui.expandpath(dest or 'default-push', dest or 'default')
    setremoteconfig(ui, opts)
    wanted = None
    if opts['rev']:
        wanted = [repo.lookup(r) for r in opts['rev']]

    remote = hg.repository(ui, dest)
    ui.status(_('comparing with %s\n') % dest)
    bases = repo.findoutgoing(remote, force=opts['force'])
    if not bases:
        ui.status(_("no changes found\n"))
        return 1

    nodes = repo.changelog.nodesbetween(bases, wanted)[0]
    if opts['newest_first']:
        nodes.reverse()
    displayer = cmdutil.show_changeset(ui, repo, opts)
    for node in nodes:
        # a merge changeset has two non-null parents
        realparents = [p for p in repo.changelog.parents(node)
                       if p != nullid]
        if opts['no_merges'] and len(realparents) == 2:
            continue
        displayer.show(changenode=node)
1851 1850
def parents(ui, repo, file_=None, **opts):
    """show the parents of the working dir or revision

    Print the working directory's parent revisions.
    """
    rev = opts.get('rev')
    if not rev:
        # no -r: report the dirstate parents directly
        nodes = repo.dirstate.parents()
    else:
        # with a file, anchor on that file's revision graph
        if file_:
            ctx = repo.filectx(file_, changeid=rev)
        else:
            ctx = repo.changectx(rev)
        nodes = [pctx.node() for pctx in ctx.parents()]

    displayer = cmdutil.show_changeset(ui, repo, opts)
    for node in nodes:
        if node != nullid:
            displayer.show(changenode=node)
1871 1870
def paths(ui, repo, search=None):
    """show definition of symbolic path names

    Show definition of symbolic path name NAME. If no name is given, show
    definition of available names.

    Path names are defined in the [paths] section of /etc/mercurial/hgrc
    and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
    """
    entries = ui.configitems("paths")
    if not search:
        # no name given: dump every definition
        for name, path in entries:
            ui.write("%s = %s\n" % (name, path))
        return
    for name, path in entries:
        if name == search:
            ui.write("%s\n" % path)
            return
    ui.warn(_("not found!\n"))
    return 1
1891 1890
def postincoming(ui, repo, modheads, optupdate):
    # Common tail for pull/unbundle: update the working dir when it is
    # unambiguous, otherwise print a hint about what to run next.
    if modheads == 0:
        return
    if optupdate and modheads == 1:
        # exactly one head changed: safe to update to tip
        return hg.update(repo, repo.changelog.tip()) # update
    if optupdate:
        ui.status(_("not updating, since new heads added\n"))
    if modheads > 1:
        ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
    else:
        ui.status(_("(run 'hg update' to get a working copy)\n"))
1904 1903
def pull(ui, repo, source="default", **opts):
    """pull changes from the specified source

    Pull changes from a remote repository to a local one.

    This finds all changes from the repository at the specified path
    or URL and adds them to the local repository. By default, this
    does not update the copy of the project in the working directory.

    Valid URLs are of the form:

      local/filesystem/path (or file://local/filesystem/path)
      http://[user@]host[:port]/[path]
      https://[user@]host[:port]/[path]
      ssh://[user@]host[:port]/[path]
      static-http://host[:port]/[path]

    Paths in the local filesystem can either point to Mercurial
    repositories or to bundle files (as created by 'hg bundle' or
    'hg incoming --bundle'). The static-http:// protocol, albeit slow,
    allows access to a Mercurial repository where you simply use a web
    server to publish the .hg directory as static content.

    Some notes about using SSH with Mercurial:
    - SSH requires an accessible shell account on the destination machine
      and a copy of hg in the remote path or specified with as remotecmd.
    - path is relative to the remote user's home directory by default.
      Use an extra slash at the start of a path to specify an absolute path:
        ssh://example.com//tmp/repository
    - Mercurial doesn't use its own compression via SSH; the right thing
      to do is to configure it in your ~/.ssh/config, e.g.:
        Host *.mylocalnetwork.example.com
          Compression no
        Host *
          Compression yes
      Alternatively specify "ssh -C" as your ssh command in your hgrc or
      with the --ssh command line option.
    """
    source = ui.expandpath(source)
    setremoteconfig(ui, opts)

    remote = hg.repository(ui, source)
    ui.status(_('pulling from %s\n') % (source))
    heads = None
    if opts['rev']:
        # --rev requires the remote side to resolve names to nodes
        if 'lookup' not in remote.capabilities:
            error = _("Other repository doesn't support revision lookup, so a rev cannot be specified.")
            raise util.Abort(error)
        heads = [remote.lookup(r) for r in opts['rev']]
    modheads = repo.pull(remote, heads=heads, force=opts['force'])
    return postincoming(ui, repo, modheads, opts['update'])
1957 1956
def push(ui, repo, dest=None, **opts):
    """push changes to the specified destination

    Push changes from the local repository to the given destination.

    This is the symmetrical operation for pull. It helps to move
    changes from the current repository to a different one. If the
    destination is local this is identical to a pull in that directory
    from the current one.

    By default, push will refuse to run if it detects the result would
    increase the number of remote heads. This generally indicates the
    client has forgotten to sync and merge before pushing.

    Valid URLs are of the form:

      local/filesystem/path (or file://local/filesystem/path)
      ssh://[user@]host[:port]/[path]
      http://[user@]host[:port]/[path]
      https://[user@]host[:port]/[path]

    Look at the help text for the pull command for important details
    about ssh:// URLs.

    Pushing to http:// and https:// URLs is only possible, if this
    feature is explicitly enabled on the remote Mercurial server.
    """
    # help-text fix: "indicates the the client" -> "indicates the client"
    dest = ui.expandpath(dest or 'default-push', dest or 'default')
    setremoteconfig(ui, opts)

    other = hg.repository(ui, dest)
    ui.status('pushing to %s\n' % (dest))
    revs = None
    if opts['rev']:
        revs = [repo.lookup(rev) for rev in opts['rev']]
    r = repo.push(other, opts['force'], revs=revs)
    # push() returns 0 on failure; invert for the command exit code
    return r == 0
1995 1994
1996 1995 def rawcommit(ui, repo, *pats, **opts):
1997 1996 """raw commit interface (DEPRECATED)
1998 1997
1999 1998 (DEPRECATED)
2000 1999 Lowlevel commit, for use in helper scripts.
2001 2000
2002 2001 This command is not intended to be used by normal users, as it is
2003 2002 primarily useful for importing from other SCMs.
2004 2003
2005 2004 This command is now deprecated and will be removed in a future
2006 2005 release, please use debugsetparents and commit instead.
2007 2006 """
2008 2007
2009 2008 ui.warn(_("(the rawcommit command is deprecated)\n"))
2010 2009
2011 2010 message = logmessage(opts)
2012 2011
2013 2012 files, match, anypats = cmdutil.matchpats(repo, pats, opts)
2014 2013 if opts['files']:
2015 2014 files += open(opts['files']).read().splitlines()
2016 2015
2017 2016 parents = [repo.lookup(p) for p in opts['parent']]
2018 2017
2019 2018 try:
2020 2019 repo.rawcommit(files, message, opts['user'], opts['date'], *parents)
2021 2020 except ValueError, inst:
2022 2021 raise util.Abort(str(inst))
2023 2022
def recover(ui, repo):
    """roll back an interrupted transaction

    Recover from an interrupted commit or pull.

    This command tries to fix the repository status after an interrupted
    operation. It should only be necessary when Mercurial suggests it.
    """
    # recover() reports whether there was a journal to roll back;
    # verify the result only when something was actually recovered
    if not repo.recover():
        return 1
    return hg.verify(repo)
2035 2034
def remove(ui, repo, *pats, **opts):
    """remove the specified files on the next commit

    Schedule the indicated files for removal from the repository.

    This only removes files from the current branch, not from the
    entire project history. If the files still exist in the working
    directory, they will be deleted from it. If invoked with --after,
    files that have been manually deleted are marked as removed.

    This command schedules the files to be removed at the next commit.
    To undo a remove before that, see hg revert.

    Modified files and added files are not removed by default. To
    remove them, use the -f/--force option.
    """
    # (dropped two dead assignments: an unused "names = []" and an
    # "exact = ..." that was immediately shadowed by the walk loop)
    if not opts['after'] and not pats:
        raise util.Abort(_('no files specified'))
    files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
    # classify every matched file so unsafe removals can be refused
    mardu = map(dict.fromkeys, repo.status(files=files, match=matchfn))[:5]
    modified, added, removed, deleted, unknown = mardu
    remove, forget = [], []
    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
        reason = None
        if abs not in deleted and opts['after']:
            reason = _('is still present')
        elif abs in modified and not opts['force']:
            reason = _('is modified (use -f to force removal)')
        elif abs in added:
            if opts['force']:
                # forced removal of an added file just forgets the add
                forget.append(abs)
                continue
            reason = _('has been marked for add (use -f to force removal)')
        elif abs in unknown:
            reason = _('is not managed')
        elif abs in removed:
            continue
        if reason:
            # only complain about files the user named explicitly
            if exact:
                ui.warn(_('not removing %s: file %s\n') % (rel, reason))
        else:
            if ui.verbose or not exact:
                ui.status(_('removing %s\n') % rel)
            remove.append(abs)
    repo.forget(forget)
    # with --after the files are already gone; do not unlink again
    repo.remove(remove, unlink=not opts['after'])
2084 2083
def rename(ui, repo, *pats, **opts):
    """rename files; equivalent of copy + remove

    Mark dest as copies of sources; mark sources for deletion. If
    dest is a directory, copies are put in that directory. If dest is
    a file, there can only be one source.

    By default, this command copies the contents of files as they
    stand in the working directory. If invoked with --after, the
    operation is recorded, but no copying is performed.

    This command takes effect in the next commit. To undo a rename
    before that, see hg revert.
    """
    wlock = repo.wlock(0)
    # first half: record the copies, reusing the copy implementation
    errs, copied = docopy(ui, repo, pats, opts, wlock)
    removals = []
    for abspath, relpath, exact in copied:
        if ui.verbose or not exact:
            ui.status(_('removing %s\n') % relpath)
        removals.append(abspath)
    if not opts.get('dry_run'):
        # second half: schedule the sources for removal;
        # unlink=True also deletes them from the working directory
        repo.remove(removals, True, wlock)
    return errs
2109 2108
def revert(ui, repo, *pats, **opts):
    """revert files or dirs to their states as of some revision

    With no revision specified, revert the named files or directories
    to the contents they had in the parent of the working directory.
    This restores the contents of the affected files to an unmodified
    state and unschedules adds, removes, copies, and renames. If the
    working directory has two parents, you must explicitly specify the
    revision to revert to.

    Modified files are saved with a .orig suffix before reverting.
    To disable these backups, use --no-backup.

    Using the -r option, revert the given files or directories to their
    contents as of a specific revision. This can be helpful to "roll
    back" some or all of a change that should not have been committed.

    Revert modifies the working directory. It does not commit any
    changes, or change the parent of the working directory. If you
    revert to a revision other than the parent of the working
    directory, the reverted files will thus appear modified
    afterwards.

    If a file has been deleted, it is recreated. If the executable
    mode of a file was changed, it is reset.

    If names are given, all files matching the names are reverted.

    If no arguments are given, no files are reverted.
    """

    if opts["date"]:
        # --date and --rev are mutually exclusive; --date is resolved
        # to a revision here and handled as --rev from then on
        if opts["rev"]:
            raise util.Abort(_("you can't specify a revision and a date"))
        opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])

    if not pats and not opts['all']:
        raise util.Abort(_('no files or directories specified; '
                           'use --all to revert the whole repo'))

    parent, p2 = repo.dirstate.parents()
    if not opts['rev'] and p2 != nullid:
        # with two parents the target is ambiguous
        raise util.Abort(_('uncommitted merge - please provide a '
                           'specific revision'))
    ctx = repo.changectx(opts['rev'])
    node = ctx.node()
    mf = ctx.manifest()
    if node == parent:
        pmf = mf
    else:
        # parent manifest is loaded lazily further down, only if needed
        pmf = None

    wlock = repo.wlock()

    # need all matching names in dirstate and manifest of target rev,
    # so have to walk both. do not print errors if files exist in one
    # but not other.

    names = {}        # abs path -> (rel path, exactly named by user?)
    target_only = {}  # files present only in the target revision

    # walk dirstate.

    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
                                             badmatch=mf.has_key):
        names[abs] = (rel, exact)
        if src == 'b':
            target_only[abs] = True

    # walk target manifest.

    def badmatch(path):
        # accept paths already seen and directories containing them,
        # to keep the two walks consistent with each other
        if path in names:
            return True
        path_ = path + '/'
        for f in names:
            if f.startswith(path_):
                return True
        return False

    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
                                             badmatch=badmatch):
        if abs in names or src == 'b':
            continue
        names[abs] = (rel, exact)
        target_only[abs] = True

    changes = repo.status(match=names.has_key, wlock=wlock)[:5]
    modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)

    # each action is a pair: (list of files, status message template)
    revert = ([], _('reverting %s\n'))
    add = ([], _('adding %s\n'))
    remove = ([], _('removing %s\n'))
    forget = ([], _('forgetting %s\n'))
    undelete = ([], _('undeleting %s\n'))
    update = {}

    disptable = (
        # dispatch table:
        #   file state
        #   action if in target manifest
        #   action if not in target manifest
        #   make backup if in target manifest
        #   make backup if not in target manifest
        (modified, revert, remove, True, True),
        (added, revert, forget, True, False),
        (removed, undelete, None, False, False),
        (deleted, revert, remove, False, False),
        (unknown, add, None, True, False),
        (target_only, add, None, False, False),
        )

    entries = names.items()
    entries.sort()

    for abs, (rel, exact) in entries:
        mfentry = mf.get(abs)
        # closure over abs/rel/exact of the current iteration: queue
        # the file under the chosen action and optionally back it up
        def handle(xlist, dobackup):
            xlist[0].append(abs)
            update[abs] = 1
            if dobackup and not opts['no_backup'] and os.path.exists(rel):
                bakname = "%s.orig" % rel
                ui.note(_('saving current version of %s as %s\n') %
                        (rel, bakname))
                if not opts.get('dry_run'):
                    util.copyfile(rel, bakname)
            if ui.verbose or not exact:
                ui.status(xlist[1] % rel)
        for table, hitlist, misslist, backuphit, backupmiss in disptable:
            if abs not in table: continue
            # file has changed in dirstate
            if mfentry:
                handle(hitlist, backuphit)
            elif misslist is not None:
                handle(misslist, backupmiss)
            else:
                if exact: ui.warn(_('file not managed: %s\n') % rel)
                break
        else:
            # for/else: no dispatch entry matched, i.e.
            # file has not changed in dirstate
            if node == parent:
                if exact: ui.warn(_('no changes needed to %s\n') % rel)
                continue
            if pmf is None:
                # only need parent manifest in this unlikely case,
                # so do not read by default
                pmf = repo.changectx(parent).manifest()
            if abs in pmf:
                if mfentry:
                    # if version of file is same in parent and target
                    # manifests, do nothing
                    if pmf[abs] != mfentry:
                        handle(revert, False)
                else:
                    handle(remove, False)

    if not opts.get('dry_run'):
        # apply the queued actions: content first, then dirstate flags
        repo.dirstate.forget(forget[0])
        r = hg.revert(repo, node, update.has_key, wlock)
        repo.dirstate.update(add[0], 'a')
        repo.dirstate.update(undelete[0], 'n')
        repo.dirstate.update(remove[0], 'r')
        return r
2273 2272
def rollback(ui, repo):
    """roll back the last transaction in this repository

    Roll back the last transaction in this repository, restoring the
    project to its state prior to the transaction.

    Transactions are used to encapsulate the effects of all commands
    that create new changesets or propagate existing changesets into a
    repository. For example, the following commands are transactional,
    and their effects can be rolled back:

      commit
      import
      pull
      push (with this repository as destination)
      unbundle

    This command should be used with care. There is only one level of
    rollback, and there is no way to undo a rollback.

    This command is not intended for use on public repositories. Once
    changes are visible for pull by other users, rolling a transaction
    back locally is ineffective (someone else may already have pulled
    the changes). Furthermore, a race is possible with readers of the
    repository; for example an in-progress pull from the repository
    may fail if a rollback is performed.
    """
    # all the actual work happens in localrepository.rollback()
    repo.rollback()
2302 2301
def root(ui, repo):
    """print the root (top) of the current working dir

    Print the root directory of the current repository.
    """
    # the repository object already knows its root; just echo it
    ui.write(repo.root + "\n")
2309 2308
def serve(ui, repo, **opts):
    """export the repository via HTTP

    Start a local HTTP repository browser and pull server.

    By default, the server logs accesses to stdout and errors to
    stderr. Use the "-A" and "-E" options to log to files.
    """

    if opts["stdio"]:
        # --stdio: serve a single ssh client over stdin/stdout instead
        # of starting an HTTP server
        if repo is None:
            raise hg.RepoError(_("There is no Mercurial repository here"
                                 " (.hg not found)"))
        s = sshserver.sshserver(ui, repo)
        s.serve_forever()

    parentui = ui.parentui or ui
    # copy the relevant command-line options into the [web] config
    # section so the web machinery picks them up
    optlist = ("name templates style address port ipv6"
               " accesslog errorlog webdir_conf")
    for o in optlist.split():
        if opts[o]:
            parentui.setconfig("web", o, str(opts[o]))

    # without a repo we can still serve if a webdir config names the repos
    if repo is None and not ui.config("web", "webdir_conf"):
        raise hg.RepoError(_("There is no Mercurial repository here"
                             " (.hg not found)"))

    if opts['daemon'] and not opts['daemon_pipefds']:
        # parent half of daemonization: respawn ourselves detached,
        # passing the pipe fds, then wait for the child's ready byte
        # before exiting so callers know the server is up
        rfd, wfd = os.pipe()
        args = sys.argv[:]
        args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
        pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
                         args[0], args)
        os.close(wfd)
        os.read(rfd, 1)  # blocks until the child writes 'y' below
        os._exit(0)

    httpd = hgweb.server.create_server(parentui, repo)

    if ui.verbose:
        if httpd.port != 80:
            ui.status(_('listening at http://%s:%d/\n') %
                      (httpd.addr, httpd.port))
        else:
            ui.status(_('listening at http://%s/\n') % httpd.addr)

    if opts['pid_file']:
        fp = open(opts['pid_file'], 'w')
        fp.write(str(os.getpid()) + '\n')
        fp.close()

    if opts['daemon_pipefds']:
        # child half of daemonization: signal the waiting parent, then
        # detach from the terminal by pointing fds 0-2 at the null device
        rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
        os.close(rfd)
        os.write(wfd, 'y')
        os.close(wfd)
        sys.stdout.flush()
        sys.stderr.flush()
        fd = os.open(util.nulldev, os.O_RDWR)
        if fd != 0: os.dup2(fd, 0)
        if fd != 1: os.dup2(fd, 1)
        if fd != 2: os.dup2(fd, 2)
        if fd not in (0, 1, 2): os.close(fd)

    httpd.serve_forever()
2375 2374
def status(ui, repo, *pats, **opts):
    """show changed files in the working directory

    Show status of files in the repository. If names are given, only
    files that match are shown. Files that are clean or ignored, are
    not listed unless -c (clean), -i (ignored) or -A is given.

    NOTE: status may appear to disagree with diff if permissions have
    changed or a merge has occurred. The standard diff format does not
    report permission changes and diff only reports changes relative
    to one merge parent.

    If one revision is given, it is used as the base revision.
    If two revisions are given, the difference between them is shown.

    The codes used to show the status of files are:
    M = modified
    A = added
    R = removed
    C = clean
    ! = deleted, but still tracked
    ? = not tracked
    I = ignored (not shown by default)
      = the previous added file was copied from here
    """

    show_all = opts['all']
    node1, node2 = cmdutil.revpair(repo, opts.get('rev'))

    files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
    # with explicit patterns, print paths relative to the cwd
    cwd = (pats and repo.getcwd()) or ''
    modified, added, removed, deleted, unknown, ignored, clean = \
        [n for n in repo.status(node1=node1, node2=node2, files=files,
                                match=matchfn,
                                list_ignored=show_all or opts['ignored'],
                                list_clean=show_all or opts['clean'])]

    changetypes = (('modified', 'M', modified),
                   ('added', 'A', added),
                   ('removed', 'R', removed),
                   ('deleted', '!', deleted),
                   ('unknown', '?', unknown),
                   ('ignored', 'I', ignored))

    explicit_changetypes = changetypes + (('clean', 'C', clean),)

    # NUL separators for xargs -0 style consumers
    end = opts['print0'] and '\0' or '\n'

    # show only the requested states; with no selection, fall back to
    # everything except clean files
    selection = [ct for ct in explicit_changetypes
                 if show_all or opts[ct[0]]] or changetypes

    for name, code, filelist in selection:
        if opts['no_status']:
            fmt = "%%s%s" % end
        else:
            fmt = "%s %%s%s" % (code, end)

        for f in filelist:
            ui.write(fmt % util.pathto(repo.root, cwd, f))
            if ((show_all or opts.get('copies')) and not opts.get('no_status')):
                copied = repo.dirstate.copied(f)
                if copied:
                    ui.write(' %s%s' % (util.pathto(repo.root, cwd, copied),
                                        end))
2439 2438
def tag(ui, repo, name, rev_=None, **opts):
    """add a tag for the current or given revision

    Name a particular revision using <name>.

    Tags are used to name particular revisions of the repository and are
    very useful to compare different revision, to go back to significant
    earlier versions or to mark branch points as releases, etc.

    If no revision is given, the parent of the working directory is used,
    or tip if no revision is checked out.

    To facilitate version control, distribution, and merging of tags,
    they are stored as a file named ".hgtags" which is managed
    similarly to other project files and can be hand-edited if
    necessary. The file '.hg/localtags' is used for local tags (not
    shared among repositories).
    """
    if name in ('tip', '.', 'null'):
        raise util.Abort(_("the name '%s' is reserved") % name)

    if rev_ is not None:
        # positional REV is the old calling convention
        ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
                  "please use 'hg tag [-r REV] NAME' instead\n"))
        if opts['rev']:
            raise util.Abort(_("use only one form to specify the revision"))
    if opts['rev'] and opts['remove']:
        raise util.Abort(_("--rev and --remove are incompatible"))
    if opts['rev']:
        rev_ = opts['rev']

    message = opts['message']
    if opts['remove']:
        # tagging a name with the null revision removes the tag
        rev_ = nullid
        if not message:
            message = _('Removed tag %s') % name

    if not rev_ and repo.dirstate.parents()[1] != nullid:
        raise util.Abort(_('uncommitted merge - please provide a '
                           'specific revision'))

    tagnode = repo.changectx(rev_).node()
    if not message:
        message = _('Added tag %s for changeset %s') % (name, short(tagnode))

    repo.tag(name, tagnode, message, opts['local'], opts['user'], opts['date'])
2483 2482
def tags(ui, repo):
    """list repository tags

    List the repository tags.

    This lists both regular and local tags.
    """

    hexfunc = ui.debugflag and hex or short
    taglist = repo.tagslist()
    taglist.reverse()
    for t, n in taglist:
        try:
            hn = hexfunc(n)
            r = "%5d:%s" % (repo.changelog.rev(n), hn)
        except revlog.LookupError:
            # node is not in the changelog; show the hash with no rev
            r = " ?:%s" % hn
        if ui.quiet:
            ui.write("%s\n" % t)
            continue
        # pad the tag name so the rev:hash column lines up
        ui.write("%s%s %s\n" % (t, " " * (30 - util.locallen(t)), r))
2506 2505
def tip(ui, repo, **opts):
    """show the tip revision

    Show the tip revision.
    """
    # tip is the last changelog rev: nullrev (-1) + count() gives that
    # index, and yields the null rev for an empty repository
    cmdutil.show_changeset(ui, repo, opts).show(nullrev+repo.changelog.count())
2513 2512
def unbundle(ui, repo, fname, **opts):
    """apply a changegroup file

    Apply a compressed changegroup file generated by the bundle
    command.
    """
    # local path if it exists, otherwise treat fname as a URL
    if os.path.exists(fname):
        fp = open(fname, "rb")
    else:
        fp = urllib.urlopen(fname)
    bundle = changegroup.readbundle(fp, fname)
    modheads = repo.addchangegroup(bundle, 'unbundle', 'bundle:' + fname)
    return postincoming(ui, repo, modheads, opts['update'])
2527 2526
def update(ui, repo, node=None, clean=False, date=None):
    """update working directory

    Update the working directory to the specified revision, or the
    tip of the current branch if none is specified.

    If there are no outstanding changes in the working directory and
    there is a linear relationship between the current version and the
    requested version, the result is the requested version.

    To merge the working directory with another revision, use the
    merge command.

    By default, update will refuse to run if doing so would require
    discarding local changes.
    """
    if date:
        # a date spec and an explicit revision are mutually exclusive
        if node:
            raise util.Abort(_("you can't specify a revision and a date"))
        node = cmdutil.finddate(ui, repo, date)

    if clean:
        return hg.clean(repo, node)
    return hg.update(repo, node)
2553 2552
def verify(ui, repo):
    """verify the integrity of the repository

    Verify the integrity of the current repository.

    This will perform an extensive check of the repository's
    integrity, validating the hashes and checksums of each entry in
    the changelog, manifest, and tracked files, as well as the
    integrity of their crosslinks and indices.
    """
    # delegates entirely to the library-level verify implementation
    return hg.verify(repo)
2565 2564
def version_(ui):
    """output version and copyright information"""
    # trailing underscore keeps the function from shadowing the
    # `version` module it reads the version string from
    ui.write(_("Mercurial Distributed SCM (version %s)\n")
             % version.get_version())
    # copyright notice goes through ui.status so -q suppresses it
    ui.status(_(
        "\nCopyright (C) 2005, 2006 Matt Mackall <mpm@selenic.com>\n"
        "This is free software; see the source for copying conditions. "
        "There is NO\nwarranty; "
        "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
    ))
2576 2575
# Command options and aliases are listed here, alphabetically

# options accepted by every command
globalopts = [
    ('R', 'repository', '',
     _('repository root directory or symbolic path name')),
    ('', 'cwd', '', _('change working directory')),
    ('y', 'noninteractive', None,
     _('do not prompt, assume \'yes\' for any required answers')),
    ('q', 'quiet', None, _('suppress output')),
    ('v', 'verbose', None, _('enable additional output')),
    ('', 'config', [], _('set/override config option')),
    ('', 'debug', None, _('enable debugging output')),
    ('', 'debugger', None, _('start debugger')),
    ('', 'encoding', util._encoding, _('set the charset encoding')),
    ('', 'encodingmode', util._encodingmode, _('set the charset encoding mode')),
    ('', 'lsprof', None, _('print improved command execution profile')),
    ('', 'traceback', None, _('print traceback on exception')),
    ('', 'time', None, _('time how long the command takes')),
    ('', 'profile', None, _('print command execution profile')),
    ('', 'version', None, _('output version information and exit')),
    ('h', 'help', None, _('display help and exit')),
]

# shared option groups, concatenated into per-command option lists below
dryrunopts = [('n', 'dry-run', None,
               _('do not perform actions, just print output'))]

remoteopts = [
    ('e', 'ssh', '', _('specify ssh command to use')),
    ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
]

walkopts = [
    ('I', 'include', [], _('include names matching the given patterns')),
    ('X', 'exclude', [], _('exclude names matching the given patterns')),
]

commitopts = [
    ('m', 'message', '', _('use <text> as commit message')),
    ('l', 'logfile', '', _('read commit message from <file>')),
]

# command name -> (function, option list, help synopsis).
# keys may carry "|"-separated aliases; the leading "^" is stripped
# when matching (see findpossible) -- presumably it flags the commands
# shown in the short help listing.
table = {
    "^add": (add, walkopts + dryrunopts, _('hg add [OPTION]... [FILE]...')),
    "addremove":
        (addremove,
         [('s', 'similarity', '',
           _('guess renamed files by similarity (0<=s<=100)')),
         ] + walkopts + dryrunopts,
         _('hg addremove [OPTION]... [FILE]...')),
    "^annotate":
        (annotate,
         [('r', 'rev', '', _('annotate the specified revision')),
          ('f', 'follow', None, _('follow file copies and renames')),
          ('a', 'text', None, _('treat all files as text')),
          ('u', 'user', None, _('list the author')),
          ('d', 'date', None, _('list the date')),
          ('n', 'number', None, _('list the revision number (default)')),
          ('c', 'changeset', None, _('list the changeset')),
         ] + walkopts,
         _('hg annotate [-r REV] [-f] [-a] [-u] [-d] [-n] [-c] FILE...')),
    "archive":
        (archive,
         [('', 'no-decode', None, _('do not pass files through decoders')),
          ('p', 'prefix', '', _('directory prefix for files in archive')),
          ('r', 'rev', '', _('revision to distribute')),
          ('t', 'type', '', _('type of distribution to create')),
         ] + walkopts,
         _('hg archive [OPTION]... DEST')),
    "backout":
        (backout,
         [('', 'merge', None,
           _('merge with old dirstate parent after backout')),
          ('d', 'date', '', _('record datecode as commit date')),
          ('', 'parent', '', _('parent to choose when backing out merge')),
          ('u', 'user', '', _('record user as committer')),
         ] + walkopts + commitopts,
         _('hg backout [OPTION]... REV')),
    "branch": (branch,
               [('f', 'force', None,
                 _('set branch name even if it shadows an existing branch'))],
               _('hg branch [NAME]')),
    "branches": (branches, [], _('hg branches')),
    "bundle":
        (bundle,
         [('f', 'force', None,
           _('run even when remote repository is unrelated')),
          ('r', 'rev', [],
           _('a changeset you would like to bundle')),
          ('', 'base', [],
           _('a base changeset to specify instead of a destination')),
         ] + remoteopts,
         _('hg bundle [-f] [-r REV]... [--base REV]... FILE [DEST]')),
    "cat":
        (cat,
         [('o', 'output', '', _('print output to file with formatted name')),
          ('r', 'rev', '', _('print the given revision')),
         ] + walkopts,
         _('hg cat [OPTION]... FILE...')),
    "^clone":
        (clone,
         [('U', 'noupdate', None, _('do not update the new working directory')),
          ('r', 'rev', [],
           _('a changeset you would like to have after cloning')),
          ('', 'pull', None, _('use pull protocol to copy metadata')),
          ('', 'uncompressed', None,
           _('use uncompressed transfer (fast over LAN)')),
         ] + remoteopts,
         _('hg clone [OPTION]... SOURCE [DEST]')),
    "^commit|ci":
        (commit,
         [('A', 'addremove', None,
           _('mark new/missing files as added/removed before committing')),
          ('d', 'date', '', _('record datecode as commit date')),
          ('u', 'user', '', _('record user as commiter')),
         ] + walkopts + commitopts,
         _('hg commit [OPTION]... [FILE]...')),
    "copy|cp":
        (copy,
         [('A', 'after', None, _('record a copy that has already occurred')),
          ('f', 'force', None,
           _('forcibly copy over an existing managed file')),
         ] + walkopts + dryrunopts,
         _('hg copy [OPTION]... [SOURCE]... DEST')),
    "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
    "debugcomplete":
        (debugcomplete,
         [('o', 'options', None, _('show the command options'))],
         _('debugcomplete [-o] CMD')),
    "debuginstall": (debuginstall, [], _('debuginstall')),
    "debugrebuildstate":
        (debugrebuildstate,
         [('r', 'rev', '', _('revision to rebuild to'))],
         _('debugrebuildstate [-r REV] [REV]')),
    "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
    "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
    "debugstate": (debugstate, [], _('debugstate')),
    "debugdate":
        (debugdate,
         [('e', 'extended', None, _('try extended date formats'))],
         _('debugdate [-e] DATE [RANGE]')),
    "debugdata": (debugdata, [], _('debugdata FILE REV')),
    "debugindex": (debugindex, [], _('debugindex FILE')),
    "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
    "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
    "debugwalk": (debugwalk, walkopts, _('debugwalk [OPTION]... [FILE]...')),
    "^diff":
        (diff,
         [('r', 'rev', [], _('revision')),
          ('a', 'text', None, _('treat all files as text')),
          ('p', 'show-function', None,
           _('show which function each change is in')),
          ('g', 'git', None, _('use git extended diff format')),
          ('', 'nodates', None, _("don't include dates in diff headers")),
          ('w', 'ignore-all-space', None,
           _('ignore white space when comparing lines')),
          ('b', 'ignore-space-change', None,
           _('ignore changes in the amount of white space')),
          ('B', 'ignore-blank-lines', None,
           _('ignore changes whose lines are all blank')),
         ] + walkopts,
         _('hg diff [OPTION]... [-r REV1 [-r REV2]] [FILE]...')),
    "^export":
        (export,
         [('o', 'output', '', _('print output to file with formatted name')),
          ('a', 'text', None, _('treat all files as text')),
          ('g', 'git', None, _('use git extended diff format')),
          ('', 'nodates', None, _("don't include dates in diff headers")),
          ('', 'switch-parent', None, _('diff against the second parent'))],
         _('hg export [OPTION]... [-o OUTFILESPEC] REV...')),
    "grep":
        (grep,
         [('0', 'print0', None, _('end fields with NUL')),
          ('', 'all', None, _('print all revisions that match')),
          ('f', 'follow', None,
           _('follow changeset history, or file history across copies and renames')),
          ('i', 'ignore-case', None, _('ignore case when matching')),
          ('l', 'files-with-matches', None,
           _('print only filenames and revs that match')),
          ('n', 'line-number', None, _('print matching line numbers')),
          ('r', 'rev', [], _('search in given revision range')),
          ('u', 'user', None, _('print user who committed change')),
         ] + walkopts,
         _('hg grep [OPTION]... PATTERN [FILE]...')),
    "heads":
        (heads,
         [('', 'style', '', _('display using template map file')),
          ('r', 'rev', '', _('show only heads which are descendants of rev')),
          ('', 'template', '', _('display with template'))],
         _('hg heads [-r REV]')),
    "help": (help_, [], _('hg help [COMMAND]')),
    "identify|id": (identify, [], _('hg identify')),
    "import|patch":
        (import_,
         [('p', 'strip', 1,
           _('directory strip option for patch. This has the same\n'
             'meaning as the corresponding patch option')),
          ('b', 'base', '', _('base path')),
          ('f', 'force', None,
           _('skip check for outstanding uncommitted changes'))] + commitopts,
         _('hg import [-p NUM] [-m MESSAGE] [-f] PATCH...')),
    "incoming|in": (incoming,
         [('M', 'no-merges', None, _('do not show merges')),
          ('f', 'force', None,
           _('run even when remote repository is unrelated')),
          ('', 'style', '', _('display using template map file')),
          ('n', 'newest-first', None, _('show newest record first')),
          ('', 'bundle', '', _('file to store the bundles into')),
          ('p', 'patch', None, _('show patch')),
          ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
          ('', 'template', '', _('display with template')),
         ] + remoteopts,
         _('hg incoming [-p] [-n] [-M] [-f] [-r REV]...'
           ' [--bundle FILENAME] [SOURCE]')),
    "^init":
        (init,
         remoteopts,
         _('hg init [-e CMD] [--remotecmd CMD] [DEST]')),
    "locate":
        (locate,
         [('r', 'rev', '', _('search the repository as it stood at rev')),
          ('0', 'print0', None,
           _('end filenames with NUL, for use with xargs')),
          ('f', 'fullpath', None,
           _('print complete paths from the filesystem root')),
         ] + walkopts,
         _('hg locate [OPTION]... [PATTERN]...')),
    "^log|history":
        (log,
         [('f', 'follow', None,
           _('follow changeset history, or file history across copies and renames')),
          ('', 'follow-first', None,
           _('only follow the first parent of merge changesets')),
          ('d', 'date', '', _('show revs matching date spec')),
          ('C', 'copies', None, _('show copied files')),
          ('k', 'keyword', [], _('search for a keyword')),
          ('l', 'limit', '', _('limit number of changes displayed')),
          ('r', 'rev', [], _('show the specified revision or range')),
          ('', 'removed', None, _('include revs where files were removed')),
          ('M', 'no-merges', None, _('do not show merges')),
          ('', 'style', '', _('display using template map file')),
          ('m', 'only-merges', None, _('show only merges')),
          ('p', 'patch', None, _('show patch')),
          ('P', 'prune', [], _('do not display revision or any of its ancestors')),
          ('', 'template', '', _('display with template')),
         ] + walkopts,
         _('hg log [OPTION]... [FILE]')),
    "manifest": (manifest, [], _('hg manifest [REV]')),
    "^merge":
        (merge,
         [('f', 'force', None, _('force a merge with outstanding changes'))],
         _('hg merge [-f] [REV]')),
    "outgoing|out": (outgoing,
         [('M', 'no-merges', None, _('do not show merges')),
          ('f', 'force', None,
           _('run even when remote repository is unrelated')),
          ('p', 'patch', None, _('show patch')),
          ('', 'style', '', _('display using template map file')),
          ('r', 'rev', [], _('a specific revision you would like to push')),
          ('n', 'newest-first', None, _('show newest record first')),
          ('', 'template', '', _('display with template')),
         ] + remoteopts,
         _('hg outgoing [-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
    "^parents":
        (parents,
         [('r', 'rev', '', _('show parents from the specified rev')),
          ('', 'style', '', _('display using template map file')),
          ('', 'template', '', _('display with template'))],
         _('hg parents [-r REV] [FILE]')),
    "paths": (paths, [], _('hg paths [NAME]')),
    "^pull":
        (pull,
         [('u', 'update', None,
           _('update to new tip if changesets were pulled')),
          ('f', 'force', None,
           _('run even when remote repository is unrelated')),
          ('r', 'rev', [],
           _('a specific revision up to which you would like to pull')),
         ] + remoteopts,
         _('hg pull [-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
    "^push":
        (push,
         [('f', 'force', None, _('force push')),
          ('r', 'rev', [], _('a specific revision you would like to push')),
         ] + remoteopts,
         _('hg push [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
    "debugrawcommit|rawcommit":
        (rawcommit,
         [('p', 'parent', [], _('parent')),
          ('d', 'date', '', _('date code')),
          ('u', 'user', '', _('user')),
          ('F', 'files', '', _('file list'))
         ] + commitopts,
         _('hg debugrawcommit [OPTION]... [FILE]...')),
    "recover": (recover, [], _('hg recover')),
    "^remove|rm":
        (remove,
         [('A', 'after', None, _('record remove that has already occurred')),
          ('f', 'force', None, _('remove file even if modified')),
         ] + walkopts,
         _('hg remove [OPTION]... FILE...')),
    "rename|mv":
        (rename,
         [('A', 'after', None, _('record a rename that has already occurred')),
          ('f', 'force', None,
           _('forcibly copy over an existing managed file')),
         ] + walkopts + dryrunopts,
         _('hg rename [OPTION]... SOURCE... DEST')),
    "^revert":
        (revert,
         [('a', 'all', None, _('revert all changes when no arguments given')),
          ('d', 'date', '', _('tipmost revision matching date')),
          ('r', 'rev', '', _('revision to revert to')),
          ('', 'no-backup', None, _('do not save backup copies of files')),
         ] + walkopts + dryrunopts,
         _('hg revert [OPTION]... [-r REV] [NAME]...')),
    "rollback": (rollback, [], _('hg rollback')),
    "root": (root, [], _('hg root')),
    "showconfig|debugconfig":
        (showconfig,
         [('u', 'untrusted', None, _('show untrusted configuration options'))],
         _('showconfig [-u] [NAME]...')),
    "^serve":
        (serve,
         [('A', 'accesslog', '', _('name of access log file to write to')),
          ('d', 'daemon', None, _('run server in background')),
          ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
          ('E', 'errorlog', '', _('name of error log file to write to')),
          ('p', 'port', 0, _('port to use (default: 8000)')),
          ('a', 'address', '', _('address to use')),
          ('n', 'name', '',
           _('name to show in web pages (default: working dir)')),
          ('', 'webdir-conf', '', _('name of the webdir config file'
                                    ' (serve more than one repo)')),
          ('', 'pid-file', '', _('name of file to write process ID to')),
          ('', 'stdio', None, _('for remote clients')),
          ('t', 'templates', '', _('web templates to use')),
          ('', 'style', '', _('template style to use')),
          ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
         _('hg serve [OPTION]...')),
    "^status|st":
        (status,
         [('A', 'all', None, _('show status of all files')),
          ('m', 'modified', None, _('show only modified files')),
          ('a', 'added', None, _('show only added files')),
          ('r', 'removed', None, _('show only removed files')),
          ('d', 'deleted', None, _('show only deleted (but tracked) files')),
          ('c', 'clean', None, _('show only files without changes')),
          ('u', 'unknown', None, _('show only unknown (not tracked) files')),
          ('i', 'ignored', None, _('show only ignored files')),
          ('n', 'no-status', None, _('hide status prefix')),
          ('C', 'copies', None, _('show source of copied files')),
          ('0', 'print0', None,
           _('end filenames with NUL, for use with xargs')),
          ('', 'rev', [], _('show difference from revision')),
         ] + walkopts,
         _('hg status [OPTION]... [FILE]...')),
    "tag":
        (tag,
         [('l', 'local', None, _('make the tag local')),
          ('m', 'message', '', _('message for tag commit log entry')),
          ('d', 'date', '', _('record datecode as commit date')),
          ('u', 'user', '', _('record user as commiter')),
          ('r', 'rev', '', _('revision to tag')),
          ('', 'remove', None, _('remove a tag'))],
         _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
    "tags": (tags, [], _('hg tags')),
    "tip":
        (tip,
         [('', 'style', '', _('display using template map file')),
          ('p', 'patch', None, _('show patch')),
          ('', 'template', '', _('display with template'))],
         _('hg tip [-p]')),
    "unbundle":
        (unbundle,
         [('u', 'update', None,
           _('update to new tip if changesets were unbundled'))],
         _('hg unbundle [-u] FILE')),
    "^update|up|checkout|co":
        (update,
         [('C', 'clean', None, _('overwrite locally modified files')),
          ('d', 'date', '', _('tipmost revision matching date'))],
         _('hg update [-C] [-d DATE] [REV]')),
    "verify": (verify, [], _('hg verify')),
    "version": (version_, [], _('hg version')),
}

# space-separated command names that run without a repository, and
# commands for which a repository is optional
norepo = ("clone init version help debugancestor debugcomplete debugdata"
          " debugindex debugindexdot debugdate debuginstall")
optionalrepo = ("paths serve showconfig")
2966 2965
def findpossible(ui, cmd):
    """
    Return cmd -> (aliases, command table entry)
    for each matching command.
    Return debug commands (or their aliases) only if no normal command matches.
    """
    choice = {}
    debugchoice = {}
    for entry in table.keys():
        aliases = entry.lstrip("^").split("|")
        found = None
        if cmd in aliases:
            found = cmd
        elif not ui.config("ui", "strict"):
            # accept unambiguous prefixes unless strict matching is on
            for alias in aliases:
                if alias.startswith(cmd):
                    found = alias
                    break
        if found is None:
            continue
        if aliases[0].startswith("debug") or found.startswith("debug"):
            debugchoice[found] = (aliases, table[entry])
        else:
            choice[found] = (aliases, table[entry])

    # debug commands are offered only when nothing else matched
    if not choice and debugchoice:
        choice = debugchoice

    return choice
2995 2994
def findcmd(ui, cmd):
    """Return (aliases, command table entry) for command string.

    Raises AmbiguousCommand if cmd prefix-matches several commands, and
    UnknownCommand if it matches none.
    """
    choice = findpossible(ui, cmd)

    # an exact match wins even when other commands share the prefix
    # (idiomatic `in` test instead of the deprecated dict.has_key())
    if cmd in choice:
        return choice[cmd]

    if len(choice) > 1:
        clist = choice.keys()
        clist.sort()
        raise AmbiguousCommand(cmd, clist)

    if choice:
        return choice.values()[0]

    raise UnknownCommand(cmd)
3012 3011
def catchterm(*args):
    # signal handler: turn the signal into a Python exception so that
    # cleanup code higher up the stack gets a chance to run
    raise util.SignalInterrupt
3015 3014
def run():
    # command-line entry point: dispatch argv and exit with its status
    sys.exit(dispatch(sys.argv[1:]))
3018 3017
class ParseError(Exception):
    """Exception raised on errors in parsing the command line."""
    # args are (command-or-None, underlying getopt error); see parse()
3021 3020
def parse(ui, args):
    """Parse a command line into its parts.

    Returns (cmd, table entry func or None, remaining args, global
    option dict, command option dict). Raises ParseError on bad
    options; findcmd may raise for unknown/ambiguous commands.
    """
    options = {}
    cmdoptions = {}

    try:
        # peel off the global options that precede the command name;
        # fancyopts fills `options` and returns what is left
        args = fancyopts.fancyopts(args, globalopts, options)
    except fancyopts.getopt.GetoptError, inst:
        raise ParseError(None, inst)

    if args:
        cmd, args = args[0], args[1:]
        aliases, i = findcmd(ui, cmd)
        cmd = aliases[0]  # canonical command name
        defaults = ui.config("defaults", cmd)
        if defaults:
            # prepend per-command default arguments from [defaults]
            args = shlex.split(defaults) + args
        c = list(i[1])
    else:
        cmd = None
        c = []

    # combine global options into local
    for o in globalopts:
        c.append((o[0], o[1], options[o[1]], o[3]))

    try:
        args = fancyopts.fancyopts(args, c, cmdoptions)
    except fancyopts.getopt.GetoptError, inst:
        raise ParseError(cmd, inst)

    # separate global options back out
    for o in globalopts:
        n = o[1]
        options[n] = cmdoptions[n]
        del cmdoptions[n]

    return (cmd, cmd and i[0] or None, args, options, cmdoptions)
3059 3058
3060 3059 external = {}
3061 3060
def findext(name):
    '''return module with given extension name'''
    try:
        return sys.modules[external[name]]
    except KeyError:
        # fall back to matching on a dotted-module or path suffix
        for ext, modname in external.iteritems():
            if (ext.endswith('.' + name) or ext.endswith('/' + name)
                or modname == name):
                return sys.modules[modname]
        raise KeyError(name)
3071 3070
3072 3071 def load_extensions(ui):
3073 3072 added = []
3074 3073 for ext_name, load_from_name in ui.extensions():
3075 3074 if ext_name in external:
3076 3075 continue
3077 3076 try:
3078 3077 if load_from_name:
3079 3078 # the module will be loaded in sys.modules
3080 3079 # choose an unique name so that it doesn't
3081 3080 # conflicts with other modules
3082 3081 module_name = "hgext_%s" % ext_name.replace('.', '_')
3083 3082 mod = imp.load_source(module_name, load_from_name)
3084 3083 else:
3085 3084 def importh(name):
3086 3085 mod = __import__(name)
3087 3086 components = name.split('.')
3088 3087 for comp in components[1:]:
3089 3088 mod = getattr(mod, comp)
3090 3089 return mod
3091 3090 try:
3092 3091 mod = importh("hgext.%s" % ext_name)
3093 3092 except ImportError:
3094 3093 mod = importh(ext_name)
3095 3094 external[ext_name] = mod.__name__
3096 3095 added.append((mod, ext_name))
3097 3096 except (util.SignalInterrupt, KeyboardInterrupt):
3098 3097 raise
3099 3098 except Exception, inst:
3100 3099 ui.warn(_("*** failed to import extension %s: %s\n") %
3101 3100 (ext_name, inst))
3102 3101 if ui.print_exc():
3103 3102 return 1
3104 3103
3105 3104 for mod, name in added:
3106 3105 uisetup = getattr(mod, 'uisetup', None)
3107 3106 if uisetup:
3108 3107 uisetup(ui)
3109 3108 reposetup = getattr(mod, 'reposetup', None)
3110 3109 if reposetup:
3111 3110 hg.repo_setup_hooks.append(reposetup)
3112 3111 cmdtable = getattr(mod, 'cmdtable', {})
3113 3112 overrides = [cmd for cmd in cmdtable if cmd in table]
3114 3113 if overrides:
3115 3114 ui.warn(_("extension '%s' overrides commands: %s\n")
3116 3115 % (name, " ".join(overrides)))
3117 3116 table.update(cmdtable)
3118 3117
def parseconfig(config):
    """parse the --config options from the command line

    Each item must look like 'section.name=value'; returns a list of
    (section, name, value) tuples.  Raises util.Abort on malformed input.
    """
    parsed = []
    for cfg in config:
        try:
            name, value = cfg.split('=', 1)
            section, name = name.split('.', 1)
            if not (section and name):
                raise IndexError
            parsed.append((section, name, value))
        except (IndexError, ValueError):
            raise util.Abort(_('malformed --config option: %s') % cfg)
    return parsed
3132 3131
3133 3132 def dispatch(args):
3134 3133 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
3135 3134 num = getattr(signal, name, None)
3136 3135 if num: signal.signal(num, catchterm)
3137 3136
3138 3137 try:
3139 3138 u = ui.ui(traceback='--traceback' in sys.argv[1:])
3140 3139 except util.Abort, inst:
3141 3140 sys.stderr.write(_("abort: %s\n") % inst)
3142 3141 return -1
3143 3142
3144 3143 load_extensions(u)
3145 3144 u.addreadhook(load_extensions)
3146 3145
3147 3146 try:
3148 3147 cmd, func, args, options, cmdoptions = parse(u, args)
3149 3148 if options["encoding"]:
3150 3149 util._encoding = options["encoding"]
3151 3150 if options["encodingmode"]:
3152 3151 util._encodingmode = options["encodingmode"]
3153 3152 if options["time"]:
3154 3153 def get_times():
3155 3154 t = os.times()
3156 3155 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
3157 3156 t = (t[0], t[1], t[2], t[3], time.clock())
3158 3157 return t
3159 3158 s = get_times()
3160 3159 def print_time():
3161 3160 t = get_times()
3162 3161 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
3163 3162 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
3164 3163 atexit.register(print_time)
3165 3164
3166 3165 # enter the debugger before command execution
3167 3166 if options['debugger']:
3168 3167 pdb.set_trace()
3169 3168
3170 3169 try:
3171 3170 if options['cwd']:
3172 3171 os.chdir(options['cwd'])
3173 3172
3174 3173 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3175 3174 not options["noninteractive"], options["traceback"],
3176 3175 parseconfig(options["config"]))
3177 3176
3178 3177 path = u.expandpath(options["repository"]) or ""
3179 3178 repo = path and hg.repository(u, path=path) or None
3180 3179 if repo and not repo.local():
3181 3180 raise util.Abort(_("repository '%s' is not local") % path)
3182 3181
3183 3182 if options['help']:
3184 3183 return help_(u, cmd, options['version'])
3185 3184 elif options['version']:
3186 3185 return version_(u)
3187 3186 elif not cmd:
3188 3187 return help_(u, 'shortlist')
3189 3188
3190 3189 if cmd not in norepo.split():
3191 3190 try:
3192 3191 if not repo:
3193 3192 repo = hg.repository(u, path=path)
3194 3193 u = repo.ui
3195 3194 except hg.RepoError:
3196 3195 if cmd not in optionalrepo.split():
3197 3196 raise
3198 3197 d = lambda: func(u, repo, *args, **cmdoptions)
3199 3198 else:
3200 3199 d = lambda: func(u, *args, **cmdoptions)
3201 3200
3202 3201 try:
3203 3202 if options['profile']:
3204 3203 import hotshot, hotshot.stats
3205 3204 prof = hotshot.Profile("hg.prof")
3206 3205 try:
3207 3206 try:
3208 3207 return prof.runcall(d)
3209 3208 except:
3210 3209 try:
3211 3210 u.warn(_('exception raised - generating '
3212 3211 'profile anyway\n'))
3213 3212 except:
3214 3213 pass
3215 3214 raise
3216 3215 finally:
3217 3216 prof.close()
3218 3217 stats = hotshot.stats.load("hg.prof")
3219 3218 stats.strip_dirs()
3220 3219 stats.sort_stats('time', 'calls')
3221 3220 stats.print_stats(40)
3222 3221 elif options['lsprof']:
3223 3222 try:
3224 3223 from mercurial import lsprof
3225 3224 except ImportError:
3226 3225 raise util.Abort(_(
3227 3226 'lsprof not available - install from '
3228 3227 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
3229 3228 p = lsprof.Profiler()
3230 3229 p.enable(subcalls=True)
3231 3230 try:
3232 3231 return d()
3233 3232 finally:
3234 3233 p.disable()
3235 3234 stats = lsprof.Stats(p.getstats())
3236 3235 stats.sort()
3237 3236 stats.pprint(top=10, file=sys.stderr, climit=5)
3238 3237 else:
3239 3238 return d()
3240 3239 finally:
3241 3240 u.flush()
3242 3241 except:
3243 3242 # enter the debugger when we hit an exception
3244 3243 if options['debugger']:
3245 3244 pdb.post_mortem(sys.exc_info()[2])
3246 3245 u.print_exc()
3247 3246 raise
3248 3247 except ParseError, inst:
3249 3248 if inst.args[0]:
3250 3249 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
3251 3250 help_(u, inst.args[0])
3252 3251 else:
3253 3252 u.warn(_("hg: %s\n") % inst.args[1])
3254 3253 help_(u, 'shortlist')
3255 3254 except AmbiguousCommand, inst:
3256 3255 u.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
3257 3256 (inst.args[0], " ".join(inst.args[1])))
3258 3257 except UnknownCommand, inst:
3259 3258 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
3260 3259 help_(u, 'shortlist')
3261 3260 except hg.RepoError, inst:
3262 3261 u.warn(_("abort: %s!\n") % inst)
3263 3262 except lock.LockHeld, inst:
3264 3263 if inst.errno == errno.ETIMEDOUT:
3265 3264 reason = _('timed out waiting for lock held by %s') % inst.locker
3266 3265 else:
3267 3266 reason = _('lock held by %s') % inst.locker
3268 3267 u.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
3269 3268 except lock.LockUnavailable, inst:
3270 3269 u.warn(_("abort: could not lock %s: %s\n") %
3271 3270 (inst.desc or inst.filename, inst.strerror))
3272 3271 except revlog.RevlogError, inst:
3273 3272 u.warn(_("abort: %s!\n") % inst)
3274 3273 except util.SignalInterrupt:
3275 3274 u.warn(_("killed!\n"))
3276 3275 except KeyboardInterrupt:
3277 3276 try:
3278 3277 u.warn(_("interrupted!\n"))
3279 3278 except IOError, inst:
3280 3279 if inst.errno == errno.EPIPE:
3281 3280 if u.debugflag:
3282 3281 u.warn(_("\nbroken pipe\n"))
3283 3282 else:
3284 3283 raise
3285 3284 except socket.error, inst:
3286 3285 u.warn(_("abort: %s\n") % inst[1])
3287 3286 except IOError, inst:
3288 3287 if hasattr(inst, "code"):
3289 3288 u.warn(_("abort: %s\n") % inst)
3290 3289 elif hasattr(inst, "reason"):
3291 3290 try: # usually it is in the form (errno, strerror)
3292 3291 reason = inst.reason.args[1]
3293 3292 except: # it might be anything, for example a string
3294 3293 reason = inst.reason
3295 3294 u.warn(_("abort: error: %s\n") % reason)
3296 3295 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
3297 3296 if u.debugflag:
3298 3297 u.warn(_("broken pipe\n"))
3299 3298 elif getattr(inst, "strerror", None):
3300 3299 if getattr(inst, "filename", None):
3301 3300 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3302 3301 else:
3303 3302 u.warn(_("abort: %s\n") % inst.strerror)
3304 3303 else:
3305 3304 raise
3306 3305 except OSError, inst:
3307 3306 if getattr(inst, "filename", None):
3308 3307 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3309 3308 else:
3310 3309 u.warn(_("abort: %s\n") % inst.strerror)
3311 3310 except util.UnexpectedOutput, inst:
3312 3311 u.warn(_("abort: %s") % inst[0])
3313 3312 if not isinstance(inst[1], basestring):
3314 3313 u.warn(" %r\n" % (inst[1],))
3315 3314 elif not inst[1]:
3316 3315 u.warn(_(" empty string\n"))
3317 3316 else:
3318 3317 u.warn("\n%r\n" % util.ellipsis(inst[1]))
3319 3318 except util.Abort, inst:
3320 3319 u.warn(_("abort: %s\n") % inst)
3321 3320 except TypeError, inst:
3322 3321 # was this an argument error?
3323 3322 tb = traceback.extract_tb(sys.exc_info()[2])
3324 3323 if len(tb) > 2: # no
3325 3324 raise
3326 3325 u.debug(inst, "\n")
3327 3326 u.warn(_("%s: invalid arguments\n") % cmd)
3328 3327 help_(u, cmd)
3329 3328 except SystemExit, inst:
3330 3329 # Commands shouldn't sys.exit directly, but give a return code.
3331 3330 # Just in case catch this and and pass exit code to caller.
3332 3331 return inst.code
3333 3332 except:
3334 3333 u.warn(_("** unknown exception encountered, details follow\n"))
3335 3334 u.warn(_("** report bug details to "
3336 3335 "http://www.selenic.com/mercurial/bts\n"))
3337 3336 u.warn(_("** or mercurial@selenic.com\n"))
3338 3337 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
3339 3338 % version.get_version())
3340 3339 raise
3341 3340
3342 3341 return -1
@@ -1,84 +1,83
1 1 # filelog.py - file history class for mercurial
2 2 #
3 3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from revlog import *
9 9 import os
10 10
class filelog(revlog):
    """Revlog holding the history of a single tracked file.

    The revlog index lives under "data/<encoded path>.i".  A revision may
    start with a metadata block (copy/rename info) delimited by a pair of
    "\x01\n" markers; read() and add() insert and strip it transparently.
    """
    def __init__(self, opener, path):
        revlog.__init__(self, opener,
                        "/".join(("data", self.encodedir(path + ".i"))))

    # This avoids a collision between a file named foo and a dir named
    # foo.i or foo.d
    def encodedir(self, path):
        return (path
                .replace(".hg/", ".hg.hg/")
                .replace(".i/", ".i.hg/")
                .replace(".d/", ".d.hg/"))

    def decodedir(self, path):
        # inverse of encodedir, replacements applied in reverse order
        return (path
                .replace(".d.hg/", ".d/")
                .replace(".i.hg/", ".i/")
                .replace(".hg.hg/", ".hg/"))

    def read(self, node):
        """Return revision data with any metadata header stripped."""
        t = self.revision(node)
        if not t.startswith('\1\n'):
            return t
        s = t.index('\1\n', 2)
        return t[s+2:]

    def _readmeta(self, node):
        """Return the metadata header of node as a dict (possibly empty)."""
        t = self.revision(node)
        if not t.startswith('\1\n'):
            return {}
        s = t.index('\1\n', 2)
        meta = {}
        for line in t[2:s].splitlines():
            k, v = line.split(": ", 1)
            meta[k] = v
        return meta

    def add(self, text, meta, transaction, link, p1=None, p2=None):
        """Add a revision, prepending a metadata header when required.

        A header is also forced when the text itself starts with the
        marker, so read() cannot mistake content for metadata.
        """
        if meta or text.startswith('\1\n'):
            mt = ""
            if meta:
                mt = ["%s: %s\n" % (k, v) for k, v in meta.items()]
            text = "\1\n%s\1\n%s" % ("".join(mt), text)
        return self.addrevision(text, transaction, link, p1, p2)

    def renamed(self, node):
        """Return (old path, old filenode) if node is a copy, else False."""
        if self.parents(node)[0] != nullid:
            # a copy always has a null first parent
            return False
        m = self._readmeta(node)
        if m and m.has_key("copy"):
            return (m["copy"], bin(m["copyrev"]))
        return False

    def size(self, rev):
        """return the size of a given revision"""
        # for revisions with renames, we have to go the slow way
        node = self.node(rev)
        if self.renamed(node):
            return len(self.read(node))
        return revlog.size(self, rev)

    def cmp(self, node, text):
        """compare text with a given file revision"""
        # for renames, we have to go the slow way
        if self.renamed(node):
            return self.read(node) != text
        return revlog.cmp(self, node, text)
@@ -1,1173 +1,1173
1 1 # hgweb/hgweb_mod.py - Web interface for a repository.
2 2 #
3 3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
4 4 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
5 5 #
6 6 # This software may be used and distributed according to the terms
7 7 # of the GNU General Public License, incorporated herein by reference.
8 8
9 9 import os, mimetypes, re, zlib, mimetools, cStringIO, sys
10 10 import tempfile, urllib, bz2
11 11 from mercurial.node import *
12 12 from mercurial.i18n import gettext as _
13 13 from mercurial import mdiff, ui, hg, util, archival, streamclone, patch
14 14 from mercurial import revlog, templater
15 15 from common import get_mtime, staticfile, style_map
16 16
17 17 def _up(p):
18 18 if p[0] != "/":
19 19 p = "/" + p
20 20 if p[-1] == "/":
21 21 p = p[:-1]
22 22 up = os.path.dirname(p)
23 23 if up == "/":
24 24 return "/"
25 25 return up + "/"
26 26
27 27 def revnavgen(pos, pagelen, limit, nodefunc):
28 28 def seq(factor, limit=None):
29 29 if limit:
30 30 yield limit
31 31 if limit >= 20 and limit <= 40:
32 32 yield 50
33 33 else:
34 34 yield 1 * factor
35 35 yield 3 * factor
36 36 for f in seq(factor * 10):
37 37 yield f
38 38
39 39 def nav(**map):
40 40 l = []
41 41 last = 0
42 42 for f in seq(1, pagelen):
43 43 if f < pagelen or f <= last:
44 44 continue
45 45 if f > limit:
46 46 break
47 47 last = f
48 48 if pos + f < limit:
49 49 l.append(("+%d" % f, hex(nodefunc(pos + f).node())))
50 50 if pos - f >= 0:
51 51 l.insert(0, ("-%d" % f, hex(nodefunc(pos - f).node())))
52 52
53 53 try:
54 54 yield {"label": "(0)", "node": hex(nodefunc('0').node())}
55 55
56 56 for label, node in l:
57 57 yield {"label": label, "node": node}
58 58
59 59 yield {"label": "tip", "node": "tip"}
60 60 except hg.RepoError:
61 61 pass
62 62
63 63 return nav
64 64
65 65 class hgweb(object):
66 66 def __init__(self, repo, name=None):
67 67 if type(repo) == type(""):
68 68 self.repo = hg.repository(ui.ui(report_untrusted=False), repo)
69 69 else:
70 70 self.repo = repo
71 71
72 72 self.mtime = -1
73 73 self.reponame = name
74 74 self.archives = 'zip', 'gz', 'bz2'
75 75 self.stripecount = 1
76 76 # a repo owner may set web.templates in .hg/hgrc to get any file
77 77 # readable by the user running the CGI script
78 78 self.templatepath = self.config("web", "templates",
79 79 templater.templatepath(),
80 80 untrusted=False)
81 81
82 82 # The CGI scripts are often run by a user different from the repo owner.
83 83 # Trust the settings from the .hg/hgrc files by default.
84 84 def config(self, section, name, default=None, untrusted=True):
85 85 return self.repo.ui.config(section, name, default,
86 86 untrusted=untrusted)
87 87
88 88 def configbool(self, section, name, default=False, untrusted=True):
89 89 return self.repo.ui.configbool(section, name, default,
90 90 untrusted=untrusted)
91 91
92 92 def configlist(self, section, name, default=None, untrusted=True):
93 93 return self.repo.ui.configlist(section, name, default,
94 94 untrusted=untrusted)
95 95
96 96 def refresh(self):
97 97 mtime = get_mtime(self.repo.root)
98 98 if mtime != self.mtime:
99 99 self.mtime = mtime
100 100 self.repo = hg.repository(self.repo.ui, self.repo.root)
101 101 self.maxchanges = int(self.config("web", "maxchanges", 10))
102 102 self.stripecount = int(self.config("web", "stripes", 1))
103 103 self.maxshortchanges = int(self.config("web", "maxshortchanges", 60))
104 104 self.maxfiles = int(self.config("web", "maxfiles", 10))
105 105 self.allowpull = self.configbool("web", "allowpull", True)
106 106
107 107 def archivelist(self, nodeid):
108 108 allowed = self.configlist("web", "allow_archive")
109 109 for i, spec in self.archive_specs.iteritems():
110 110 if i in allowed or self.configbool("web", "allow" + i):
111 111 yield {"type" : i, "extension" : spec[2], "node" : nodeid}
112 112
113 113 def listfilediffs(self, files, changeset):
114 114 for f in files[:self.maxfiles]:
115 115 yield self.t("filedifflink", node=hex(changeset), file=f)
116 116 if len(files) > self.maxfiles:
117 117 yield self.t("fileellipses")
118 118
119 119 def siblings(self, siblings=[], hiderev=None, **args):
120 120 siblings = [s for s in siblings if s.node() != nullid]
121 121 if len(siblings) == 1 and siblings[0].rev() == hiderev:
122 122 return
123 123 for s in siblings:
124 124 d = {'node': hex(s.node()), 'rev': s.rev()}
125 125 if hasattr(s, 'path'):
126 126 d['file'] = s.path()
127 127 d.update(args)
128 128 yield d
129 129
130 130 def renamelink(self, fl, node):
131 131 r = fl.renamed(node)
132 132 if r:
133 133 return [dict(file=r[0], node=hex(r[1]))]
134 134 return []
135 135
136 136 def showtag(self, t1, node=nullid, **args):
137 137 for t in self.repo.nodetags(node):
138 138 yield self.t(t1, tag=t, **args)
139 139
140 140 def diff(self, node1, node2, files):
141 141 def filterfiles(filters, files):
142 142 l = [x for x in files if x in filters]
143 143
144 144 for t in filters:
145 145 if t and t[-1] != os.sep:
146 146 t += os.sep
147 147 l += [x for x in files if x.startswith(t)]
148 148 return l
149 149
150 150 parity = [0]
151 151 def diffblock(diff, f, fn):
152 152 yield self.t("diffblock",
153 153 lines=prettyprintlines(diff),
154 154 parity=parity[0],
155 155 file=f,
156 156 filenode=hex(fn or nullid))
157 157 parity[0] = 1 - parity[0]
158 158
159 159 def prettyprintlines(diff):
160 160 for l in diff.splitlines(1):
161 161 if l.startswith('+'):
162 162 yield self.t("difflineplus", line=l)
163 163 elif l.startswith('-'):
164 164 yield self.t("difflineminus", line=l)
165 165 elif l.startswith('@'):
166 166 yield self.t("difflineat", line=l)
167 167 else:
168 168 yield self.t("diffline", line=l)
169 169
170 170 r = self.repo
171 171 c1 = r.changectx(node1)
172 172 c2 = r.changectx(node2)
173 173 date1 = util.datestr(c1.date())
174 174 date2 = util.datestr(c2.date())
175 175
176 176 modified, added, removed, deleted, unknown = r.status(node1, node2)[:5]
177 177 if files:
178 178 modified, added, removed = map(lambda x: filterfiles(files, x),
179 179 (modified, added, removed))
180 180
181 181 diffopts = patch.diffopts(self.repo.ui, untrusted=True)
182 182 for f in modified:
183 183 to = c1.filectx(f).data()
184 184 tn = c2.filectx(f).data()
185 185 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f,
186 186 opts=diffopts), f, tn)
187 187 for f in added:
188 188 to = None
189 189 tn = c2.filectx(f).data()
190 190 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f,
191 191 opts=diffopts), f, tn)
192 192 for f in removed:
193 193 to = c1.filectx(f).data()
194 194 tn = None
195 195 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f,
196 196 opts=diffopts), f, tn)
197 197
198 198 def changelog(self, ctx, shortlog=False):
199 199 def changelist(**map):
200 200 parity = (start - end) & 1
201 201 cl = self.repo.changelog
202 202 l = [] # build a list in forward order for efficiency
203 203 for i in xrange(start, end):
204 204 ctx = self.repo.changectx(i)
205 205 n = ctx.node()
206 206
207 207 l.insert(0, {"parity": parity,
208 208 "author": ctx.user(),
209 209 "parent": self.siblings(ctx.parents(), i - 1),
210 210 "child": self.siblings(ctx.children(), i + 1),
211 211 "changelogtag": self.showtag("changelogtag",n),
212 212 "desc": ctx.description(),
213 213 "date": ctx.date(),
214 214 "files": self.listfilediffs(ctx.files(), n),
215 215 "rev": i,
216 216 "node": hex(n)})
217 217 parity = 1 - parity
218 218
219 219 for e in l:
220 220 yield e
221 221
222 222 maxchanges = shortlog and self.maxshortchanges or self.maxchanges
223 223 cl = self.repo.changelog
224 224 count = cl.count()
225 225 pos = ctx.rev()
226 226 start = max(0, pos - maxchanges + 1)
227 227 end = min(count, start + maxchanges)
228 228 pos = end - 1
229 229
230 230 changenav = revnavgen(pos, maxchanges, count, self.repo.changectx)
231 231
232 232 yield self.t(shortlog and 'shortlog' or 'changelog',
233 233 changenav=changenav,
234 234 node=hex(cl.tip()),
235 235 rev=pos, changesets=count, entries=changelist,
236 236 archives=self.archivelist("tip"))
237 237
238 238 def search(self, query):
239 239
240 240 def changelist(**map):
241 241 cl = self.repo.changelog
242 242 count = 0
243 243 qw = query.lower().split()
244 244
245 245 def revgen():
246 246 for i in xrange(cl.count() - 1, 0, -100):
247 247 l = []
248 248 for j in xrange(max(0, i - 100), i):
249 249 ctx = self.repo.changectx(j)
250 250 l.append(ctx)
251 251 l.reverse()
252 252 for e in l:
253 253 yield e
254 254
255 255 for ctx in revgen():
256 256 miss = 0
257 257 for q in qw:
258 258 if not (q in ctx.user().lower() or
259 259 q in ctx.description().lower() or
260 260 q in " ".join(ctx.files()[:20]).lower()):
261 261 miss = 1
262 262 break
263 263 if miss:
264 264 continue
265 265
266 266 count += 1
267 267 n = ctx.node()
268 268
269 269 yield self.t('searchentry',
270 270 parity=self.stripes(count),
271 271 author=ctx.user(),
272 272 parent=self.siblings(ctx.parents()),
273 273 child=self.siblings(ctx.children()),
274 274 changelogtag=self.showtag("changelogtag",n),
275 275 desc=ctx.description(),
276 276 date=ctx.date(),
277 277 files=self.listfilediffs(ctx.files(), n),
278 278 rev=ctx.rev(),
279 279 node=hex(n))
280 280
281 281 if count >= self.maxchanges:
282 282 break
283 283
284 284 cl = self.repo.changelog
285 285
286 286 yield self.t('search',
287 287 query=query,
288 288 node=hex(cl.tip()),
289 289 entries=changelist)
290 290
291 291 def changeset(self, ctx):
292 292 n = ctx.node()
293 293 parents = ctx.parents()
294 294 p1 = parents[0].node()
295 295
296 296 files = []
297 297 parity = 0
298 298 for f in ctx.files():
299 299 files.append(self.t("filenodelink",
300 300 node=hex(n), file=f,
301 301 parity=parity))
302 302 parity = 1 - parity
303 303
304 304 def diff(**map):
305 305 yield self.diff(p1, n, None)
306 306
307 307 yield self.t('changeset',
308 308 diff=diff,
309 309 rev=ctx.rev(),
310 310 node=hex(n),
311 311 parent=self.siblings(parents),
312 312 child=self.siblings(ctx.children()),
313 313 changesettag=self.showtag("changesettag",n),
314 314 author=ctx.user(),
315 315 desc=ctx.description(),
316 316 date=ctx.date(),
317 317 files=files,
318 318 archives=self.archivelist(hex(n)))
319 319
320 320 def filelog(self, fctx):
321 321 f = fctx.path()
322 322 fl = fctx.filelog()
323 323 count = fl.count()
324 324 pagelen = self.maxshortchanges
325 325 pos = fctx.filerev()
326 326 start = max(0, pos - pagelen + 1)
327 327 end = min(count, start + pagelen)
328 328 pos = end - 1
329 329
330 330 def entries(**map):
331 331 l = []
332 332 parity = (count - 1) & 1
333 333
334 334 for i in xrange(start, end):
335 335 ctx = fctx.filectx(i)
336 336 n = fl.node(i)
337 337
338 338 l.insert(0, {"parity": parity,
339 339 "filerev": i,
340 340 "file": f,
341 341 "node": hex(ctx.node()),
342 342 "author": ctx.user(),
343 343 "date": ctx.date(),
344 344 "rename": self.renamelink(fl, n),
345 345 "parent": self.siblings(fctx.parents()),
346 346 "child": self.siblings(fctx.children()),
347 347 "desc": ctx.description()})
348 348 parity = 1 - parity
349 349
350 350 for e in l:
351 351 yield e
352 352
353 353 nodefunc = lambda x: fctx.filectx(fileid=x)
354 354 nav = revnavgen(pos, pagelen, count, nodefunc)
355 355 yield self.t("filelog", file=f, node=hex(fctx.node()), nav=nav,
356 356 entries=entries)
357 357
358 358 def filerevision(self, fctx):
359 359 f = fctx.path()
360 360 text = fctx.data()
361 361 fl = fctx.filelog()
362 362 n = fctx.filenode()
363 363
364 364 mt = mimetypes.guess_type(f)[0]
365 365 rawtext = text
366 366 if util.binary(text):
367 367 mt = mt or 'application/octet-stream'
368 368 text = "(binary:%s)" % mt
369 369 mt = mt or 'text/plain'
370 370
371 371 def lines():
372 372 for l, t in enumerate(text.splitlines(1)):
373 373 yield {"line": t,
374 374 "linenumber": "% 6d" % (l + 1),
375 375 "parity": self.stripes(l)}
376 376
377 377 yield self.t("filerevision",
378 378 file=f,
379 379 path=_up(f),
380 380 text=lines(),
381 381 raw=rawtext,
382 382 mimetype=mt,
383 383 rev=fctx.rev(),
384 384 node=hex(fctx.node()),
385 385 author=fctx.user(),
386 386 date=fctx.date(),
387 387 desc=fctx.description(),
388 388 parent=self.siblings(fctx.parents()),
389 389 child=self.siblings(fctx.children()),
390 390 rename=self.renamelink(fl, n),
391 391 permissions=fctx.manifest().execf(f))
392 392
393 393 def fileannotate(self, fctx):
394 394 f = fctx.path()
395 395 n = fctx.filenode()
396 396 fl = fctx.filelog()
397 397
398 398 def annotate(**map):
399 399 parity = 0
400 400 last = None
401 401 for f, l in fctx.annotate(follow=True):
402 402 fnode = f.filenode()
403 403 name = self.repo.ui.shortuser(f.user())
404 404
405 405 if last != fnode:
406 406 parity = 1 - parity
407 407 last = fnode
408 408
409 409 yield {"parity": parity,
410 410 "node": hex(f.node()),
411 411 "rev": f.rev(),
412 412 "author": name,
413 413 "file": f.path(),
414 414 "line": l}
415 415
416 416 yield self.t("fileannotate",
417 417 file=f,
418 418 annotate=annotate,
419 419 path=_up(f),
420 420 rev=fctx.rev(),
421 421 node=hex(fctx.node()),
422 422 author=fctx.user(),
423 423 date=fctx.date(),
424 424 desc=fctx.description(),
425 425 rename=self.renamelink(fl, n),
426 426 parent=self.siblings(fctx.parents()),
427 427 child=self.siblings(fctx.children()),
428 428 permissions=fctx.manifest().execf(f))
429 429
430 430 def manifest(self, ctx, path):
431 431 mf = ctx.manifest()
432 432 node = ctx.node()
433 433
434 434 files = {}
435 435
436 436 if path and path[-1] != "/":
437 437 path += "/"
438 438 l = len(path)
439 439 abspath = "/" + path
440 440
441 441 for f, n in mf.items():
442 442 if f[:l] != path:
443 443 continue
444 444 remain = f[l:]
445 445 if "/" in remain:
446 446 short = remain[:remain.index("/") + 1] # bleah
447 447 files[short] = (f, None)
448 448 else:
449 449 short = os.path.basename(remain)
450 450 files[short] = (f, n)
451 451
452 452 def filelist(**map):
453 453 parity = 0
454 454 fl = files.keys()
455 455 fl.sort()
456 456 for f in fl:
457 457 full, fnode = files[f]
458 458 if not fnode:
459 459 continue
460 460
461 461 yield {"file": full,
462 462 "parity": self.stripes(parity),
463 463 "basename": f,
464 464 "size": ctx.filectx(full).size(),
465 465 "permissions": mf.execf(full)}
466 466 parity += 1
467 467
468 468 def dirlist(**map):
469 469 parity = 0
470 470 fl = files.keys()
471 471 fl.sort()
472 472 for f in fl:
473 473 full, fnode = files[f]
474 474 if fnode:
475 475 continue
476 476
477 477 yield {"parity": self.stripes(parity),
478 478 "path": os.path.join(abspath, f),
479 479 "basename": f[:-1]}
480 480 parity += 1
481 481
482 482 yield self.t("manifest",
483 483 rev=ctx.rev(),
484 484 node=hex(node),
485 485 path=abspath,
486 486 up=_up(abspath),
487 487 fentries=filelist,
488 488 dentries=dirlist,
489 489 archives=self.archivelist(hex(node)))
490 490
491 491 def tags(self):
492 492 i = self.repo.tagslist()
493 493 i.reverse()
494 494
495 495 def entries(notip=False, **map):
496 496 parity = 0
497 497 for k, n in i:
498 498 if notip and k == "tip":
499 499 continue
500 500 yield {"parity": self.stripes(parity),
501 501 "tag": k,
502 502 "date": self.repo.changectx(n).date(),
503 503 "node": hex(n)}
504 504 parity += 1
505 505
506 506 yield self.t("tags",
507 507 node=hex(self.repo.changelog.tip()),
508 508 entries=lambda **x: entries(False, **x),
509 509 entriesnotip=lambda **x: entries(True, **x))
510 510
511 511 def summary(self):
512 512 i = self.repo.tagslist()
513 513 i.reverse()
514 514
515 515 def tagentries(**map):
516 516 parity = 0
517 517 count = 0
518 518 for k, n in i:
519 519 if k == "tip": # skip tip
520 520 continue;
521 521
522 522 count += 1
523 523 if count > 10: # limit to 10 tags
524 524 break;
525 525
526 526 yield self.t("tagentry",
527 527 parity=self.stripes(parity),
528 528 tag=k,
529 529 node=hex(n),
530 530 date=self.repo.changectx(n).date())
531 531 parity += 1
532 532
533 533 def heads(**map):
534 534 parity = 0
535 535 count = 0
536 536
537 537 for node in self.repo.heads():
538 538 count += 1
539 539 if count > 10:
540 540 break;
541 541
542 542 ctx = self.repo.changectx(node)
543 543
544 544 yield {'parity': self.stripes(parity),
545 545 'branch': ctx.branch(),
546 546 'node': hex(node),
547 547 'date': ctx.date()}
548 548 parity += 1
549 549
550 550 def changelist(**map):
551 551 parity = 0
552 552 l = [] # build a list in forward order for efficiency
553 553 for i in xrange(start, end):
554 554 ctx = self.repo.changectx(i)
555 555 hn = hex(ctx.node())
556 556
557 557 l.insert(0, self.t(
558 558 'shortlogentry',
559 559 parity=parity,
560 560 author=ctx.user(),
561 561 desc=ctx.description(),
562 562 date=ctx.date(),
563 563 rev=i,
564 564 node=hn))
565 565 parity = 1 - parity
566 566
567 567 yield l
568 568
569 569 cl = self.repo.changelog
570 570 count = cl.count()
571 571 start = max(0, count - self.maxchanges)
572 572 end = min(count, start + self.maxchanges)
573 573
574 574 yield self.t("summary",
575 575 desc=self.config("web", "description", "unknown"),
576 576 owner=(self.config("ui", "username") or # preferred
577 577 self.config("web", "contact") or # deprecated
578 578 self.config("web", "author", "unknown")), # also
579 579 lastchange=cl.read(cl.tip())[2],
580 580 tags=tagentries,
581 581 heads=heads,
582 582 shortlog=changelist,
583 583 node=hex(cl.tip()),
584 584 archives=self.archivelist("tip"))
585 585
586 586 def filediff(self, fctx):
587 587 n = fctx.node()
588 588 path = fctx.path()
589 589 parents = fctx.parents()
590 590 p1 = parents and parents[0].node() or nullid
591 591
592 592 def diff(**map):
593 593 yield self.diff(p1, n, [path])
594 594
595 595 yield self.t("filediff",
596 596 file=path,
597 597 node=hex(n),
598 598 rev=fctx.rev(),
599 599 parent=self.siblings(parents),
600 600 child=self.siblings(fctx.children()),
601 601 diff=diff)
602 602
    # archive format table:
    #   type -> (content-type header, archival.archive kind,
    #            filename extension, content-encoding header or None)
    archive_specs = {
        'bz2': ('application/x-tar', 'tbz2', '.tar.bz2', None),
        'gz': ('application/x-tar', 'tgz', '.tar.gz', None),
        'zip': ('application/zip', 'zip', '.zip', None),
    }
608 608
    def archive(self, req, id, type_):
        """Stream an archive of changeset `id` in format `type_`
        (a key of archive_specs) as an attachment download."""
        # sanitize the repo name for use in a filename
        reponame = re.sub(r"\W+", "-", os.path.basename(self.reponame))
        cnode = self.repo.lookup(id)
        arch_version = id
        if cnode == id:
            # NOTE(review): this triggers when lookup() returned `id`
            # unchanged (i.e. it was already a raw node) — in that case
            # use the short hex form for the archive name; confirm intent
            arch_version = short(cnode)
        name = "%s-%s" % (reponame, arch_version)
        mimetype, artype, extension, encoding = self.archive_specs[type_]
        headers = [('Content-type', mimetype),
                   ('Content-disposition', 'attachment; filename=%s%s' %
                    (name, extension))]
        if encoding:
            headers.append(('Content-encoding', encoding))
        req.header(headers)
        # stream the archive straight to the response body
        archival.archive(self.repo, req.out, cnode, artype, prefix=name)
624 624
625 625 # add tags to things
626 626 # tags -> list of changesets corresponding to tags
627 627 # find tag, changeset, file
628 628
629 629 def cleanpath(self, path):
630 630 path = path.lstrip('/')
631 631 return util.canonpath(self.repo.root, '', path)
632 632
633 633 def run(self):
634 634 if not os.environ.get('GATEWAY_INTERFACE', '').startswith("CGI/1."):
635 635 raise RuntimeError("This function is only intended to be called while running as a CGI script.")
636 636 import mercurial.hgweb.wsgicgi as wsgicgi
637 637 from request import wsgiapplication
638 638 def make_web_app():
639 639 return self
640 640 wsgicgi.launch(wsgiapplication(make_web_app))
641 641
    def run_wsgi(self, req):
        """Handle one WSGI request: normalize the query into the
        traditional cmd/args form, build the templater, and dispatch
        to the matching do_* method."""
        def header(**map):
            # render the "header" template and split it into HTTP
            # headers (via mimetools) plus leftover body text
            header_file = cStringIO.StringIO(
                ''.join(self.t("header", encoding=util._encoding, **map)))
            msg = mimetools.Message(header_file, 0)
            req.header(msg.items())
            yield header_file.read()

        def rawfileheader(**map):
            req.header([('Content-type', map['mimetype']),
                        ('Content-disposition', 'filename=%s' % map['file']),
                        ('Content-length', str(len(map['raw'])))])
            yield ''

        def footer(**map):
            yield self.t("footer", **map)

        def motd(**map):
            yield self.config("web", "motd", "")

        def expand_form(form):
            # expand single-letter command shortcuts (e.g. ?cs=node)
            # into the full cmd/argument form
            shortcuts = {
                'cl': [('cmd', ['changelog']), ('rev', None)],
                'sl': [('cmd', ['shortlog']), ('rev', None)],
                'cs': [('cmd', ['changeset']), ('node', None)],
                'f': [('cmd', ['file']), ('filenode', None)],
                'fl': [('cmd', ['filelog']), ('filenode', None)],
                'fd': [('cmd', ['filediff']), ('node', None)],
                'fa': [('cmd', ['annotate']), ('filenode', None)],
                'mf': [('cmd', ['manifest']), ('manifest', None)],
                'ca': [('cmd', ['archive']), ('node', None)],
                'tags': [('cmd', ['tags'])],
                'tip': [('cmd', ['changeset']), ('node', ['tip'])],
                'static': [('cmd', ['static']), ('file', None)]
            }

            for k in shortcuts.iterkeys():
                if form.has_key(k):
                    for name, value in shortcuts[k]:
                        if value is None:
                            # None means: reuse the shortcut's own value
                            value = form[k]
                        form[name] = value
                    del form[k]

        def rewrite_request(req):
            '''translate new web interface to traditional format'''

            def spliturl(req):
                def firstitem(query):
                    return query.split('&', 1)[0].split(';', 1)[0]

                def normurl(url):
                    # collapse duplicate slashes, preserving a single
                    # leading and trailing slash if present
                    inner = '/'.join([x for x in url.split('/') if x])
                    tl = len(url) > 1 and url.endswith('/') and '/' or ''

                    return '%s%s%s' % (url.startswith('/') and '/' or '',
                                       inner, tl)

                root = normurl(urllib.unquote(req.env.get('REQUEST_URI', '').split('?', 1)[0]))
                pi = normurl(req.env.get('PATH_INFO', ''))
                if pi:
                    # strip leading /
                    pi = pi[1:]
                    if pi:
                        root = root[:-len(pi)]
                    if req.env.has_key('REPO_NAME'):
                        rn = req.env['REPO_NAME'] + '/'
                        root += rn
                        query = pi[len(rn):]
                    else:
                        query = pi
                else:
                    root += '?'
                    query = firstitem(req.env['QUERY_STRING'])

                return (root, query)

            req.url, query = spliturl(req)

            if req.form.has_key('cmd'):
                # old style
                return

            # path-style URL: cmd[/node[/file...]], optionally with a
            # "style-" prefix on the command segment
            args = query.split('/', 2)
            if not args or not args[0]:
                return

            cmd = args.pop(0)
            style = cmd.rfind('-')
            if style != -1:
                req.form['style'] = [cmd[:style]]
                cmd = cmd[style+1:]
            # avoid accepting e.g. style parameter as command
            if hasattr(self, 'do_' + cmd):
                req.form['cmd'] = [cmd]

            if args and args[0]:
                node = args.pop(0)
                req.form['node'] = [node]
            if args:
                req.form['file'] = args

            if cmd == 'static':
                req.form['file'] = req.form['node']
            elif cmd == 'archive':
                # derive the archive type from the filename extension
                fn = req.form['node'][0]
                for type_, spec in self.archive_specs.iteritems():
                    ext = spec[2]
                    if fn.endswith(ext):
                        req.form['node'] = [fn[:-len(ext)]]
                        req.form['type'] = [type_]

        def sessionvars(**map):
            # expose non-default session settings (currently only the
            # style override) so templates can embed them in links
            fields = []
            if req.form.has_key('style'):
                style = req.form['style'][0]
                if style != self.config('web', 'style', ''):
                    fields.append(('style', style))

            separator = req.url[-1] == '?' and ';' or '?'
            for name, value in fields:
                yield dict(name=name, value=value, separator=separator)
                separator = ';'

        self.refresh()

        expand_form(req.form)
        rewrite_request(req)

        style = self.config("web", "style", "")
        if req.form.has_key('style'):
            style = req.form['style'][0]
        mapfile = style_map(self.templatepath, style)

        port = req.env["SERVER_PORT"]
        port = port != "80" and (":" + port) or ""
        urlbase = 'http://%s%s' % (req.env['SERVER_NAME'], port)
        staticurl = self.config("web", "staticurl") or req.url + 'static/'
        if not staticurl.endswith('/'):
            staticurl += '/'

        if not self.reponame:
            self.reponame = (self.config("web", "name")
                             or req.env.get('REPO_NAME')
                             or req.url.strip('/') or self.repo.root)

        self.t = templater.templater(mapfile, templater.common_filters,
                                     defaults={"url": req.url,
                                               "staticurl": staticurl,
                                               "urlbase": urlbase,
                                               "repo": self.reponame,
                                               "header": header,
                                               "footer": footer,
                                               "motd": motd,
                                               "rawfileheader": rawfileheader,
                                               "sessionvars": sessionvars
                                               })

        try:
            if not req.form.has_key('cmd'):
                req.form['cmd'] = [self.t.cache['default']]

            cmd = req.form['cmd'][0]

            method = getattr(self, 'do_' + cmd, None)
            if method:
                try:
                    method(req)
                except (hg.RepoError, revlog.RevlogError), inst:
                    req.write(self.t("error", error=str(inst)))
            else:
                req.write(self.t("error", error='No such method: ' + cmd))
        finally:
            # drop the templater so per-request state cannot leak into
            # the next request
            self.t = None
816 816
817 817 def changectx(self, req):
818 818 if req.form.has_key('node'):
819 819 changeid = req.form['node'][0]
820 820 elif req.form.has_key('manifest'):
821 821 changeid = req.form['manifest'][0]
822 822 else:
823 823 changeid = self.repo.changelog.count() - 1
824 824
825 825 try:
826 826 ctx = self.repo.changectx(changeid)
827 827 except hg.RepoError:
828 828 man = self.repo.manifest
829 829 mn = man.lookup(changeid)
830 830 ctx = self.repo.changectx(man.linkrev(mn))
831 831
832 832 return ctx
833 833
834 834 def filectx(self, req):
835 835 path = self.cleanpath(req.form['file'][0])
836 836 if req.form.has_key('node'):
837 837 changeid = req.form['node'][0]
838 838 else:
839 839 changeid = req.form['filenode'][0]
840 840 try:
841 841 ctx = self.repo.changectx(changeid)
842 842 fctx = ctx.filectx(path)
843 843 except hg.RepoError:
844 844 fctx = self.repo.filectx(path, fileid=changeid)
845 845
846 846 return fctx
847 847
848 848 def stripes(self, parity):
849 849 "make horizontal stripes for easier reading"
850 850 if self.stripecount:
851 851 return (1 + parity / self.stripecount) & 1
852 852 else:
853 853 return 0
854 854
855 855 def do_log(self, req):
856 856 if req.form.has_key('file') and req.form['file'][0]:
857 857 self.do_filelog(req)
858 858 else:
859 859 self.do_changelog(req)
860 860
861 861 def do_rev(self, req):
862 862 self.do_changeset(req)
863 863
864 864 def do_file(self, req):
865 865 path = self.cleanpath(req.form.get('file', [''])[0])
866 866 if path:
867 867 try:
868 868 req.write(self.filerevision(self.filectx(req)))
869 869 return
870 870 except revlog.LookupError:
871 871 pass
872 872
873 873 req.write(self.manifest(self.changectx(req), path))
874 874
875 875 def do_diff(self, req):
876 876 self.do_filediff(req)
877 877
878 878 def do_changelog(self, req, shortlog = False):
879 879 if req.form.has_key('node'):
880 880 ctx = self.changectx(req)
881 881 else:
882 882 if req.form.has_key('rev'):
883 883 hi = req.form['rev'][0]
884 884 else:
885 885 hi = self.repo.changelog.count() - 1
886 886 try:
887 887 ctx = self.repo.changectx(hi)
888 888 except hg.RepoError:
889 889 req.write(self.search(hi)) # XXX redirect to 404 page?
890 890 return
891 891
892 892 req.write(self.changelog(ctx, shortlog = shortlog))
893 893
894 894 def do_shortlog(self, req):
895 895 self.do_changelog(req, shortlog = True)
896 896
897 897 def do_changeset(self, req):
898 898 req.write(self.changeset(self.changectx(req)))
899 899
900 900 def do_manifest(self, req):
901 901 req.write(self.manifest(self.changectx(req),
902 902 self.cleanpath(req.form['path'][0])))
903 903
904 904 def do_tags(self, req):
905 905 req.write(self.tags())
906 906
907 907 def do_summary(self, req):
908 908 req.write(self.summary())
909 909
910 910 def do_filediff(self, req):
911 911 req.write(self.filediff(self.filectx(req)))
912 912
913 913 def do_annotate(self, req):
914 914 req.write(self.fileannotate(self.filectx(req)))
915 915
916 916 def do_filelog(self, req):
917 917 req.write(self.filelog(self.filectx(req)))
918 918
919 919 def do_lookup(self, req):
920 920 try:
921 921 r = hex(self.repo.lookup(req.form['key'][0]))
922 922 success = 1
923 923 except Exception,inst:
924 924 r = str(inst)
925 925 success = 0
926 926 resp = "%s %s\n" % (success, r)
927 927 req.httphdr("application/mercurial-0.1", length=len(resp))
928 928 req.write(resp)
929 929
930 930 def do_heads(self, req):
931 931 resp = " ".join(map(hex, self.repo.heads())) + "\n"
932 932 req.httphdr("application/mercurial-0.1", length=len(resp))
933 933 req.write(resp)
934 934
935 935 def do_branches(self, req):
936 936 nodes = []
937 937 if req.form.has_key('nodes'):
938 938 nodes = map(bin, req.form['nodes'][0].split(" "))
939 939 resp = cStringIO.StringIO()
940 940 for b in self.repo.branches(nodes):
941 941 resp.write(" ".join(map(hex, b)) + "\n")
942 942 resp = resp.getvalue()
943 943 req.httphdr("application/mercurial-0.1", length=len(resp))
944 944 req.write(resp)
945 945
946 946 def do_between(self, req):
947 947 if req.form.has_key('pairs'):
948 948 pairs = [map(bin, p.split("-"))
949 949 for p in req.form['pairs'][0].split(" ")]
950 950 resp = cStringIO.StringIO()
951 951 for b in self.repo.between(pairs):
952 952 resp.write(" ".join(map(hex, b)) + "\n")
953 953 resp = resp.getvalue()
954 954 req.httphdr("application/mercurial-0.1", length=len(resp))
955 955 req.write(resp)
956 956
957 957 def do_changegroup(self, req):
958 958 req.httphdr("application/mercurial-0.1")
959 959 nodes = []
960 960 if not self.allowpull:
961 961 return
962 962
963 963 if req.form.has_key('roots'):
964 964 nodes = map(bin, req.form['roots'][0].split(" "))
965 965
966 966 z = zlib.compressobj()
967 967 f = self.repo.changegroup(nodes, 'serve')
968 968 while 1:
969 969 chunk = f.read(4096)
970 970 if not chunk:
971 971 break
972 972 req.write(z.compress(chunk))
973 973
974 974 req.write(z.flush())
975 975
976 976 def do_changegroupsubset(self, req):
977 977 req.httphdr("application/mercurial-0.1")
978 978 bases = []
979 979 heads = []
980 980 if not self.allowpull:
981 981 return
982 982
983 983 if req.form.has_key('bases'):
984 984 bases = [bin(x) for x in req.form['bases'][0].split(' ')]
985 985 if req.form.has_key('heads'):
986 986 heads = [bin(x) for x in req.form['heads'][0].split(' ')]
987 987
988 988 z = zlib.compressobj()
989 989 f = self.repo.changegroupsubset(bases, heads, 'serve')
990 990 while 1:
991 991 chunk = f.read(4096)
992 992 if not chunk:
993 993 break
994 994 req.write(z.compress(chunk))
995 995
996 996 req.write(z.flush())
997 997
998 998 def do_archive(self, req):
999 999 type_ = req.form['type'][0]
1000 1000 allowed = self.configlist("web", "allow_archive")
1001 1001 if (type_ in self.archives and (type_ in allowed or
1002 1002 self.configbool("web", "allow" + type_, False))):
1003 1003 self.archive(req, req.form['node'][0], type_)
1004 1004 return
1005 1005
1006 1006 req.write(self.t("error"))
1007 1007
1008 1008 def do_static(self, req):
1009 1009 fname = req.form['file'][0]
1010 1010 # a repo owner may set web.static in .hg/hgrc to get any file
1011 1011 # readable by the user running the CGI script
1012 1012 static = self.config("web", "static",
1013 1013 os.path.join(self.templatepath, "static"),
1014 1014 untrusted=False)
1015 1015 req.write(staticfile(static, fname, req)
1016 1016 or self.t("error", error="%r not found" % fname))
1017 1017
    def do_capabilities(self, req):
        """Wire protocol: advertise this server's capabilities as a
        space-separated list."""
        caps = ['lookup', 'changegroupsubset']
        if self.configbool('server', 'uncompressed'):
            # streaming clone: advertise the changelog's revlog version
            # so clients know the on-disk format they will receive
            caps.append('stream=%d' % self.repo.changelog.version)
        # XXX: make configurable and/or share code with do_unbundle:
        unbundleversions = ['HG10GZ', 'HG10BZ', 'HG10UN']
        if unbundleversions:
            caps.append('unbundle=%s' % ','.join(unbundleversions))
        resp = ' '.join(caps)
        req.httphdr("application/mercurial-0.1", length=len(resp))
        req.write(resp)
1029 1029
1030 1030 def check_perm(self, req, op, default):
1031 1031 '''check permission for operation based on user auth.
1032 1032 return true if op allowed, else false.
1033 1033 default is policy to use if no config given.'''
1034 1034
1035 1035 user = req.env.get('REMOTE_USER')
1036 1036
1037 1037 deny = self.configlist('web', 'deny_' + op)
1038 1038 if deny and (not user or deny == ['*'] or user in deny):
1039 1039 return False
1040 1040
1041 1041 allow = self.configlist('web', 'allow_' + op)
1042 1042 return (allow and (allow == ['*'] or user in allow)) or default
1043 1043
    def do_unbundle(self, req):
        """Wire protocol: receive a pushed bundle and apply it.

        The incoming bundle is spooled to a temporary file first so the
        repository lock is held only while applying it; the reply is
        '<ret>\\n' followed by captured addchangegroup output."""
        def bail(response, headers={}):
            length = int(req.env['CONTENT_LENGTH'])
            for s in util.filechunkiter(req, limit=length):
                # drain incoming bundle, else client will not see
                # response when run outside cgi script
                pass
            req.httphdr("application/mercurial-0.1", headers=headers)
            req.write('0\n')
            req.write(response)

        # require ssl by default, auth info cannot be sniffed and
        # replayed
        ssl_req = self.configbool('web', 'push_ssl', True)
        if ssl_req:
            if not req.env.get('HTTPS'):
                bail(_('ssl required\n'))
                return
            proto = 'https'
        else:
            proto = 'http'

        # do not allow push unless explicitly allowed
        if not self.check_perm(req, 'push', False):
            bail(_('push not authorized\n'),
                 headers={'status': '401 Unauthorized'})
            return

        their_heads = req.form['heads'][0].split(' ')

        def check_heads():
            # client must have seen our current heads (or forced)
            heads = map(hex, self.repo.heads())
            return their_heads == [hex('force')] or their_heads == heads

        # fail early if possible
        if not check_heads():
            bail(_('unsynced changes\n'))
            return

        req.httphdr("application/mercurial-0.1")

        # do not lock repo until all changegroup data is
        # streamed. save to temporary file.

        fd, tempname = tempfile.mkstemp(prefix='hg-unbundle-')
        fp = os.fdopen(fd, 'wb+')
        try:
            length = int(req.env['CONTENT_LENGTH'])
            for s in util.filechunkiter(req, limit=length):
                fp.write(s)

            try:
                lock = self.repo.lock()
                try:
                    # re-check now that we hold the lock: heads may
                    # have moved while the bundle was streaming in
                    if not check_heads():
                        req.write('0\n')
                        req.write(_('unsynced changes\n'))
                        return

                    fp.seek(0)
                    header = fp.read(6)
                    # pick a decompressing generator from the 6-byte
                    # bundle header
                    if not header.startswith("HG"):
                        # old client with uncompressed bundle
                        def generator(f):
                            yield header
                            for chunk in f:
                                yield chunk
                    elif not header.startswith("HG10"):
                        req.write("0\n")
                        req.write(_("unknown bundle version\n"))
                        return
                    elif header == "HG10GZ":
                        def generator(f):
                            zd = zlib.decompressobj()
                            for chunk in f:
                                yield zd.decompress(chunk)
                    elif header == "HG10BZ":
                        def generator(f):
                            zd = bz2.BZ2Decompressor()
                            # re-feed the magic stripped from the header
                            zd.decompress("BZ")
                            for chunk in f:
                                yield zd.decompress(chunk)
                    elif header == "HG10UN":
                        def generator(f):
                            for chunk in f:
                                yield chunk
                    else:
                        req.write("0\n")
                        req.write(_("unknown bundle compression type\n"))
                        return
                    gen = generator(util.filechunkiter(fp, 4096))

                    # send addchangegroup output to client

                    old_stdout = sys.stdout
                    sys.stdout = cStringIO.StringIO()

                    try:
                        url = 'remote:%s:%s' % (proto,
                                                req.env.get('REMOTE_HOST', ''))
                        try:
                            ret = self.repo.addchangegroup(
                                    util.chunkbuffer(gen), 'serve', url)
                        except util.Abort, inst:
                            sys.stdout.write("abort: %s\n" % inst)
                            ret = 0
                    finally:
                        # restore stdout before replying
                        val = sys.stdout.getvalue()
                        sys.stdout = old_stdout
                    req.write('%d\n' % ret)
                    req.write(val)
                finally:
                    lock.release()
            except (OSError, IOError), inst:
                req.write('0\n')
                filename = getattr(inst, 'filename', '')
                # Don't send our filesystem layout to the client
                if filename.startswith(self.repo.root):
                    filename = filename[len(self.repo.root)+1:]
                else:
                    filename = ''
                error = getattr(inst, 'strerror', 'Unknown error')
                req.write('%s: %s\n' % (error, filename))
        finally:
            fp.close()
            os.unlink(tempname)
1170 1170
1171 1171 def do_stream_out(self, req):
1172 1172 req.httphdr("application/mercurial-0.1")
1173 1173 streamclone.stream_out(self.repo, req)
@@ -1,1964 +1,1941
1 1 # localrepo.py - read/write repository class for mercurial
2 2 #
3 3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import *
9 9 from i18n import _
10 10 import repo, appendfile, changegroup
11 11 import changelog, dirstate, filelog, manifest, context
12 12 import re, lock, transaction, tempfile, stat, mdiff, errno, ui
13 13 import os, revlog, time, util
14 14
15 15 class localrepository(repo.repository):
    # wire-protocol capabilities this repository implementation offers
    capabilities = ('lookup', 'changegroupsubset')
    # repository requirements (.hg/requires entries) this class can read
    supported = ('revlogv1', 'store')
18 18
    def __del__(self):
        # drop our reference to any pending transaction so its own
        # __del__ can run (and roll it back) when the repo goes away
        self.transhandle = None
    def __init__(self, parentui, path=None, create=0):
        """Open the repository at `path` (or, with create=1, initialize
        a new one there).

        With no path, search upward from the current directory for a
        .hg directory. Raises repo.RepoError when nothing is found,
        when an unsupported requirement is present, or when create is
        requested on an existing repository."""
        repo.repository.__init__(self)
        if not path:
            # walk up from cwd until a .hg directory is found
            p = os.getcwd()
            while not os.path.isdir(os.path.join(p, ".hg")):
                oldp = p
                p = os.path.dirname(p)
                if p == oldp:
                    raise repo.RepoError(_("There is no Mercurial repository"
                                           " here (.hg not found)"))
            path = p

        self.root = os.path.realpath(path)
        self.path = os.path.join(self.root, ".hg")
        self.origroot = path
        self.opener = util.opener(self.path)      # opens files under .hg
        self.wopener = util.opener(self.root)     # opens working-dir files

        if not os.path.isdir(self.path):
            if create:
                if not os.path.exists(path):
                    os.mkdir(path)
                os.mkdir(self.path)
                requirements = ["revlogv1"]
                if parentui.configbool('format', 'usestore', True):
                    os.mkdir(os.path.join(self.path, "store"))
                    requirements.append("store")
                # create an invalid changelog
                self.opener("00changelog.i", "a").write(
                    '\0\0\0\2' # represents revlogv2
                    ' dummy changelog to prevent using the old repo layout'
                )
                reqfile = self.opener("requires", "w")
                for r in requirements:
                    reqfile.write("%s\n" % r)
                reqfile.close()
            else:
                raise repo.RepoError(_("repository %s not found") % path)
        elif create:
            raise repo.RepoError(_("repository %s already exists") % path)
        else:
            # find requirements
            try:
                requirements = self.opener("requires").read().splitlines()
            except IOError, inst:
                if inst.errno != errno.ENOENT:
                    raise
                # no requires file: old-style repository, no requirements
                requirements = []
            # check them
            for r in requirements:
                if r not in self.supported:
                    raise repo.RepoError(_("requirement '%s' not supported") % r)

        # setup store
        if "store" in requirements:
            self.encodefn = util.encodefilename
            self.decodefn = util.decodefilename
            self.spath = os.path.join(self.path, "store")
        else:
            self.encodefn = lambda x: x
            self.decodefn = lambda x: x
            self.spath = self.path
        self.sopener = util.encodedopener(util.opener(self.spath), self.encodefn)

        self.ui = ui.ui(parentui=parentui)
        try:
            self.ui.readconfig(self.join("hgrc"), self.root)
        except IOError:
            pass

        # the changelog determines the revlog version/flags used by
        # every other revlog opened through sopener
        self.changelog = changelog.changelog(self.sopener)
        self.sopener.defversion = self.changelog.version
        self.manifest = manifest.manifest(self.sopener)

        fallback = self.ui.config('ui', 'fallbackencoding')
        if fallback:
            util._fallbackencoding = fallback

        # lazily-populated caches
        self.tagscache = None
        self.branchcache = None
        self.nodetagscache = None
        self.filterpats = {}
        self.transhandle = None

        self._link = lambda x: False
        if util.checklink(self.root):
            r = self.root # avoid circular reference in lambda
            self._link = lambda x: util.is_link(os.path.join(r, x))

        self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
131 111
132 112 def url(self):
133 113 return 'file:' + self.root
134 114
    def hook(self, name, throw=False, **args):
        """Run every configured hook whose name matches `name`.

        Python hooks (callables, or 'python:'-prefixed / bare dotted
        names) and external shell commands are both supported. With
        throw=True a failing hook raises util.Abort; otherwise only a
        warning is printed. Returns the last truthy hook result."""
        def callhook(hname, funcname):
            '''call python hook. hook is callable object, looked up as
            name in python module. if callable returns "true", hook
            fails, else passes. if hook raises exception, treated as
            hook failure. exception propagates if throw is "true".

            reason for "true" meaning "hook failed" is so that
            unmodified commands (e.g. mercurial.commands.update) can
            be run as hooks without wrappers to convert return values.'''

            self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
            obj = funcname
            if not callable(obj):
                # resolve "module.attr" (or "hgext_module.attr") to the
                # actual callable
                d = funcname.rfind('.')
                if d == -1:
                    raise util.Abort(_('%s hook is invalid ("%s" not in '
                                       'a module)') % (hname, funcname))
                modname = funcname[:d]
                try:
                    obj = __import__(modname)
                except ImportError:
                    try:
                        # extensions are loaded with hgext_ prefix
                        obj = __import__("hgext_%s" % modname)
                    except ImportError:
                        raise util.Abort(_('%s hook is invalid '
                                           '(import of "%s" failed)') %
                                         (hname, modname))
                try:
                    for p in funcname.split('.')[1:]:
                        obj = getattr(obj, p)
                except AttributeError, err:
                    raise util.Abort(_('%s hook is invalid '
                                       '("%s" is not defined)') %
                                     (hname, funcname))
                if not callable(obj):
                    raise util.Abort(_('%s hook is invalid '
                                       '("%s" is not callable)') %
                                     (hname, funcname))
            try:
                r = obj(ui=self.ui, repo=self, hooktype=name, **args)
            except (KeyboardInterrupt, util.SignalInterrupt):
                raise
            except Exception, exc:
                if isinstance(exc, util.Abort):
                    self.ui.warn(_('error: %s hook failed: %s\n') %
                                 (hname, exc.args[0]))
                else:
                    self.ui.warn(_('error: %s hook raised an exception: '
                                   '%s\n') % (hname, exc))
                if throw:
                    raise
                self.ui.print_exc()
                return True
            if r:
                if throw:
                    raise util.Abort(_('%s hook failed') % hname)
                self.ui.warn(_('warning: %s hook failed\n') % hname)
            return r

        def runhook(name, cmd):
            # external hook: run the shell command with HG_* environment
            # variables carrying the hook arguments
            self.ui.note(_("running hook %s: %s\n") % (name, cmd))
            env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()])
            r = util.system(cmd, environ=env, cwd=self.root)
            if r:
                desc, r = util.explain_exit(r)
                if throw:
                    raise util.Abort(_('%s hook %s') % (name, desc))
                self.ui.warn(_('warning: %s hook %s\n') % (name, desc))
            return r

        r = False
        # select hooks whose name (before any ".suffix") matches
        hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
                 if hname.split(".", 1)[0] == name and cmd]
        hooks.sort()
        for hname, cmd in hooks:
            if callable(cmd):
                r = callhook(hname, cmd) or r
            elif cmd.startswith('python:'):
                r = callhook(hname, cmd[7:].strip()) or r
            else:
                r = runhook(hname, cmd) or r
        return r
219 199
    # characters that may not appear in a tag name
    tag_disallowed = ':\r\n'
221 201
    def _tag(self, name, node, message, local, user, date, parent=None):
        """Shared implementation for tag(): record tag `name` for
        `node`.

        With local=True the tag is appended to .hg/localtags and no
        commit is made. Otherwise .hgtags is updated and committed
        (against `parent` if given, else the dirstate parent), and the
        new changeset node is returned. Fires the pretag/tag hooks."""
        use_dirstate = parent is None

        for c in self.tag_disallowed:
            if c in name:
                raise util.Abort(_('%r cannot be used in a tag name') % c)

        self.hook('pretag', throw=True, node=hex(node), tag=name, local=local)

        if local:
            # local tags are stored in the current charset
            self.opener('localtags', 'a').write('%s %s\n' % (hex(node), name))
            self.hook('tag', node=hex(node), tag=name, local=local)
            return

        # committed tags are stored in UTF-8
        line = '%s %s\n' % (hex(node), util.fromlocal(name))
        if use_dirstate:
            self.wfile('.hgtags', 'ab').write(line)
        else:
            # base the new .hgtags content on the given parent revision
            ntags = self.filectx('.hgtags', parent).data()
            self.wfile('.hgtags', 'ab').write(ntags + line)
        if use_dirstate and self.dirstate.state('.hgtags') == '?':
            self.add(['.hgtags'])

        tagnode = self.commit(['.hgtags'], message, user, date, p1=parent)

        self.hook('tag', node=hex(node), tag=name, local=local)

        return tagnode
252 232
253 233 def tag(self, name, node, message, local, user, date):
254 234 '''tag a revision with a symbolic name.
255 235
256 236 if local is True, the tag is stored in a per-repository file.
257 237 otherwise, it is stored in the .hgtags file, and a new
258 238 changeset is committed with the change.
259 239
260 240 keyword arguments:
261 241
262 242 local: whether to store tag in non-version-controlled file
263 243 (default False)
264 244
265 245 message: commit message to use if committing
266 246
267 247 user: name of user to use if committing
268 248
269 249 date: date tuple to use if committing'''
270 250
271 251 for x in self.status()[:5]:
272 252 if '.hgtags' in x:
273 253 raise util.Abort(_('working copy of .hgtags is changed '
274 254 '(please commit .hgtags manually)'))
275 255
276 256
277 257 self._tag(name, node, message, local, user, date)
278 258
    def tags(self):
        '''return a mapping of tag to node'''
        if self.tagscache:
            return self.tagscache

        # tag name -> (node, {superseded nodes}) accumulated over all
        # .hgtags revisions plus localtags
        globaltags = {}

        def readtags(lines, fn):
            # parse one tags file and merge it into globaltags
            filetags = {}
            count = 0

            def warn(msg):
                self.ui.warn(_("%s, line %s: %s\n") % (fn, count, msg))

            for l in lines:
                count += 1
                if not l:
                    continue
                s = l.split(" ", 1)
                if len(s) != 2:
                    warn(_("cannot parse entry"))
                    continue
                node, key = s
                key = util.tolocal(key.strip()) # stored in UTF-8
                try:
                    bin_n = bin(node)
                except TypeError:
                    warn(_("node '%s' is not well formed") % node)
                    continue
                if bin_n not in self.changelog.nodemap:
                    warn(_("tag '%s' refers to unknown node") % key)
                    continue

                # h collects the nodes this tag pointed at earlier in
                # this same file (later lines supersede earlier ones)
                h = {}
                if key in filetags:
                    n, h = filetags[key]
                    h[n] = True
                filetags[key] = (bin_n, h)

            for k,nh in filetags.items():
                if k not in globaltags:
                    globaltags[k] = nh
                    continue
                # we prefer the global tag if:
                #  it supercedes us OR
                #  mutual supercedes and it has a higher rank
                # otherwise we win because we're tip-most
                an, ah = nh
                bn, bh = globaltags[k]
                if bn != an and an in bh and \
                   (bn not in ah or len(bh) > len(ah)):
                    an = bn
                ah.update(bh)
                globaltags[k] = an, ah

        # read the tags file from each head, ending with the tip
        f = None
        for rev, node, fnode in self._hgtagsnodes():
            f = (f and f.filectx(fnode) or
                 self.filectx('.hgtags', fileid=fnode))
            readtags(f.data().splitlines(), f)

        try:
            data = util.fromlocal(self.opener("localtags").read())
            # localtags are stored in the local character set
            # while the internal tag table is stored in UTF-8
            readtags(data.splitlines(), "localtags")
        except IOError:
            pass

        # drop the supersession history; tags pointing at nullid mean
        # "deleted" and are omitted
        self.tagscache = {}
        for k,nh in globaltags.items():
            n = nh[0]
            if n != nullid:
                self.tagscache[k] = n
        self.tagscache['tip'] = self.changelog.tip()

        return self.tagscache
357 337
358 338 def _hgtagsnodes(self):
359 339 heads = self.heads()
360 340 heads.reverse()
361 341 last = {}
362 342 ret = []
363 343 for node in heads:
364 344 c = self.changectx(node)
365 345 rev = c.rev()
366 346 try:
367 347 fnode = c.filenode('.hgtags')
368 348 except revlog.LookupError:
369 349 continue
370 350 ret.append((rev, node, fnode))
371 351 if fnode in last:
372 352 ret[last[fnode]] = None
373 353 last[fnode] = len(ret) - 1
374 354 return [item for item in ret if item]
375 355
376 356 def tagslist(self):
377 357 '''return a list of tags ordered by revision'''
378 358 l = []
379 359 for t, n in self.tags().items():
380 360 try:
381 361 r = self.changelog.rev(n)
382 362 except:
383 363 r = -2 # sort to the beginning of the list if unknown
384 364 l.append((r, t, n))
385 365 l.sort()
386 366 return [(t, n) for r, t, n in l]
387 367
def nodetags(self, node):
    '''Return the list of tags pointing at the given node.'''
    if not self.nodetagscache:
        # lazily build the reverse mapping node -> [tags]
        cache = {}
        for tag, n in self.tags().items():
            cache.setdefault(n, []).append(tag)
        self.nodetagscache = cache
    return self.nodetagscache.get(node, [])
395 375
396 376 def _branchtags(self):
397 377 partial, last, lrev = self._readbranchcache()
398 378
399 379 tiprev = self.changelog.count() - 1
400 380 if lrev != tiprev:
401 381 self._updatebranchcache(partial, lrev+1, tiprev+1)
402 382 self._writebranchcache(partial, self.changelog.tip(), tiprev)
403 383
404 384 return partial
405 385
def branchtags(self):
    """Return the cached branch map, computing it on first use.

    Branch names are stored in UTF-8 in history but exposed here in
    the local charset, hence the util.tolocal conversion.
    """
    if self.branchcache is not None:
        return self.branchcache

    # seed with an empty dict so changectx calls below don't recurse
    self.branchcache = {}
    for name, node in self._branchtags().items():
        self.branchcache[util.tolocal(name)] = node
    return self.branchcache
418 398
def _readbranchcache(self):
    """Read the on-disk branch cache.

    Returns (partial, last, lrev): partial maps branch name -> tip
    node, last/lrev identify the changelog entry the cache was valid
    for.  Any failure reading or validating the cache yields an empty
    result (nullid/nullrev) instead of propagating the error.
    """
    partial = {}
    try:
        f = self.opener("branch.cache")
        lines = f.read().split('\n')
        f.close()
        # first line is a header: "<hex tip node> <tip rev>"
        last, lrev = lines.pop(0).split(" ", 1)
        last, lrev = bin(last), int(lrev)
        if not (lrev < self.changelog.count() and
                self.changelog.node(lrev) == last): # sanity check
            # invalidate the cache
            raise ValueError('Invalid branch cache: unknown tip')
        # remaining lines: "<hex node> <branch label>"
        for l in lines:
            if not l: continue
            node, label = l.split(" ", 1)
            partial[label.strip()] = bin(node)
    except (KeyboardInterrupt, util.SignalInterrupt):
        raise
    except Exception, inst:
        # any other failure just means "no usable cache"
        if self.ui.debugflag:
            self.ui.warn(str(inst), '\n')
        partial, last, lrev = {}, nullid, nullrev
    return partial, last, lrev
442 422
443 423 def _writebranchcache(self, branches, tip, tiprev):
444 424 try:
445 425 f = self.opener("branch.cache", "w")
446 426 f.write("%s %s\n" % (hex(tip), tiprev))
447 427 for label, node in branches.iteritems():
448 428 f.write("%s %s\n" % (hex(node), label))
449 429 except IOError:
450 430 pass
451 431
def _updatebranchcache(self, partial, start, end):
    """Fold revisions [start, end) into the branch -> node map; the
    newest revision seen on each branch wins."""
    for rev in xrange(start, end):
        ctx = self.changectx(rev)
        partial[ctx.branch()] = ctx.node()
457 437
def lookup(self, key):
    """Resolve a revision key ('.', 'null', tag, branch, full node or
    prefix) to a binary changelog node.

    Raises repo.RepoError when nothing matches.
    """
    if key == '.':
        # working directory's first parent
        key = self.dirstate.parents()[0]
        if key == nullid:
            raise repo.RepoError(_("no revision checked out"))
    elif key == 'null':
        return nullid
    node = self.changelog._match(key)
    if node:
        return node
    tags = self.tags()
    if key in tags:
        return tags[key]
    branches = self.branchtags()
    if key in branches:
        return branches[key]
    node = self.changelog._partialmatch(key)
    if node:
        return node
    raise repo.RepoError(_("unknown revision '%s'") % key)
476 456
def dev(self):
    """Return the device number (st_dev) of the repository path."""
    st = os.lstat(self.path)
    return st.st_dev
479 459
def local(self):
    """This is a local (filesystem-backed) repository."""
    return True
482 462
def join(self, f):
    """Return f joined under the repository metadata path."""
    return os.path.join(self.path, f)
485 465
def sjoin(self, f):
    """Return f, store-encoded via encodefn, joined under the store path."""
    return os.path.join(self.spath, self.encodefn(f))
489 469
def wjoin(self, f):
    """Return f joined under the working directory root."""
    return os.path.join(self.root, f)
492 472
def file(self, f):
    """Return the filelog for tracked file f.

    A single leading '/' is tolerated and stripped.
    """
    path = f
    if path[0] == '/':
        path = path[1:]
    return filelog.filelog(self.sopener, path)
497 477
def changectx(self, changeid=None):
    """Return a context.changectx for the given changeid."""
    return context.changectx(self, changeid)
500 480
def workingctx(self):
    """Return a context.workingctx for the working directory."""
    return context.workingctx(self)
503 483
def parents(self, changeid=None):
    '''
    Return the changectx list for the parents of changeid, or of the
    working directory when changeid is None.  A null second parent is
    omitted from the result.
    '''
    if changeid is None:
        pl = self.dirstate.parents()
    else:
        node = self.changelog.lookup(changeid)
        pl = self.changelog.parents(node)
    ctxs = [self.changectx(pl[0])]
    if pl[1] != nullid:
        ctxs.append(self.changectx(pl[1]))
    return ctxs
516 496
def filectx(self, path, changeid=None, fileid=None):
    """Return a context.filectx for path.

    changeid can be a changeset revision, node, or tag; fileid a file
    revision or node.
    """
    return context.filectx(self, path, changeid, fileid)
521 501
def getcwd(self):
    """Return the dirstate's notion of the current working directory."""
    return self.dirstate.getcwd()
524 504
def wfile(self, f, mode='r'):
    """Open file f from the working directory with the given mode."""
    return self.wopener(f, mode)
527 507
528 508 def _filter(self, filter, filename, data):
529 509 if filter not in self.filterpats:
530 510 l = []
531 511 for pat, cmd in self.ui.configitems(filter):
532 512 mf = util.matcher(self.root, "", [pat], [], [])[1]
533 513 l.append((mf, cmd))
534 514 self.filterpats[filter] = l
535 515
536 516 for mf, cmd in self.filterpats[filter]:
537 517 if mf(filename):
538 518 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
539 519 data = util.filter(data, cmd)
540 520 break
541 521
542 522 return data
543 523
def wread(self, filename):
    """Read filename from the working directory (the link target for
    symlinks) and pass it through the configured "encode" filters."""
    if self._link(filename):
        raw = os.readlink(self.wjoin(filename))
    else:
        raw = self.wopener(filename, 'r').read()
    return self._filter("encode", filename, raw)
550 530
def wwrite(self, filename, data, flags):
    """Write data to filename in the working directory, honoring
    flags: "l" writes a symlink with data as target, "x" sets the
    exec bit.  Data goes through the "decode" filters first."""
    data = self._filter("decode", filename, data)
    if "l" in flags:
        f = self.wjoin(filename)
        try:
            # replace any existing file with the symlink
            os.unlink(f)
        except OSError:
            pass
        d = os.path.dirname(f)
        if not os.path.exists(d):
            os.makedirs(d)
        os.symlink(data, f)
    else:
        try:
            # if the file used to be a symlink, remove it first so we
            # write a regular file instead of through the link
            if self._link(filename):
                os.unlink(self.wjoin(filename))
        except OSError:
            pass
        self.wopener(filename, 'w').write(data)
        util.set_exec(self.wjoin(filename), "x" in flags)
571 551
def wwritedata(self, filename, data):
    """Return data as it would be written to the working directory,
    i.e. after the configured "decode" filters."""
    return self._filter("decode", filename, data)
574 554
def transaction(self):
    """Open a store transaction, or nest into one already running.

    The dirstate is snapshotted to journal.dirstate so rollback can
    restore it; on successful close the journal files are renamed to
    the undo* files (see aftertrans)."""
    tr = self.transhandle
    if tr != None and tr.running():
        # nest inside the transaction already in progress
        return tr.nest()

    # save dirstate for rollback
    try:
        ds = self.opener("dirstate").read()
    except IOError:
        ds = ""
    self.opener("journal.dirstate", "w").write(ds)

    renames = [(self.sjoin("journal"), self.sjoin("undo")),
               (self.join("journal.dirstate"), self.join("undo.dirstate"))]
    tr = transaction.transaction(self.ui.warn, self.sopener,
                                 self.sjoin("journal"),
                                 aftertrans(renames))
    self.transhandle = tr
    return tr
594 574
def recover(self):
    """Roll back an interrupted transaction, if any.

    Returns True when a journal was found and rolled back, False
    otherwise.
    """
    l = self.lock()
    if not os.path.exists(self.sjoin("journal")):
        self.ui.warn(_("no interrupted transaction available\n"))
        return False
    self.ui.status(_("rolling back interrupted transaction\n"))
    transaction.rollback(self.sopener, self.sjoin("journal"))
    self.reload()
    return True
605 585
def rollback(self, wlock=None):
    """Undo the last transaction: restore the store from the undo
    journal and put back the saved dirstate."""
    if not wlock:
        wlock = self.wlock()
    l = self.lock()
    if os.path.exists(self.sjoin("undo")):
        self.ui.status(_("rolling back last transaction\n"))
        transaction.rollback(self.sopener, self.sjoin("undo"))
        util.rename(self.join("undo.dirstate"), self.join("dirstate"))
        # drop in-memory state the rollback just invalidated
        self.reload()
        self.wreload()
    else:
        self.ui.warn(_("no rollback information available\n"))
618 598
def wreload(self):
    """Re-read the dirstate from disk."""
    self.dirstate.read()
621 601
def reload(self):
    """Re-read changelog and manifest and drop the tag caches."""
    self.changelog.load()
    self.manifest.load()
    self.tagscache = None
    self.nodetagscache = None
627 607
def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
            desc=None):
    """Acquire the named lock file.

    With wait set, a held lock is retried with a timeout taken from
    ui.timeout (default 600 seconds); otherwise LockHeld propagates.
    acquirefn runs once the lock is taken; releasefn is passed to the
    lock for release time.  Returns the lock object."""
    try:
        l = lock.lock(lockname, 0, releasefn, desc=desc)
    except lock.LockHeld, inst:
        if not wait:
            raise
        self.ui.warn(_("waiting for lock on %s held by %r\n") %
                     (desc, inst.locker))
        # default to 600 seconds timeout
        l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
                      releasefn, desc=desc)
    if acquirefn:
        acquirefn()
    return l
643 623
def lock(self, wait=1):
    """Acquire the store lock; metadata is reloaded once it is held."""
    return self.do_lock(self.sjoin("lock"), wait,
                        acquirefn=self.reload,
                        desc=_('repository %s') % self.origroot)
647 627
def wlock(self, wait=1):
    """Acquire the working directory lock; the dirstate is written on
    release and re-read on acquire."""
    return self.do_lock(self.join("wlock"), wait,
                        self.dirstate.write, self.wreload,
                        desc=_('working directory of %s') % self.origroot)
652 632
def filecommit(self, fn, manifest1, manifest2, linkrev, transaction, changelist):
    """
    commit an individual file as part of a larger transaction

    Returns the filelog node to record in the manifest for fn.  If
    the file is unchanged from its parent, the existing node is
    returned and changelist is untouched; otherwise fn is appended
    to changelist and a new filelog revision is created.
    manifest1/manifest2 are the manifests of the two commit parents.
    """

    t = self.wread(fn)
    fl = self.file(fn)
    fp1 = manifest1.get(fn, nullid)
    fp2 = manifest2.get(fn, nullid)

    meta = {}
    cp = self.dirstate.copied(fn)
    if cp:
        # Mark the new revision of this file as a copy of another
        # file.  This copy data will effectively act as a parent
        # of this new revision.  If this is a merge, the first
        # parent will be the nullid (meaning "look up the copy data")
        # and the second one will be the other parent.  For example:
        #
        # 0 --- 1 --- 3   rev1 changes file foo
        #   \       /     rev2 renames foo to bar and changes it
        #    \- 2 -/      rev3 should have bar with all changes and
        #                 should record that bar descends from
        #                 bar in rev2 and foo in rev1
        #
        # this allows this merge to succeed:
        #
        # 0 --- 1 --- 3   rev4 reverts the content change from rev2
        #   \       /     merging rev3 and rev4 should use bar@rev2
        #    \- 2 --- 4   as the merge base
        #
        meta["copy"] = cp
        if not manifest2: # not a branch merge
            meta["copyrev"] = hex(manifest1.get(cp, nullid))
            fp2 = nullid
        elif fp2 != nullid: # copied on remote side
            meta["copyrev"] = hex(manifest1.get(cp, nullid))
        elif fp1 != nullid: # copied on local side, reversed
            meta["copyrev"] = hex(manifest2.get(cp))
            fp2 = fp1
        else: # directory rename
            meta["copyrev"] = hex(manifest1.get(cp, nullid))
        self.ui.debug(_(" %s: copy %s:%s\n") %
                      (fn, cp, meta["copyrev"]))
        # with copy metadata in place the first parent becomes null
        fp1 = nullid
    elif fp2 != nullid:
        # is one parent an ancestor of the other?
        fpa = fl.ancestor(fp1, fp2)
        if fpa == fp1:
            fp1, fp2 = fp2, nullid
        elif fpa == fp2:
            fp2 = nullid

    # is the file unmodified from the parent? report existing entry
    if fp2 == nullid and not fl.cmp(fp1, t):
        return fp1

    changelist.append(fn)
    return fl.add(t, meta, transaction, linkrev, fp1, fp2)
712 692
def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None, extra={}):
    """Commit the given files as-is, bypassing dirstate-driven file
    discovery.  Parents default to the dirstate parents when p1 is
    not supplied."""
    if p1 is None:
        p1, p2 = self.dirstate.parents()
    return self.commit(files=files, text=text, user=user, date=date,
                       p1=p1, p2=p2, wlock=wlock, extra=extra)
718 698
def commit(self, files=None, text="", user=None, date=None,
           match=util.always, force=False, lock=None, wlock=None,
           force_editor=False, p1=None, p2=None, extra={}):
    """Create a new changeset.

    With p1 given (the rawcommit path) the dirstate is bypassed and
    files are committed exactly as listed.  Otherwise the dirstate
    supplies both the parents and the changed/removed file sets.
    Returns the new changeset node, or None when there was nothing
    to commit or the commit message came back empty."""

    commit = []
    remove = []
    changed = []
    use_dirstate = (p1 is None) # not rawcommit
    extra = extra.copy()

    if use_dirstate:
        if files:
            # explicit file list: classify each against the dirstate
            for f in files:
                s = self.dirstate.state(f)
                if s in 'nmai':
                    commit.append(f)
                elif s == 'r':
                    remove.append(f)
                else:
                    self.ui.warn(_("%s not tracked!\n") % f)
        else:
            # no list given: commit everything status reports
            changes = self.status(match=match)[:5]
            modified, added, removed, deleted, unknown = changes
            commit = modified + added
            remove = removed
    else:
        commit = files

    if use_dirstate:
        p1, p2 = self.dirstate.parents()
        update_dirstate = True
    else:
        p1, p2 = p1, p2 or nullid
        update_dirstate = (self.dirstate.parents()[0] == p1)

    c1 = self.changelog.read(p1)
    c2 = self.changelog.read(p2)
    m1 = self.manifest.read(c1[0]).copy()
    m2 = self.manifest.read(c2[0])

    if use_dirstate:
        branchname = self.workingctx().branch()
        try:
            # reject branch names that are not valid UTF-8
            branchname = branchname.decode('UTF-8').encode('UTF-8')
        except UnicodeDecodeError:
            raise util.Abort(_('branch name not in UTF-8!'))
    else:
        branchname = ""

    if use_dirstate:
        oldname = c1[5].get("branch") # stored in UTF-8
        if not commit and not remove and not force and p2 == nullid and \
           branchname == oldname:
            self.ui.status(_("nothing changed\n"))
            return None

    xp1 = hex(p1)
    if p2 == nullid: xp2 = ''
    else: xp2 = hex(p2)

    self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)

    if not wlock:
        wlock = self.wlock()
    if not lock:
        lock = self.lock()
    tr = self.transaction()

    # check in files
    new = {}
    linkrev = self.changelog.count()
    commit.sort()
    is_exec = util.execfunc(self.root, m1.execf)
    is_link = util.linkfunc(self.root, m1.linkf)
    for f in commit:
        self.ui.note(f + "\n")
        try:
            new[f] = self.filecommit(f, m1, m2, linkrev, tr, changed)
            m1.set(f, is_exec(f), is_link(f))
        except (OSError, IOError):
            if use_dirstate:
                self.ui.warn(_("trouble committing %s!\n") % f)
                raise
            else:
                # rawcommit tolerates unreadable files: treat as removed
                remove.append(f)

    # update manifest
    m1.update(new)
    remove.sort()
    removed = []

    for f in remove:
        if f in m1:
            del m1[f]
            removed.append(f)
    mn = self.manifest.add(m1, tr, linkrev, c1[0], c2[0], (new, removed))

    # add changeset
    new = new.keys()
    new.sort()

    user = user or self.ui.username()
    if not text or force_editor:
        # compose the editor template with HG: helper lines
        edittext = []
        if text:
            edittext.append(text)
        edittext.append("")
        edittext.append("HG: user: %s" % user)
        if p2 != nullid:
            edittext.append("HG: branch merge")
        if branchname:
            edittext.append("HG: branch %s" % util.tolocal(branchname))
        edittext.extend(["HG: changed %s" % f for f in changed])
        edittext.extend(["HG: removed %s" % f for f in removed])
        if not changed and not remove:
            edittext.append("HG: no files changed")
        edittext.append("")
        # run editor in the repository root
        olddir = os.getcwd()
        os.chdir(self.root)
        text = self.ui.edit("\n".join(edittext), user)
        os.chdir(olddir)

    # strip trailing whitespace and leading blank lines; an empty
    # message aborts the commit
    lines = [line.rstrip() for line in text.rstrip().splitlines()]
    while lines and not lines[0]:
        del lines[0]
    if not lines:
        return None
    text = '\n'.join(lines)
    if branchname:
        extra["branch"] = branchname
    n = self.changelog.add(mn, changed + removed, text, tr, p1, p2,
                           user, date, extra)
    self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
              parent2=xp2)
    tr.close()

    if self.branchcache and "branch" in extra:
        self.branchcache[util.tolocal(extra["branch"])] = n

    if use_dirstate or update_dirstate:
        self.dirstate.setparents(n)
        if use_dirstate:
            self.dirstate.update(new, "n")
            self.dirstate.forget(removed)

    self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
    return n
867 847
def walk(self, node=None, files=[], match=util.always, badmatch=None):
    '''
    walk recursively through the directory tree or a given
    changeset, finding all files matched by the match
    function

    results are yielded in a tuple (src, filename), where src
    is one of:
    'f' the file was found in the directory tree
    'm' the file was only in the dirstate and not in the tree
    'b' file was not found and matched badmatch
    '''

    if node:
        fdict = dict.fromkeys(files)
        # for dirstate.walk, files=['.'] means "walk the whole tree".
        # follow that here, too
        fdict.pop('.', None)
        mdict = self.manifest.read(self.changelog.read(node)[0])
        mfiles = mdict.keys()
        mfiles.sort()
        for fn in mfiles:
            for ffn in fdict:
                # match if the file is the exact name or a directory
                if ffn == fn or fn.startswith("%s/" % ffn):
                    # deleting while iterating is safe: we break
                    # out of the loop immediately afterwards
                    del fdict[ffn]
                    break
            if match(fn):
                yield 'm', fn
        # whatever is left in fdict matched nothing in the manifest
        ffiles = fdict.keys()
        ffiles.sort()
        for fn in ffiles:
            if badmatch and badmatch(fn):
                if match(fn):
                    yield 'b', fn
            else:
                self.ui.warn(_('%s: No such file in rev %s\n') % (
                    util.pathto(self.root, self.getcwd(), fn), short(node)))
    else:
        for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
            yield src, fn
909 889
def status(self, node1=None, node2=None, files=[], match=util.always,
           wlock=None, list_ignored=False, list_clean=False):
    """return status of files between two nodes or node and working directory

    If node1 is None, use the first dirstate parent instead.
    If node2 is None, compare node1 with working directory.

    Returns a 7-tuple of sorted file lists:
    (modified, added, removed, deleted, unknown, ignored, clean).
    ignored and clean are only populated when the corresponding
    list_* flag is set.
    """

    def fcmp(fn, getnode):
        # full-content comparison of the working file vs the stored rev
        t1 = self.wread(fn)
        return self.file(fn).cmp(getnode(fn), t1)

    def mfmatches(node):
        # manifest of `node` restricted to files accepted by `match`
        change = self.changelog.read(node)
        mf = self.manifest.read(change[0]).copy()
        for fn in mf.keys():
            if not match(fn):
                del mf[fn]
        return mf

    modified, added, removed, deleted, unknown = [], [], [], [], []
    ignored, clean = [], []

    compareworking = False
    if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
        compareworking = True

    if not compareworking:
        # read the manifest from node1 before the manifest from node2,
        # so that we'll hit the manifest cache if we're going through
        # all the revisions in parent->child order.
        mf1 = mfmatches(node1)

    # are we comparing the working directory?
    if not node2:
        if not wlock:
            try:
                # the lock lets us write fixed-up dirstate entries below;
                # status still works without it
                wlock = self.wlock(wait=0)
            except lock.LockException:
                wlock = None
        (lookup, modified, added, removed, deleted, unknown,
         ignored, clean) = self.dirstate.status(files, match,
                                                list_ignored, list_clean)

        # are we comparing working dir against its parent?
        if compareworking:
            if lookup:
                # do a full compare of any files that might have changed
                mnode = self.changelog.read(self.dirstate.parents()[0])[0]
                getnode = lambda fn: (self.manifest.find(mnode, fn)[0] or
                                      nullid)
                for f in lookup:
                    if fcmp(f, getnode):
                        modified.append(f)
                    else:
                        clean.append(f)
                        if wlock is not None:
                            # record the file as clean so the next
                            # status can skip the content compare
                            self.dirstate.update([f], "n")
        else:
            # we are comparing working dir against non-parent
            # generate a pseudo-manifest for the working dir
            # XXX: create it in dirstate.py ?
            mf2 = mfmatches(self.dirstate.parents()[0])
            is_exec = util.execfunc(self.root, mf2.execf)
            is_link = util.linkfunc(self.root, mf2.linkf)
            for f in lookup + modified + added:
                mf2[f] = ""
                mf2.set(f, is_exec(f), is_link(f))
            for f in removed:
                if f in mf2:
                    del mf2[f]
    else:
        # we are comparing two revisions
        mf2 = mfmatches(node2)

    if not compareworking:
        # flush lists from dirstate before comparing manifests
        modified, added, clean = [], [], []

        # make sure to sort the files so we talk to the disk in a
        # reasonable order
        mf2keys = mf2.keys()
        mf2keys.sort()
        getnode = lambda fn: mf1.get(fn, nullid)
        for fn in mf2keys:
            if mf1.has_key(fn):
                if mf1.flags(fn) != mf2.flags(fn) or \
                   (mf1[fn] != mf2[fn] and (mf2[fn] != "" or
                                            fcmp(fn, getnode))):
                    modified.append(fn)
                elif list_clean:
                    clean.append(fn)
                # consume mf1 as we go: what survives is "removed"
                del mf1[fn]
            else:
                added.append(fn)

        removed = mf1.keys()

    # sort and return results:
    for l in modified, added, removed, deleted, unknown, ignored, clean:
        l.sort()
    return (modified, added, removed, deleted, unknown, ignored, clean)
1012 992
def add(self, list, wlock=None):
    """Schedule the given files for addition; missing, unsupported or
    already-tracked entries produce a warning and are skipped."""
    if not wlock:
        wlock = self.wlock()
    for name in list:
        path = self.wjoin(name)
        link = os.path.islink(path)
        if not link and not os.path.exists(path):
            self.ui.warn(_("%s does not exist!\n") % name)
            continue
        if not link and not os.path.isfile(path):
            self.ui.warn(_("%s not added: only files and symlinks "
                           "supported currently\n") % name)
            continue
        if self.dirstate.state(name) in 'an':
            self.ui.warn(_("%s already tracked!\n") % name)
            continue
        self.dirstate.update([name], "a")
1028 1008
def forget(self, list, wlock=None):
    """Undo a pending add for each listed file, warning on files that
    were not scheduled for addition."""
    if not wlock:
        wlock = self.wlock()
    for name in list:
        if self.dirstate.state(name) not in 'ai':
            self.ui.warn(_("%s not added!\n") % name)
        else:
            self.dirstate.forget([name])
1037 1017
def remove(self, list, unlink=False, wlock=None):
    """Schedule files for removal.  With unlink=True the working
    copies are deleted first (already-missing files are fine)."""
    if unlink:
        for f in list:
            try:
                util.unlink(self.wjoin(f))
            except OSError, inst:
                # already gone is ok; anything else is a real error
                if inst.errno != errno.ENOENT:
                    raise
    if not wlock:
        wlock = self.wlock()
    for f in list:
        p = self.wjoin(f)
        if os.path.exists(p):
            # refuse to record removal while the file still exists
            self.ui.warn(_("%s still exists!\n") % f)
        elif self.dirstate.state(f) == 'a':
            # never committed: just drop the pending add
            self.dirstate.forget([f])
        elif f not in self.dirstate:
            self.ui.warn(_("%s not tracked!\n") % f)
        else:
            self.dirstate.update([f], "r")
1058 1038
def undelete(self, list, wlock=None):
    """Restore files scheduled for removal from the first dirstate
    parent and mark them as normal again."""
    pnode = self.dirstate.parents()[0]
    mnode = self.changelog.read(pnode)[0]
    mf = self.manifest.read(mnode)
    if not wlock:
        wlock = self.wlock()
    for name in list:
        if self.dirstate.state(name) not in "r":
            self.ui.warn("%s not removed!\n" % name)
        else:
            data = self.file(name).read(mf[name])
            self.wwrite(name, data, mf.flags(name))
            self.dirstate.update([name], "n")
1072 1052
def copy(self, source, dest, wlock=None):
    """Record dest as a copy of source; dest must already exist as a
    regular file in the working directory."""
    path = self.wjoin(dest)
    if not os.path.exists(path):
        self.ui.warn(_("%s does not exist!\n") % dest)
    elif not os.path.isfile(path):
        self.ui.warn(_("copy failed: %s is not a file\n") % dest)
    else:
        if not wlock:
            wlock = self.wlock()
        if self.dirstate.state(dest) == '?':
            # the copy target was not tracked yet: add it
            self.dirstate.update([dest], "a")
        self.dirstate.copy(source, dest)
1085 1065
def heads(self, start=None):
    """Return the changelog heads (optionally limited to descendants
    of start), sorted by revision, newest first."""
    found = self.changelog.heads(start)
    # sort the output in rev descending order
    decorated = [(-self.changelog.rev(h), h) for h in found]
    decorated.sort()
    return [node for (negrev, node) in decorated]
1092 1072
def branches(self, nodes):
    """For each given node (default: the tip), follow first parents
    back to a merge or root and return (head, tail, p1, p2) tuples
    describing each linear segment."""
    if not nodes:
        nodes = [self.changelog.tip()]
    out = []
    for start in nodes:
        node = start
        while 1:
            parents = self.changelog.parents(node)
            if parents[1] != nullid or parents[0] == nullid:
                # stop at a merge or at the root
                out.append((start, node, parents[0], parents[1]))
                break
            node = parents[0]
    return out
1106 1086
def between(self, pairs):
    """For each (top, bottom) pair, sample the first-parent chain at
    exponentially growing distances 1, 2, 4, ... from top, and return
    one list of sampled nodes per pair."""
    result = []
    for top, bottom in pairs:
        samples = []
        node, idx, want = top, 0, 1
        while node != bottom:
            parent = self.changelog.parents(node)[0]
            if idx == want:
                samples.append(node)
                want *= 2
            node = parent
            idx += 1
        result.append(samples)
    return result
1125 1105
def findincoming(self, remote, base=None, heads=None, force=False):
    """Return list of roots of the subsets of missing nodes from remote

    If base dict is specified, assume that these nodes and their parents
    exist on the remote side and that no child of a node of base exists
    in both remote and self.
    Furthermore base will be updated to include the nodes that exists
    in self and remote but no children exists in self and remote.
    If a list of heads is specified, return only nodes which are heads
    or ancestors of these heads.

    All the ancestors of base are in self and in remote.
    All the descendants of the list returned are missing in self.
    (and so we know that the rest of the nodes are missing in remote, see
    outgoing)
    """
    m = self.changelog.nodemap
    search = []
    fetch = {}
    seen = {}
    seenbranch = {}
    if base == None:
        base = {}

    if not heads:
        heads = remote.heads()

    if self.changelog.tip() == nullid:
        # local repo is empty: everything the remote has is missing
        base[nullid] = 1
        if heads != [nullid]:
            return [nullid]
        return []

    # assume we're closer to the tip than the root
    # and start by examining the heads
    self.ui.status(_("searching for changes\n"))

    unknown = []
    for h in heads:
        if h not in m:
            unknown.append(h)
        else:
            base[h] = 1

    if not unknown:
        return []

    req = dict.fromkeys(unknown)
    reqcnt = 0

    # search through remote branches
    # a 'branch' here is a linear segment of history, with four parts:
    # head, root, first parent, second parent
    # (a branch always has two parents (or none) by definition)
    unknown = remote.branches(unknown)
    while unknown:
        r = []
        while unknown:
            n = unknown.pop(0)
            if n[0] in seen:
                continue

            self.ui.debug(_("examining %s:%s\n")
                          % (short(n[0]), short(n[1])))
            if n[0] == nullid: # found the end of the branch
                pass
            elif n in seenbranch:
                self.ui.debug(_("branch already found\n"))
                continue
            elif n[1] and n[1] in m: # do we know the base?
                self.ui.debug(_("found incomplete branch %s:%s\n")
                              % (short(n[0]), short(n[1])))
                search.append(n) # schedule branch range for scanning
                seenbranch[n] = 1
            else:
                if n[1] not in seen and n[1] not in fetch:
                    if n[2] in m and n[3] in m:
                        self.ui.debug(_("found new changeset %s\n") %
                                      short(n[1]))
                        fetch[n[1]] = 1 # earliest unknown
                    for p in n[2:4]:
                        if p in m:
                            base[p] = 1 # latest known

                # queue unknown parents for the next request round
                for p in n[2:4]:
                    if p not in req and p not in m:
                        r.append(p)
                        req[p] = 1
            seen[n[0]] = 1

        if r:
            reqcnt += 1
            self.ui.debug(_("request %d: %s\n") %
                          (reqcnt, " ".join(map(short, r))))
            # batch branch queries ten nodes at a time
            for p in xrange(0, len(r), 10):
                for b in remote.branches(r[p:p+10]):
                    self.ui.debug(_("received %s:%s\n") %
                                  (short(b[0]), short(b[1])))
                    unknown.append(b)

    # do binary search on the branches we found
    while search:
        n = search.pop(0)
        reqcnt += 1
        l = remote.between([(n[0], n[1])])[0]
        l.append(n[1])
        p = n[0]
        f = 1
        for i in l:
            self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
            if i in m:
                if f <= 2:
                    self.ui.debug(_("found new branch changeset %s\n") %
                                  short(p))
                    fetch[p] = 1
                    base[i] = 1
                else:
                    self.ui.debug(_("narrowed branch search to %s:%s\n")
                                  % (short(p), short(i)))
                    search.append((p, i))
                break
            p, f = i, f * 2

    # sanity check our fetch list
    for f in fetch.keys():
        if f in m:
            raise repo.RepoError(_("already have changeset ") + short(f[:4]))

    if base.keys() == [nullid]:
        if force:
            self.ui.warn(_("warning: repository is unrelated\n"))
        else:
            raise util.Abort(_("repository is unrelated"))

    self.ui.debug(_("found new changesets starting at ") +
                  " ".join([short(f) for f in fetch]) + "\n")

    self.ui.debug(_("%d total queries\n") % reqcnt)

    return fetch.keys()
1266 1246
def findoutgoing(self, remote, base=None, heads=None, force=False):
    """Return list of nodes that are roots of subsets not in remote

    If base dict is specified, assume that these nodes and their parents
    exist on the remote side.
    If a list of heads is specified, return only nodes which are heads
    or ancestors of these heads, and return a second element which
    contains all remote heads which get new children.
    """
    if base == None:
        base = {}
    self.findincoming(remote, base, heads, force=force)

    self.ui.debug(_("common changesets up to ")
                  + " ".join(map(short, base.keys())) + "\n")

    remain = dict.fromkeys(self.changelog.nodemap)

    # prune everything remote has from the tree
    del remain[nullid]
    remove = base.keys()
    while remove:
        n = remove.pop(0)
        if n in remain:
            del remain[n]
            for p in self.changelog.parents(n):
                remove.append(p)

    # find every node whose parents have been pruned
    subset = []
    # find every remote head that will get new children
    updated_heads = {}
    for n in remain:
        p1, p2 = self.changelog.parents(n)
        if p1 not in remain and p2 not in remain:
            subset.append(n)
        if heads:
            if p1 in heads:
                updated_heads[p1] = True
            if p2 in heads:
                updated_heads[p2] = True

    # this is the set of all roots we have to push
    if heads:
        return subset, updated_heads.keys()
    else:
        return subset
1314 1294
def pull(self, remote, heads=None, force=False, lock=None):
    """Pull changes from remote into this repository.

    Takes the store lock unless the caller passed one in; a lock we
    took ourselves is released on the way out.  Returns the result of
    addchangegroup, or 0 when there was nothing to pull."""
    mylock = False
    if not lock:
        lock = self.lock()
        mylock = True

    try:
        fetch = self.findincoming(remote, force=force)
        if fetch == [nullid]:
            self.ui.status(_("requesting all changes\n"))

        if not fetch:
            self.ui.status(_("no changes found\n"))
            return 0

        if heads is None:
            cg = remote.changegroup(fetch, 'pull')
        else:
            # partial pull requires server-side changegroupsubset support
            if 'changegroupsubset' not in remote.capabilities:
                raise util.Abort(_("Partial pull cannot be done because other repository doesn't support changegroupsubset."))
            cg = remote.changegroupsubset(fetch, heads, 'pull')
        return self.addchangegroup(cg, 'pull', remote.url())
    finally:
        # only release the lock if we acquired it ourselves
        if mylock:
            lock.release()
1340 1320
1341 1321 def push(self, remote, force=False, revs=None):
1342 1322 # there are two ways to push to remote repo:
1343 1323 #
1344 1324 # addchangegroup assumes local user can lock remote
1345 1325 # repo (local filesystem, old ssh servers).
1346 1326 #
1347 1327 # unbundle assumes local user cannot lock remote repo (new ssh
1348 1328 # servers, http servers).
1349 1329
1350 1330 if remote.capable('unbundle'):
1351 1331 return self.push_unbundle(remote, force, revs)
1352 1332 return self.push_addchangegroup(remote, force, revs)
1353 1333
    def prepush(self, remote, force, revs):
        """compute the changegroup to push.

        Returns (changegroup, remote_heads) when there is something to
        push, or (None, 1) when there is nothing to push or the push
        would create new remote heads and force is not set.
        """
        base = {}
        remote_heads = remote.heads()
        # inc is truthy when the remote has changes we do not
        inc = self.findincoming(remote, base, remote_heads, force=force)

        update, updated_heads = self.findoutgoing(remote, base, remote_heads)
        if revs is not None:
            msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
        else:
            bases, heads = update, self.changelog.heads()

        if not bases:
            self.ui.status(_("no changes found\n"))
            return None, 1
        elif not force:
            # check if we're creating new remote heads
            # to be a remote head after push, node must be either
            # - unknown locally
            # - a local outgoing head descended from update
            # - a remote head that's known locally and not
            #   ancestral to an outgoing head

            warn = 0

            if remote_heads == [nullid]:
                # remote repo is empty: pushing cannot create extra heads
                warn = 0
            elif not revs and len(heads) > len(remote_heads):
                warn = 1
            else:
                newheads = list(heads)
                for r in remote_heads:
                    if r in self.changelog.nodemap:
                        desc = self.changelog.heads(r, heads)
                        l = [h for h in heads if h in desc]
                        if not l:
                            # no outgoing head descends from r, so r
                            # remains a head after the push
                            newheads.append(r)
                    else:
                        # remote head unknown locally; it stays a head
                        newheads.append(r)
                if len(newheads) > len(remote_heads):
                    warn = 1

            if warn:
                self.ui.warn(_("abort: push creates new remote branches!\n"))
                self.ui.status(_("(did you forget to merge?"
                                 " use push -f to force)\n"))
                return None, 1
            elif inc:
                self.ui.warn(_("note: unsynced remote changes!\n"))


        if revs is None:
            cg = self.changegroup(update, 'push')
        else:
            cg = self.changegroupsubset(update, revs, 'push')
        return cg, remote_heads
1409 1389
1410 1390 def push_addchangegroup(self, remote, force, revs):
1411 1391 lock = remote.lock()
1412 1392
1413 1393 ret = self.prepush(remote, force, revs)
1414 1394 if ret[0] is not None:
1415 1395 cg, remote_heads = ret
1416 1396 return remote.addchangegroup(cg, 'push', self.url())
1417 1397 return ret[1]
1418 1398
1419 1399 def push_unbundle(self, remote, force, revs):
1420 1400 # local repo finds heads on server, finds out what revs it
1421 1401 # must push. once revs transferred, if server finds it has
1422 1402 # different heads (someone else won commit/push race), server
1423 1403 # aborts.
1424 1404
1425 1405 ret = self.prepush(remote, force, revs)
1426 1406 if ret[0] is not None:
1427 1407 cg, remote_heads = ret
1428 1408 if force: remote_heads = ['force']
1429 1409 return remote.unbundle(cg, remote_heads, 'push')
1430 1410 return ret[1]
1431 1411
1432 1412 def changegroupinfo(self, nodes):
1433 1413 self.ui.note(_("%d changesets found\n") % len(nodes))
1434 1414 if self.ui.debugflag:
1435 1415 self.ui.debug(_("List of changesets:\n"))
1436 1416 for node in nodes:
1437 1417 self.ui.debug("%s\n" % hex(node))
1438 1418
    def changegroupsubset(self, bases, heads, source):
        """This function generates a changegroup consisting of all the nodes
        that are descendents of any of the bases, and ancestors of any of
        the heads.

        It is fairly complex as determining which filenodes and which
        manifest nodes need to be included for the changeset to be complete
        is non-trivial.

        Another wrinkle is doing the reverse, figuring out which changeset in
        the changegroup a particular filenode or manifestnode belongs to.

        Returns a util.chunkbuffer wrapping the generated chunk stream:
        changelog group, then manifest group, then one group per changed
        file, terminated by a close chunk."""

        self.hook('preoutgoing', throw=True, source=source)

        # Set up some initial variables
        # Make it easy to refer to self.changelog
        cl = self.changelog
        # msng is short for missing - compute the list of changesets in this
        # changegroup.
        msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
        self.changegroupinfo(msng_cl_lst)
        # Some bases may turn out to be superfluous, and some heads may be
        # too.  nodesbetween will return the minimal set of bases and heads
        # necessary to re-create the changegroup.

        # Known heads are the list of heads that it is assumed the recipient
        # of this changegroup will know about.
        knownheads = {}
        # We assume that all parents of bases are known heads.
        for n in bases:
            for p in cl.parents(n):
                if p != nullid:
                    knownheads[p] = 1
        knownheads = knownheads.keys()
        if knownheads:
            # Now that we know what heads are known, we can compute which
            # changesets are known.  The recipient must know about all
            # changesets required to reach the known heads from the null
            # changeset.
            has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
            junk = None
            # Transform the list into an ersatz set.
            has_cl_set = dict.fromkeys(has_cl_set)
        else:
            # If there were no known heads, the recipient cannot be assumed to
            # know about any changesets.
            has_cl_set = {}

        # Make it easy to refer to self.manifest
        mnfst = self.manifest
        # We don't know which manifests are missing yet
        msng_mnfst_set = {}
        # Nor do we know which filenodes are missing.
        msng_filenode_set = {}

        junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
        junk = None

        # A changeset always belongs to itself, so the changenode lookup
        # function for a changenode is identity.
        def identity(x):
            return x

        # A function generating function.  Sets up an environment for the
        # inner function.
        def cmp_by_rev_func(revlog):
            # Compare two nodes by their revision number in the environment's
            # revision history.  Since the revision number both represents the
            # most efficient order to read the nodes in, and represents a
            # topological sorting of the nodes, this function is often useful.
            def cmp_by_rev(a, b):
                return cmp(revlog.rev(a), revlog.rev(b))
            return cmp_by_rev

        # If we determine that a particular file or manifest node must be a
        # node that the recipient of the changegroup will already have, we can
        # also assume the recipient will have all the parents.  This function
        # prunes them from the set of missing nodes.
        def prune_parents(revlog, hasset, msngset):
            haslst = hasset.keys()
            haslst.sort(cmp_by_rev_func(revlog))
            for node in haslst:
                parentlst = [p for p in revlog.parents(node) if p != nullid]
                while parentlst:
                    n = parentlst.pop()
                    if n not in hasset:
                        hasset[n] = 1
                        p = [p for p in revlog.parents(n) if p != nullid]
                        parentlst.extend(p)
            for n in hasset:
                msngset.pop(n, None)

        # This is a function generating function used to set up an environment
        # for the inner function to execute in.
        def manifest_and_file_collector(changedfileset):
            # This is an information gathering function that gathers
            # information from each changeset node that goes out as part of
            # the changegroup.  The information gathered is a list of which
            # manifest nodes are potentially required (the recipient may
            # already have them) and total list of all files which were
            # changed in any changeset in the changegroup.
            #
            # We also remember the first changenode we saw any manifest
            # referenced by so we can later determine which changenode 'owns'
            # the manifest.
            def collect_manifests_and_files(clnode):
                c = cl.read(clnode)
                for f in c[3]:
                    # This is to make sure we only have one instance of each
                    # filename string for each filename.
                    changedfileset.setdefault(f, f)
                msng_mnfst_set.setdefault(c[0], clnode)
            return collect_manifests_and_files

        # Figure out which manifest nodes (of the ones we think might be part
        # of the changegroup) the recipient must know about and remove them
        # from the changegroup.
        def prune_manifests():
            has_mnfst_set = {}
            for n in msng_mnfst_set:
                # If a 'missing' manifest thinks it belongs to a changenode
                # the recipient is assumed to have, obviously the recipient
                # must have that manifest.
                linknode = cl.node(mnfst.linkrev(n))
                if linknode in has_cl_set:
                    has_mnfst_set[n] = 1
            prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)

        # Use the information collected in collect_manifests_and_files to say
        # which changenode any manifestnode belongs to.
        def lookup_manifest_link(mnfstnode):
            return msng_mnfst_set[mnfstnode]

        # A function generating function that sets up the initial environment
        # for the inner function.
        def filenode_collector(changedfiles):
            next_rev = [0]
            # This gathers information from each manifestnode included in the
            # changegroup about which filenodes the manifest node references
            # so we can include those in the changegroup too.
            #
            # It also remembers which changenode each filenode belongs to.  It
            # does this by assuming that a filenode belongs to the changenode
            # the first manifest that references it belongs to.
            def collect_msng_filenodes(mnfstnode):
                r = mnfst.rev(mnfstnode)
                if r == next_rev[0]:
                    # If the last rev we looked at was the one just previous,
                    # we only need to see a diff.
                    delta = mdiff.patchtext(mnfst.delta(mnfstnode))
                    # For each line in the delta
                    for dline in delta.splitlines():
                        # get the filename and filenode for that line
                        f, fnode = dline.split('\0')
                        fnode = bin(fnode[:40])
                        f = changedfiles.get(f, None)
                        # And if the file is in the list of files we care
                        # about.
                        if f is not None:
                            # Get the changenode this manifest belongs to
                            clnode = msng_mnfst_set[mnfstnode]
                            # Create the set of filenodes for the file if
                            # there isn't one already.
                            ndset = msng_filenode_set.setdefault(f, {})
                            # And set the filenode's changelog node to the
                            # manifest's if it hasn't been set already.
                            ndset.setdefault(fnode, clnode)
                else:
                    # Otherwise we need a full manifest.
                    m = mnfst.read(mnfstnode)
                    # For every file in we care about.
                    for f in changedfiles:
                        fnode = m.get(f, None)
                        # If it's in the manifest
                        if fnode is not None:
                            # See comments above.
                            clnode = msng_mnfst_set[mnfstnode]
                            ndset = msng_filenode_set.setdefault(f, {})
                            ndset.setdefault(fnode, clnode)
                # Remember the revision we hope to see next.
                next_rev[0] = r + 1
            return collect_msng_filenodes

        # We have a list of filenodes we think we need for a file, lets remove
        # all those we know the recipient must have.
        def prune_filenodes(f, filerevlog):
            msngset = msng_filenode_set[f]
            hasset = {}
            # If a 'missing' filenode thinks it belongs to a changenode we
            # assume the recipient must have, then the recipient must have
            # that filenode.
            for n in msngset:
                clnode = cl.node(filerevlog.linkrev(n))
                if clnode in has_cl_set:
                    hasset[n] = 1
            prune_parents(filerevlog, hasset, msngset)

        # A function generator function that sets up a context for the
        # inner function.
        def lookup_filenode_link_func(fname):
            msngset = msng_filenode_set[fname]
            # Lookup the changenode the filenode belongs to.
            def lookup_filenode_link(fnode):
                return msngset[fnode]
            return lookup_filenode_link

        # Now that we have all theses utility functions to help out and
        # logically divide up the task, generate the group.
        def gengroup():
            # The set of changed files starts empty.
            changedfiles = {}
            # Create a changenode group generator that will call our functions
            # back to lookup the owning changenode and collect information.
            group = cl.group(msng_cl_lst, identity,
                             manifest_and_file_collector(changedfiles))
            for chnk in group:
                yield chnk

            # The list of manifests has been collected by the generator
            # calling our functions back.
            prune_manifests()
            msng_mnfst_lst = msng_mnfst_set.keys()
            # Sort the manifestnodes by revision number.
            msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
            # Create a generator for the manifestnodes that calls our lookup
            # and data collection functions back.
            group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
                                filenode_collector(changedfiles))
            for chnk in group:
                yield chnk

            # These are no longer needed, dereference and toss the memory for
            # them.
            msng_mnfst_lst = None
            msng_mnfst_set.clear()

            changedfiles = changedfiles.keys()
            changedfiles.sort()
            # Go through all our files in order sorted by name.
            for fname in changedfiles:
                filerevlog = self.file(fname)
                # Toss out the filenodes that the recipient isn't really
                # missing.
                if msng_filenode_set.has_key(fname):
                    prune_filenodes(fname, filerevlog)
                    msng_filenode_lst = msng_filenode_set[fname].keys()
                else:
                    msng_filenode_lst = []
                # If any filenodes are left, generate the group for them,
                # otherwise don't bother.
                if len(msng_filenode_lst) > 0:
                    yield changegroup.genchunk(fname)
                    # Sort the filenodes by their revision #
                    msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
                    # Create a group generator and only pass in a changenode
                    # lookup function as we need to collect no information
                    # from filenodes.
                    group = filerevlog.group(msng_filenode_lst,
                                             lookup_filenode_link_func(fname))
                    for chnk in group:
                        yield chnk
                if msng_filenode_set.has_key(fname):
                    # Don't need this anymore, toss it to free memory.
                    del msng_filenode_set[fname]
            # Signal that no more groups are left.
            yield changegroup.closechunk()

            if msng_cl_lst:
                self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)

        return util.chunkbuffer(gengroup())
1710 1690
    def changegroup(self, basenodes, source):
        """Generate a changegroup of all nodes that we have that a recipient
        doesn't.

        This is much easier than the previous function as we can assume that
        the recipient has any changenode we aren't sending them.

        Returns a util.chunkbuffer over the chunk stream: changelog
        group, manifest group, one group per changed file, then a close
        chunk."""

        self.hook('preoutgoing', throw=True, source=source)

        cl = self.changelog
        nodes = cl.nodesbetween(basenodes, None)[0]
        # ersatz set of the revision numbers being sent
        revset = dict.fromkeys([cl.rev(n) for n in nodes])
        self.changegroupinfo(nodes)

        def identity(x):
            # a changeset is its own changegroup link
            return x

        def gennodelst(revlog):
            # yield the nodes of revlog whose linked changeset is outgoing
            for r in xrange(0, revlog.count()):
                n = revlog.node(r)
                if revlog.linkrev(n) in revset:
                    yield n

        def changed_file_collector(changedfileset):
            # callback: record every file touched by an outgoing changeset
            def collect_changed_files(clnode):
                c = cl.read(clnode)
                for fname in c[3]:
                    changedfileset[fname] = 1
            return collect_changed_files

        def lookuprevlink_func(revlog):
            # callback factory: map a node of revlog to its changeset node
            def lookuprevlink(n):
                return cl.node(revlog.linkrev(n))
            return lookuprevlink

        def gengroup():
            # construct a list of all changed files
            changedfiles = {}

            for chnk in cl.group(nodes, identity,
                                 changed_file_collector(changedfiles)):
                yield chnk
            changedfiles = changedfiles.keys()
            changedfiles.sort()

            mnfst = self.manifest
            nodeiter = gennodelst(mnfst)
            for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
                yield chnk

            for fname in changedfiles:
                filerevlog = self.file(fname)
                nodeiter = gennodelst(filerevlog)
                nodeiter = list(nodeiter)
                if nodeiter:
                    yield changegroup.genchunk(fname)
                    lookup = lookuprevlink_func(filerevlog)
                    for chnk in filerevlog.group(nodeiter, lookup):
                        yield chnk

            yield changegroup.closechunk()

        if nodes:
            self.hook('outgoing', node=hex(nodes[0]), source=source)

        return util.chunkbuffer(gengroup())
1777 1757
    def addchangegroup(self, source, srctype, url):
        """add changegroup to repo.

        return values:
        - nothing changed or no source: 0
        - more heads than before: 1+added heads (2..n)
        - less heads than before: -1-removed heads (-2..-n)
        - number of heads stays the same: 1
        """
        def csmap(x):
            self.ui.debug(_("add changeset %s\n") % short(x))
            return cl.count()

        def revmap(x):
            return cl.rev(x)

        if not source:
            return 0

        self.hook('prechangegroup', throw=True, source=srctype, url=url)

        changesets = files = revisions = 0

        tr = self.transaction()

        # write changelog data to temp files so concurrent readers will not see
        # inconsistent view
        cl = None
        try:
            cl = appendfile.appendchangelog(self.sopener)
            oldheads = len(cl.heads())

            # pull off the changeset group
            self.ui.status(_("adding changesets\n"))
            # cor/cnr: rev numbers of the old and new tip, bracketing the
            # changesets added by this group
            cor = cl.count() - 1
            chunkiter = changegroup.chunkiter(source)
            if cl.addgroup(chunkiter, csmap, tr, 1) is None:
                raise util.Abort(_("received changelog group is empty"))
            cnr = cl.count() - 1
            changesets = cnr - cor

            # pull off the manifest group
            self.ui.status(_("adding manifests\n"))
            chunkiter = changegroup.chunkiter(source)
            # no need to check for empty manifest group here:
            # if the result of the merge of 1 and 2 is the same in 3 and 4,
            # no new manifest will be created and the manifest group will
            # be empty during the pull
            self.manifest.addgroup(chunkiter, revmap, tr)

            # process the files
            self.ui.status(_("adding file changes\n"))
            while 1:
                f = changegroup.getchunk(source)
                if not f:
                    break
                self.ui.debug(_("adding %s revisions\n") % f)
                fl = self.file(f)
                o = fl.count()
                chunkiter = changegroup.chunkiter(source)
                if fl.addgroup(chunkiter, revmap, tr) is None:
                    raise util.Abort(_("received file revlog group is empty"))
                revisions += fl.count() - o
                files += 1

            # flush the appendfile-buffered changelog last, so readers
            # never see changesets whose manifests/files are missing
            cl.writedata()
        finally:
            if cl:
                cl.cleanup()

        # make changelog see real files again
        self.changelog = changelog.changelog(self.sopener)
        self.changelog.checkinlinesize(tr)

        newheads = len(self.changelog.heads())
        heads = ""
        if oldheads and newheads != oldheads:
            heads = _(" (%+d heads)") % (newheads - oldheads)

        self.ui.status(_("added %d changesets"
                         " with %d changes to %d files%s\n")
                       % (changesets, revisions, files, heads))

        if changesets > 0:
            self.hook('pretxnchangegroup', throw=True,
                      node=hex(self.changelog.node(cor+1)), source=srctype,
                      url=url)

        tr.close()

        if changesets > 0:
            self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
                      source=srctype, url=url)

            for i in xrange(cor + 1, cnr + 1):
                self.hook("incoming", node=hex(self.changelog.node(i)),
                          source=srctype, url=url)

        # never return 0 here:
        if newheads < oldheads:
            return newheads - oldheads - 1
        else:
            return newheads - oldheads + 1
1884 1861
1885 1862
1886 1863 def stream_in(self, remote):
1887 1864 fp = remote.stream_out()
1888 1865 l = fp.readline()
1889 1866 try:
1890 1867 resp = int(l)
1891 1868 except ValueError:
1892 1869 raise util.UnexpectedOutput(
1893 1870 _('Unexpected response from remote server:'), l)
1894 1871 if resp == 1:
1895 1872 raise util.Abort(_('operation forbidden by server'))
1896 1873 elif resp == 2:
1897 1874 raise util.Abort(_('locking the remote repository failed'))
1898 1875 elif resp != 0:
1899 1876 raise util.Abort(_('the server sent an unknown error code'))
1900 1877 self.ui.status(_('streaming all changes\n'))
1901 1878 l = fp.readline()
1902 1879 try:
1903 1880 total_files, total_bytes = map(int, l.split(' ', 1))
1904 1881 except ValueError, TypeError:
1905 1882 raise util.UnexpectedOutput(
1906 1883 _('Unexpected response from remote server:'), l)
1907 1884 self.ui.status(_('%d files to transfer, %s of data\n') %
1908 1885 (total_files, util.bytecount(total_bytes)))
1909 1886 start = time.time()
1910 1887 for i in xrange(total_files):
1911 1888 # XXX doesn't support '\n' or '\r' in filenames
1912 1889 l = fp.readline()
1913 1890 try:
1914 1891 name, size = l.split('\0', 1)
1915 1892 size = int(size)
1916 1893 except ValueError, TypeError:
1917 1894 raise util.UnexpectedOutput(
1918 1895 _('Unexpected response from remote server:'), l)
1919 1896 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
1920 1897 ofp = self.sopener(name, 'w')
1921 1898 for chunk in util.filechunkiter(fp, limit=size):
1922 1899 ofp.write(chunk)
1923 1900 ofp.close()
1924 1901 elapsed = time.time() - start
1925 1902 if elapsed <= 0:
1926 1903 elapsed = 0.001
1927 1904 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1928 1905 (util.bytecount(total_bytes), elapsed,
1929 1906 util.bytecount(total_bytes / elapsed)))
1930 1907 self.reload()
1931 1908 return len(self.heads()) + 1
1932 1909
1933 1910 def clone(self, remote, heads=[], stream=False):
1934 1911 '''clone remote repository.
1935 1912
1936 1913 keyword arguments:
1937 1914 heads: list of revs to clone (forces use of pull)
1938 1915 stream: use streaming clone if possible'''
1939 1916
1940 1917 # now, all clients that can request uncompressed clones can
1941 1918 # read repo formats supported by all servers that can serve
1942 1919 # them.
1943 1920
1944 1921 # if revlog format changes, client will have to check version
1945 1922 # and format flags on "stream" capability, and use
1946 1923 # uncompressed only if compatible.
1947 1924
1948 1925 if stream and not heads and remote.capable('stream'):
1949 1926 return self.stream_in(remote)
1950 1927 return self.pull(remote, heads)
1951 1928
# used to avoid circular references so destructors work
def aftertrans(files):
    """Return a callback that renames each (src, dest) pair in files.

    The pairs are copied eagerly so the returned closure keeps no
    reference to the caller's structures (avoids reference cycles).
    """
    pending = [tuple(pair) for pair in files]
    def runrenames():
        for src, dest in pending:
            util.rename(src, dest)
    return runrenames
1959 1936
def instance(ui, path, create):
    # factory: build a localrepository for path, stripping an optional
    # 'file:' scheme prefix first
    return localrepository(ui, util.drop_scheme('file', path), create)
1962 1939
def islocal(path):
    # local repositories are, by definition, always local
    return True
@@ -1,215 +1,215
1 1 # manifest.py - manifest revision class for mercurial
2 2 #
3 3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from revlog import *
9 9 from i18n import _
10 10 import array, bisect, struct, mdiff
11 11
class manifestdict(dict):
    """Mapping of file name -> node id, with a parallel per-file flag
    table ("x" executable, "l" symlink, "" plain)."""

    def __init__(self, mapping=None, flags=None):
        # the flags dict is kept by reference, matching the original
        # semantics (copy() makes the deep copies)
        dict.__init__(self, {} if mapping is None else mapping)
        self._flags = {} if flags is None else flags

    def flags(self, f):
        """return the flag string recorded for f ("" when unset)"""
        return self._flags.get(f, "")

    def execf(self, f):
        "test for executable in manifest flags"
        return "x" in self.flags(f)

    def linkf(self, f):
        "test for symlink in manifest flags"
        return "l" in self.flags(f)

    def rawset(self, f, entry):
        """parse a raw manifest line body: 40 hex node chars, then
        flags, then a trailing newline"""
        self[f] = bin(entry[:40])
        flagfield = entry[40:-1]
        if flagfield:
            self._flags[f] = flagfield

    def set(self, f, execf=False, linkf=False):
        """record flags for f; linkf takes precedence over execf"""
        if linkf:
            self._flags[f] = "l"
        elif execf:
            self._flags[f] = "x"
        else:
            self._flags[f] = ""

    def copy(self):
        """return an independent copy of both mapping and flags"""
        return manifestdict(dict.copy(self), dict.copy(self._flags))
36 36
class manifest(revlog):
    """Revlog storing manifests, with a one-entry parse cache.

    mapcache holds (node, manifestdict) for the last read() node;
    listcache holds the raw text of that revision as a char array so
    add() can delta against it.
    """
    def __init__(self, opener):
        self.mapcache = None
        self.listcache = None
        revlog.__init__(self, opener, "00manifest.i")

    def parselines(self, lines):
        # yield [filename, rest-of-line] pairs for each manifest line
        for l in lines.splitlines(1):
            yield l.split('\0')

    def readdelta(self, node):
        # parse only the lines changed by node relative to its delta base
        delta = mdiff.patchtext(self.delta(node))
        deltamap = manifestdict()
        for f, n in self.parselines(delta):
            deltamap.rawset(f, n)
        return deltamap

    def read(self, node):
        """return the manifestdict for node, using/refreshing the cache"""
        if node == nullid: return manifestdict() # don't upset local cache
        if self.mapcache and self.mapcache[0] == node:
            return self.mapcache[1]
        text = self.revision(node)
        self.listcache = array.array('c', text)
        mapping = manifestdict()
        for f, n in self.parselines(text):
            mapping.rawset(f, n)
        self.mapcache = (node, mapping)
        return mapping

    def _search(self, m, s, lo=0, hi=None):
        '''return a tuple (start, end) that says where to find s within m.

        If the string is found m[start:end] are the line containing
        that string.  If start == end the string was not found and
        they indicate the proper sorted insertion point.  This was
        taken from bisect_left, and modified to find line start/end as
        it goes along.

        m should be a buffer or a string
        s is a string'''
        def advance(i, c):
            # move i forward to the next occurrence of character c
            while i < lenm and m[i] != c:
                i += 1
            return i
        lenm = len(m)
        if not hi:
            hi = lenm
        while lo < hi:
            mid = (lo + hi) // 2
            start = mid
            # back up to the start of the line containing mid
            while start > 0 and m[start-1] != '\n':
                start -= 1
            end = advance(start, '\0')
            if m[start:end] < s:
                # we know that after the null there are 40 bytes of sha1
                # this translates to the bisect lo = mid + 1
                lo = advance(end + 40, '\n') + 1
            else:
                # this translates to the bisect hi = mid
                hi = start
        end = advance(lo, '\0')
        found = m[lo:end]
        if cmp(s, found) == 0:
            # we know that after the null there are 40 bytes of sha1
            end = advance(end + 40, '\n')
            return (lo, end+1)
        else:
            return (lo, lo)

    def find(self, node, f):
        '''look up entry for a single file efficiently.
        return (node, flags) pair if found, (None, None) if not.'''
        if self.mapcache and node == self.mapcache[0]:
            return self.mapcache[1].get(f), self.mapcache[1].flags(f)
        text = self.revision(node)
        start, end = self._search(text, f)
        if start == end:
            return None, None
        l = text[start:end]
        f, n = l.split('\0')
        return bin(n[:40]), n[40:-1]

    def add(self, map, transaction, link, p1=None, p2=None,
            changed=None):
        """add a new manifest revision built from map (a manifestdict).

        changed, when given, is an ([added/modified], [removed]) pair
        that allows delta-editing the cached text of p1 instead of
        rebuilding it from scratch.  Returns the new node.
        """
        # apply the changes collected during the bisect loop to our addlist
        # return a delta suitable for addrevision
        def addlistdelta(addlist, x):
            # start from the bottom up
            # so changes to the offsets don't mess things up.
            i = len(x)
            while i > 0:
                i -= 1
                start = x[i][0]
                end = x[i][1]
                if x[i][2]:
                    addlist[start:end] = array.array('c', x[i][2])
                else:
                    del addlist[start:end]
            return "".join([struct.pack(">lll", d[0], d[1], len(d[2])) + d[2] \
                            for d in x ])

        def checkforbidden(f):
            # newlines and carriage returns would corrupt the line format
            if '\n' in f or '\r' in f:
                raise RevlogError(_("'\\n' and '\\r' disallowed in filenames"))

        # if we're using the listcache, make sure it is valid and
        # parented by the same node we're diffing against
        if not changed or not self.listcache or not p1 or \
               self.mapcache[0] != p1:
            files = map.keys()
            files.sort()

            for f in files:
                checkforbidden(f)

            # if this is changed to support newlines in filenames,
            # be sure to check the templates/ dir again (especially *-raw.tmpl)
            text = ["%s\000%s%s\n" % (f, hex(map[f]), map.flags(f)) for f in files]
            self.listcache = array.array('c', "".join(text))
            cachedelta = None
        else:
            addlist = self.listcache

            for f in changed[0]:
                checkforbidden(f)
            # combine the changed lists into one list for sorting
            work = [[x, 0] for x in changed[0]]
            work[len(work):] = [[x, 1] for x in changed[1]]
            work.sort()

            delta = []
            dstart = None
            dend = None
            dline = [""]
            start = 0
            # zero copy representation of addlist as a buffer
            addbuf = buffer(addlist)

            # start with a readonly loop that finds the offset of
            # each line and creates the deltas
            for w in work:
                f = w[0]
                # bs will either be the index of the item or the insert point
                start, end = self._search(addbuf, f, start)
                if w[1] == 0:
                    l = "%s\000%s%s\n" % (f, hex(map[f]), map.flags(f))
                else:
                    l = ""
                if start == end and w[1] == 1:
                    # item we want to delete was not found, error out
                    raise AssertionError(
                            _("failed to remove %s from manifest") % f)
                if dstart != None and dstart <= start and dend >= start:
                    # adjacent/overlapping change: extend the current delta
                    if dend < end:
                        dend = end
                    if l:
                        dline.append(l)
                else:
                    if dstart != None:
                        delta.append([dstart, dend, "".join(dline)])
                    dstart = start
                    dend = end
                    dline = [l]

            if dstart != None:
                delta.append([dstart, dend, "".join(dline)])
            # apply the delta to the addlist, and get a delta for addrevision
            cachedelta = addlistdelta(addlist, delta)

            # the delta is only valid if we've been processing the tip revision
            if self.mapcache[0] != self.tip():
                cachedelta = None
            self.listcache = addlist

        n = self.addrevision(buffer(self.listcache), transaction, link, p1, \
                             p2, cachedelta)
        self.mapcache = (n, map)

        return n
@@ -1,1297 +1,1299
1 1 """
2 2 revlog.py - storage back-end for mercurial
3 3
4 4 This provides efficient delta storage with O(1) retrieve and append
5 5 and O(changes) merge between branches
6 6
7 7 Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
8 8
9 9 This software may be used and distributed according to the terms
10 10 of the GNU General Public License, incorporated herein by reference.
11 11 """
12 12
13 13 from node import *
14 14 from i18n import _
15 15 import binascii, changegroup, errno, ancestor, mdiff, os
16 16 import sha, struct, util, zlib
17 17
# revlog version strings
REVLOGV0 = 0        # original (pre-NG) index format
REVLOGNG = 1        # "next generation" index format

# revlog flags
# flags live in the upper 16 bits of the version word; the low 16 bits
# hold the format number itself
REVLOGNGINLINEDATA = (1 << 16)   # revision data stored inline in the index file
REVLOG_DEFAULT_FLAGS = REVLOGNGINLINEDATA

REVLOG_DEFAULT_FORMAT = REVLOGNG
REVLOG_DEFAULT_VERSION = REVLOG_DEFAULT_FORMAT | REVLOG_DEFAULT_FLAGS
28 28
def flagstr(flag):
    """Map a symbolic revlog flag name to its flag bit."""
    known = {"inline": REVLOGNGINLINEDATA}
    if flag in known:
        return known[flag]
    raise RevlogError(_("unknown revlog flag %s") % flag)
33 33
def hash(text, p1, p2):
    """generate a hash from the given text and its parent hashes

    The two parent hashes are mixed in sorted order so the result does
    not depend on parent ordering, then the text itself is added.
    This makes it easy to distinguish nodes with the same content but
    different history in the revision graph.
    """
    pmin, pmax = min(p1, p2), max(p1, p2)
    s = sha.new(pmin)
    s.update(pmax)
    s.update(text)
    return s.digest()
47 47
def compress(text):
    """ generate a possibly-compressed representation of text """
    if not text:
        return ("", text)
    l = len(text)
    if l < 44:
        # too short for compression to ever pay off
        if text[0] == '\0':
            return ("", text)
        return ('u', text)
    bin = zlib.compress(text)
    if len(bin) <= l:
        return ("", bin)
    # compression made it bigger; store the raw text instead
    if text[0] == '\0':
        return ("", text)
    return ('u', text)
59 59
def decompress(bin):
    """ decompress the given input """
    if not bin:
        return bin
    marker = bin[0]
    if marker == '\0':
        # empty-compression marker: data is stored verbatim
        return bin
    elif marker == 'x':
        # zlib stream (zlib output always starts with 'x')
        return zlib.decompress(bin)
    elif marker == 'u':
        # explicit uncompressed marker
        return bin[1:]
    raise RevlogError(_("unknown compression type %r") % marker)
68 68
# v0 index entry: 4 ints (offset, compressed length, base rev, link rev)
# followed by the 20-byte nodeids of the two parents and this revision
indexformatv0 = ">4l20s20s20s"
v0shaoffset = 56    # byte offset of the nodeid within a v0 index entry
# index ng:
# 6 bytes offset
# 2 bytes flags
# 4 bytes compressed length
# 4 bytes uncompressed length
# 4 bytes: base rev
# 4 bytes link rev
# 4 bytes parent 1 rev
# 4 bytes parent 2 rev
# 32 bytes: nodeid
indexformatng = ">Qiiiiii20s12x"
ngshaoffset = 32    # byte offset of the nodeid within an NG index entry
versionformat = ">I"  # the version word at the start of the index file
84 84
class lazyparser(object):
    """
    this class avoids the need to parse the entirety of large indices

    Index entries are read from the open index file on demand and kept
    in self.index (raw record strings until unpacked) and self.map
    (nodeid -> rev).
    """

    # lazyparser is not safe to use on windows if win32 extensions not
    # available. it keeps file handle open, which make it not possible
    # to break hardlinks on local cloned repos.
    safe_to_use = os.name != 'nt' or (not util.is_win_9x() and
                                      hasattr(util, 'win32api'))

    def __init__(self, dataf, size, indexformat, shaoffset):
        # dataf: open index file; size: its total length in bytes
        self.dataf = dataf
        self.format = indexformat
        self.s = struct.calcsize(indexformat)  # record size in bytes
        self.indexformat = indexformat
        self.datasize = size
        self.l = size/self.s                   # number of records
        self.index = [None] * self.l           # per-rev raw/unpacked entries
        self.map = {nullid: nullrev}           # nodeid -> rev
        self.allmap = 0                        # nonzero once map is complete
        self.all = 0                           # nonzero once index is complete
        self.mapfind_count = 0                 # findnode calls so far
        self.shaoffset = shaoffset             # nodeid offset inside a record

    def loadmap(self):
        """
        during a commit, we need to make sure the rev being added is
        not a duplicate. This requires loading the entire index,
        which is fairly slow. loadmap can load up just the node map,
        which takes much less time.
        """
        if self.allmap: return
        end = self.datasize
        self.allmap = 1
        cur = 0
        count = 0
        blocksize = self.s * 256
        self.dataf.seek(0)
        while cur < end:
            data = self.dataf.read(blocksize)
            off = 0
            for x in xrange(256):
                # pull just the nodeid out of each record
                n = data[off + self.shaoffset:off + self.shaoffset + 20]
                self.map[n] = count
                count += 1
                if count >= self.l:
                    break
                off += self.s
            cur += blocksize

    def loadblock(self, blockstart, blocksize, data=None):
        # Parse the records covering [blockstart, blockstart+blocksize)
        # into self.index/self.map, reading from the file unless data
        # is supplied by the caller.
        if self.all: return
        if data is None:
            self.dataf.seek(blockstart)
            if blockstart + blocksize > self.datasize:
                # the revlog may have grown since we've started running,
                # but we don't have space in self.index for more entries.
                # limit blocksize so that we don't get too much data.
                blocksize = max(self.datasize - blockstart, 0)
            data = self.dataf.read(blocksize)
        lend = len(data) / self.s
        i = blockstart / self.s
        off = 0
        # lazyindex supports __delitem__
        if lend > len(self.index) - i:
            lend = len(self.index) - i
        for x in xrange(lend):
            if self.index[i + x] == None:
                b = data[off : off + self.s]
                self.index[i + x] = b
                n = b[self.shaoffset:self.shaoffset + 20]
                self.map[n] = i + x
            off += self.s

    def findnode(self, node):
        """search backwards through the index file for a specific node"""
        if self.allmap: return None

        # hg log will cause many many searches for the manifest
        # nodes. After we get called a few times, just load the whole
        # thing.
        if self.mapfind_count > 8:
            self.loadmap()
            if node in self.map:
                return node
            return None
        self.mapfind_count += 1
        last = self.l - 1
        while self.index[last] != None:
            if last == 0:
                # every entry is already parsed; node isn't here
                self.all = 1
                self.allmap = 1
                return None
            last -= 1
        end = (last + 1) * self.s
        blocksize = self.s * 256
        while end >= 0:
            start = max(end - blocksize, 0)
            self.dataf.seek(start)
            data = self.dataf.read(end - start)
            findend = end - start
            while True:
                # we're searching backwards, so we have to make sure
                # we don't find a changeset where this node is a parent
                off = data.rfind(node, 0, findend)
                findend = off
                if off >= 0:
                    i = off / self.s
                    off = i * self.s
                    n = data[off + self.shaoffset:off + self.shaoffset + 20]
                    if n == node:
                        self.map[n] = i + start / self.s
                        return node
                else:
                    break
            end -= blocksize
        return None

    def loadindex(self, i=None, end=None):
        # Ensure entry i (plus a neighborhood), the range [i, end), or,
        # with no arguments, the entire index is parsed.
        if self.all: return
        all = False
        if i == None:
            blockstart = 0
            blocksize = (512 / self.s) * self.s
            end = self.datasize
            all = True
        else:
            if end:
                blockstart = i * self.s
                end = end * self.s
                blocksize = end - blockstart
            else:
                # round down to a 32-entry boundary, read 64 entries
                blockstart = (i & ~(32)) * self.s
                blocksize = self.s * 64
                end = blockstart + blocksize
        while blockstart < end:
            self.loadblock(blockstart, blocksize)
            blockstart += blocksize
        if all: self.all = True
225 225
class lazyindex(object):
    """A list-like view over lazyparser's index that faults entries in
    on demand and unpacks raw record strings into tuples."""

    def __init__(self, parser):
        self.p = parser

    def __len__(self):
        return len(self.p.index)

    def load(self, pos):
        """Force entry *pos* to be parsed from disk and return it."""
        if pos < 0:
            pos += len(self.p.index)
        self.p.loadindex(pos)
        return self.p.index[pos]

    def __getitem__(self, pos):
        entry = self.p.index[pos]
        if not entry:
            entry = self.load(pos)
        if isinstance(entry, str):
            # still a raw record string; unpack it
            entry = struct.unpack(self.p.indexformat, entry)
        return entry

    def __setitem__(self, pos, item):
        self.p.index[pos] = item

    def __delitem__(self, pos):
        del self.p.index[pos]

    def append(self, e):
        self.p.index.append(e)
248 248
class lazymap(object):
    """a lazy version of the node map

    Looks like a dict of nodeid -> rev, but misses fall back to a
    backwards search of the index file via the shared lazyparser.
    """
    def __init__(self, parser):
        self.p = parser
    def load(self, key):
        # fault in a single node via a backwards file search
        n = self.p.findnode(key)
        if n == None:
            raise KeyError(key)
    def __contains__(self, key):
        if key in self.p.map:
            return True
        # not seen yet; parse the complete node map before giving up
        self.p.loadmap()
        return key in self.p.map
    def __iter__(self):
        yield nullid
        for i in xrange(self.p.l):
            ret = self.p.index[i]
            if not ret:
                self.p.loadindex(i)
                ret = self.p.index[i]
            if isinstance(ret, str):
                ret = struct.unpack(self.p.indexformat, ret)
            # the last field of an index entry is the nodeid
            yield ret[-1]
    def __getitem__(self, key):
        try:
            return self.p.map[key]
        except KeyError:
            try:
                self.load(key)
                return self.p.map[key]
            except KeyError:
                raise KeyError("node " + hex(key))
    def __setitem__(self, key, val):
        self.p.map[key] = val
    def __delitem__(self, key):
        del self.p.map[key]
285 285
class RevlogError(Exception):
    """Base class for errors raised by the revlog layer."""

class LookupError(RevlogError):
    """A node could not be found in the revlog index."""
288 288
289 289 class revlog(object):
290 290 """
291 291 the underlying revision storage object
292 292
293 293 A revlog consists of two parts, an index and the revision data.
294 294
295 295 The index is a file with a fixed record size containing
296 296     information on each revision, including its nodeid (hash), the
297 297 nodeids of its parents, the position and offset of its data within
298 298 the data file, and the revision it's based on. Finally, each entry
299 299 contains a linkrev entry that can serve as a pointer to external
300 300 data.
301 301
302 302 The revision data itself is a linear collection of data chunks.
303 303 Each chunk represents a revision and is usually represented as a
304 304 delta against the previous chunk. To bound lookup time, runs of
305 305 deltas are limited to about 2 times the length of the original
306 306 version data. This makes retrieval of a version proportional to
307 307 its size, or O(1) relative to the number of revisions.
308 308
309 309 Both pieces of the revlog are written to in an append-only
310 310 fashion, which means we never need to rewrite a file to insert or
311 311 remove data, and can use some simple techniques to avoid the need
312 312 for locking while reading.
313 313 """
314 def __init__(self, opener, indexfile, defversion=REVLOG_DEFAULT_VERSION):
314 def __init__(self, opener, indexfile):
315 315 """
316 316 create a revlog object
317 317
318 318 opener is a function that abstracts the file opening operation
319 319 and can be used to implement COW semantics or the like.
320 320 """
321 321 self.indexfile = indexfile
322 322 self.datafile = indexfile[:-2] + ".d"
323 323 self.opener = opener
324 324
325 325 self.indexstat = None
326 326 self.cache = None
327 327 self.chunkcache = None
328 self.defversion = defversion
328 self.defversion=REVLOG_DEFAULT_VERSION
329 if hasattr(opener, "defversion"):
330 self.defversion = opener.defversion
329 331 self.load()
330 332
    def load(self):
        """(Re)read the index file and build the in-memory structures.

        Skips the reload entirely when the index file's stat info is
        unchanged since the previous load.  Validates the version word
        and picks the matching index format; large non-inline indices
        are parsed lazily.
        """
        v = self.defversion
        try:
            f = self.opener(self.indexfile)
            i = f.read(4)   # the version word
            f.seek(0)
        except IOError, inst:
            if inst.errno != errno.ENOENT:
                raise
            i = ""          # no index yet: start an empty revlog
        else:
            try:
                st = util.fstat(f)
            except AttributeError, inst:
                st = None
            else:
                oldst = self.indexstat
                # unchanged on disk? keep the current in-memory state
                if (oldst and st.st_dev == oldst.st_dev
                    and st.st_ino == oldst.st_ino
                    and st.st_mtime == oldst.st_mtime
                    and st.st_ctime == oldst.st_ctime):
                    return
                self.indexstat = st
        if len(i) > 0:
            v = struct.unpack(versionformat, i)[0]
        # split the version word into flag bits and the format number
        flags = v & ~0xFFFF
        fmt = v & 0xFFFF
        if fmt == REVLOGV0:
            if flags:
                raise RevlogError(_("index %s unknown flags %#04x for format v0")
                                  % (self.indexfile, flags >> 16))
        elif fmt == REVLOGNG:
            if flags & ~REVLOGNGINLINEDATA:
                raise RevlogError(_("index %s unknown flags %#04x for revlogng")
                                  % (self.indexfile, flags >> 16))
        else:
            raise RevlogError(_("index %s unknown format %d")
                              % (self.indexfile, fmt))
        self.version = v
        if v == REVLOGV0:
            self.indexformat = indexformatv0
            shaoffset = v0shaoffset
        else:
            self.indexformat = indexformatng
            shaoffset = ngshaoffset

        if i:
            if (lazyparser.safe_to_use and not self.inlinedata() and
                st and st.st_size > 10000):
                # big index, let's parse it on demand
                parser = lazyparser(f, st.st_size, self.indexformat, shaoffset)
                self.index = lazyindex(parser)
                self.nodemap = lazymap(parser)
            else:
                self.parseindex(f, st)
            if self.version != REVLOGV0:
                # normalize entry 0's offset to zero, keeping its flags
                e = list(self.index[0])
                type = self.ngtype(e[0])
                e[0] = self.offset_type(0, type)
                self.index[0] = e
        else:
            self.nodemap = {nullid: nullrev}
            self.index = []
395 397
    def parseindex(self, fp, st):
        """Eagerly parse the entire index file into self.index/self.nodemap.

        fp is the open index file; st is its stat result (falsy for
        stat-less sources such as HTTP range readers, which are read in
        one shot).  For inline revlogs the data chunks interleaved with
        the entries are skipped over.
        """
        s = struct.calcsize(self.indexformat)
        self.index = []
        self.nodemap =  {nullid: nullrev}
        inline = self.inlinedata()
        n = 0
        leftover = None
        while True:
            if st:
                data = fp.read(65536)
            else:
                # hack for httprangereader, it doesn't do partial reads well
                data = fp.read()
            if not data:
                break
            if n == 0 and self.inlinedata():
                # cache the first chunk
                self.chunkcache = (0, data)
            if leftover:
                # stitch a record that was split across read boundaries
                data = leftover + data
                leftover = None
            off = 0
            l = len(data)
            while off < l:
                if l - off < s:
                    # partial record; finish it on the next read
                    leftover = data[off:]
                    break
                cur = data[off:off + s]
                off += s
                e = struct.unpack(self.indexformat, cur)
                self.index.append(e)
                self.nodemap[e[-1]] = n
                n += 1
                if inline:
                    # skip the inline data chunk following this entry
                    if e[1] < 0:
                        break
                    off += e[1]
                    if off > l:
                        # some things don't seek well, just read it
                        fp.read(off - l)
                        break
            if not st:
                break
440 442
441 443 def ngoffset(self, q):
442 444 if q & 0xFFFF:
443 445 raise RevlogError(_('%s: incompatible revision flag %x') %
444 446 (self.indexfile, q))
445 447 return long(q >> 16)
446 448
447 449 def ngtype(self, q):
448 450 return int(q & 0xFFFF)
449 451
450 452 def offset_type(self, offset, type):
451 453 return long(long(offset) << 16 | type)
452 454
453 455 def loadindex(self, start, end):
454 456 """load a block of indexes all at once from the lazy parser"""
455 457 if isinstance(self.index, lazyindex):
456 458 self.index.p.loadindex(start, end)
457 459
458 460 def loadindexmap(self):
459 461 """loads both the map and the index from the lazy parser"""
460 462 if isinstance(self.index, lazyindex):
461 463 p = self.index.p
462 464 p.loadindex()
463 465 self.nodemap = p.map
464 466
465 467 def loadmap(self):
466 468 """loads the map from the lazy parser"""
467 469 if isinstance(self.nodemap, lazymap):
468 470 self.nodemap.p.loadmap()
469 471 self.nodemap = self.nodemap.p.map
470 472
471 473 def inlinedata(self): return self.version & REVLOGNGINLINEDATA
472 474 def tip(self): return self.node(len(self.index) - 1)
473 475 def count(self): return len(self.index)
474 476 def node(self, rev):
475 477 return rev == nullrev and nullid or self.index[rev][-1]
476 478 def rev(self, node):
477 479 try:
478 480 return self.nodemap[node]
479 481 except KeyError:
480 482 raise LookupError(_('%s: no node %s') % (self.indexfile, hex(node)))
481 483 def linkrev(self, node):
482 484 return (node == nullid) and nullrev or self.index[self.rev(node)][-4]
483 485 def parents(self, node):
484 486 if node == nullid: return (nullid, nullid)
485 487 r = self.rev(node)
486 488 d = self.index[r][-3:-1]
487 489 if self.version == REVLOGV0:
488 490 return d
489 491 return (self.node(d[0]), self.node(d[1]))
490 492 def parentrevs(self, rev):
491 493 if rev == nullrev:
492 494 return (nullrev, nullrev)
493 495 d = self.index[rev][-3:-1]
494 496 if self.version == REVLOGV0:
495 497 return (self.rev(d[0]), self.rev(d[1]))
496 498 return d
497 499 def start(self, rev):
498 500 if rev == nullrev:
499 501 return 0
500 502 if self.version != REVLOGV0:
501 503 return self.ngoffset(self.index[rev][0])
502 504 return self.index[rev][0]
503 505
504 506 def end(self, rev): return self.start(rev) + self.length(rev)
505 507
    def size(self, rev):
        """return the length of the uncompressed text for a given revision"""
        if rev == nullrev:
            return 0
        l = -1
        if self.version != REVLOGV0:
            # NG entries record the uncompressed length directly
            l = self.index[rev][2]
        if l >= 0:
            return l

        # length unknown (v0 format): reconstruct the text and measure it
        t = self.revision(self.node(rev))
        return len(t)

        # alternate implementation, The advantage to this code is it
        # will be faster for a single revision. But, the results are not
        # cached, so finding the size of every revision will be slower.
        """
        if self.cache and self.cache[1] == rev:
            return len(self.cache[2])

        base = self.base(rev)
        if self.cache and self.cache[1] >= base and self.cache[1] < rev:
            base = self.cache[1]
            text = self.cache[2]
        else:
            text = self.revision(self.node(base))

        l = len(text)
        for x in xrange(base + 1, rev + 1):
            l = mdiff.patchedsize(l, self.chunk(x))
        return l
        """
538 540
539 541 def length(self, rev):
540 542 if rev == nullrev:
541 543 return 0
542 544 else:
543 545 return self.index[rev][1]
544 546 def base(self, rev):
545 547 if (rev == nullrev):
546 548 return nullrev
547 549 else:
548 550 return self.index[rev][-5]
549 551
    def reachable(self, node, stop=None):
        """return a hash of all nodes ancestral to a given node, including
        the node itself, stopping when stop is matched"""
        reachable = {}
        visit = [node]           # breadth-first worklist
        reachable[node] = 1
        if stop:
            stopn = self.rev(stop)
        else:
            stopn = 0
        while visit:
            n = visit.pop(0)
            if n == stop:
                continue
            if n == nullid:
                continue
            for p in self.parents(n):
                # never descend below the stop revision
                if self.rev(p) < stopn:
                    continue
                if p not in reachable:
                    reachable[p] = 1
                    visit.append(p)
        return reachable
573 575
    def nodesbetween(self, roots=None, heads=None):
        """Return a tuple containing three elements. Elements 1 and 2 contain
        a final list bases and heads after all the unreachable ones have been
        pruned.  Element 0 contains a topologically sorted list of all
        nodes that satisfy these constraints:
        1. All nodes must be descended from a node in roots (the nodes on
        roots are considered descended from themselves).
        2. All nodes must also be ancestors of a node in heads (the nodes in
        heads are considered to be their own ancestors).

        If roots is unspecified, nullid is assumed as the only root.
        If heads is unspecified, it is taken to be the output of the
        heads method (i.e. a list of all nodes in the repository that
        have no children)."""
        nonodes = ([], [], [])
        if roots is not None:
            roots = list(roots)
            if not roots:
                return nonodes
            lowestrev = min([self.rev(n) for n in roots])
        else:
            roots = [nullid] # Everybody's a descendent of nullid
            lowestrev = nullrev
        if (lowestrev == nullrev) and (heads is None):
            # We want _all_ the nodes!
            return ([self.node(r) for r in xrange(0, self.count())],
                    [nullid], list(self.heads()))
        if heads is None:
            # All nodes are ancestors, so the latest ancestor is the last
            # node.
            highestrev = self.count() - 1
            # Set ancestors to None to signal that every node is an ancestor.
            ancestors = None
            # Set heads to an empty dictionary for later discovery of heads
            heads = {}
        else:
            heads = list(heads)
            if not heads:
                return nonodes
            ancestors = {}
            # Turn heads into a dictionary so we can remove 'fake' heads.
            # Also, later we will be using it to filter out the heads we can't
            # find from roots.
            heads = dict.fromkeys(heads, 0)
            # Start at the top and keep marking parents until we're done.
            nodestotag = heads.keys()
            # Remember where the top was so we can use it as a limit later.
            highestrev = max([self.rev(n) for n in nodestotag])
            while nodestotag:
                # grab a node to tag
                n = nodestotag.pop()
                # Never tag nullid
                if n == nullid:
                    continue
                # A node's revision number represents its place in a
                # topologically sorted list of nodes.
                r = self.rev(n)
                if r >= lowestrev:
                    if n not in ancestors:
                        # If we are possibly a descendent of one of the roots
                        # and we haven't already been marked as an ancestor
                        ancestors[n] = 1 # Mark as ancestor
                        # Add non-nullid parents to list of nodes to tag.
                        nodestotag.extend([p for p in self.parents(n) if
                                           p != nullid])
                    elif n in heads: # We've seen it before, is it a fake head?
                        # So it is, real heads should not be the ancestors of
                        # any other heads.
                        heads.pop(n)
            if not ancestors:
                return nonodes
            # Now that we have our set of ancestors, we want to remove any
            # roots that are not ancestors.

            # If one of the roots was nullid, everything is included anyway.
            if lowestrev > nullrev:
                # But, since we weren't, let's recompute the lowest rev to not
                # include roots that aren't ancestors.

                # Filter out roots that aren't ancestors of heads
                roots = [n for n in roots if n in ancestors]
                # Recompute the lowest revision
                if roots:
                    lowestrev = min([self.rev(n) for n in roots])
                else:
                    # No more roots?  Return empty list
                    return nonodes
            else:
                # We are descending from nullid, and don't need to care about
                # any other roots.
                lowestrev = nullrev
                roots = [nullid]
        # Transform our roots list into a 'set' (i.e. a dictionary where the
        # values don't matter.
        descendents = dict.fromkeys(roots, 1)
        # Also, keep the original roots so we can filter out roots that aren't
        # 'real' roots (i.e. are descended from other roots).
        roots = descendents.copy()
        # Our topologically sorted list of output nodes.
        orderedout = []
        # Don't start at nullid since we don't want nullid in our output list,
        # and if nullid shows up in descendents, empty parents will look like
        # they're descendents.
        for r in xrange(max(lowestrev, 0), highestrev + 1):
            n = self.node(r)
            isdescendent = False
            if lowestrev == nullrev:  # Everybody is a descendent of nullid
                isdescendent = True
            elif n in descendents:
                # n is already a descendent
                isdescendent = True
                # This check only needs to be done here because all the roots
                # will start being marked as descendents before the loop.
                if n in roots:
                    # If n was a root, check if it's a 'real' root.
                    p = tuple(self.parents(n))
                    # If any of its parents are descendents, it's not a root.
                    if (p[0] in descendents) or (p[1] in descendents):
                        roots.pop(n)
            else:
                p = tuple(self.parents(n))
                # A node is a descendent if either of its parents are
                # descendents.  (We seeded the dependents list with the roots
                # up there, remember?)
                if (p[0] in descendents) or (p[1] in descendents):
                    descendents[n] = 1
                    isdescendent = True
            if isdescendent and ((ancestors is None) or (n in ancestors)):
                # Only include nodes that are both descendents and ancestors.
                orderedout.append(n)
                if (ancestors is not None) and (n in heads):
                    # We're trying to figure out which heads are reachable
                    # from roots.
                    # Mark this head as having been reached
                    heads[n] = 1
                elif ancestors is None:
                    # Otherwise, we're trying to discover the heads.
                    # Assume this is a head because if it isn't, the next step
                    # will eventually remove it.
                    heads[n] = 1
                    # But, obviously its parents aren't.
                    for p in self.parents(n):
                        heads.pop(p, None)
        heads = [n for n in heads.iterkeys() if heads[n] != 0]
        roots = roots.keys()
        assert orderedout
        assert roots
        assert heads
        return (orderedout, roots, heads)
724 726
    def heads(self, start=None, stop=None):
        """return the list of all nodes that have no children

        if start is specified, only heads that are descendants of
        start will be returned
        if stop is specified, it will consider all the revs from stop
        as if they had no children
        """
        if start is None:
            start = nullid
        if stop is None:
            stop = []
        stoprevs = dict.fromkeys([self.rev(n) for n in stop])
        startrev = self.rev(start)
        reachable = {startrev: 1}
        heads = {startrev: 1}

        # single forward sweep: each reachable rev displaces its
        # reachable parents from the head set
        parentrevs = self.parentrevs
        for r in xrange(startrev + 1, self.count()):
            for p in parentrevs(r):
                if p in reachable:
                    if r not in stoprevs:
                        reachable[r] = 1
                        heads[r] = 1
                    if p in heads and p not in stoprevs:
                        del heads[p]

        return [self.node(r) for r in heads]
753 755
754 756 def children(self, node):
755 757 """find the children of a given node"""
756 758 c = []
757 759 p = self.rev(node)
758 760 for r in range(p + 1, self.count()):
759 761 for pr in self.parentrevs(r):
760 762 if pr == p:
761 763 c.append(self.node(r))
762 764 return c
763 765
764 766 def _match(self, id):
765 767 if isinstance(id, (long, int)):
766 768 # rev
767 769 return self.node(id)
768 770 if len(id) == 20:
769 771 # possibly a binary node
770 772 # odds of a binary node being all hex in ASCII are 1 in 10**25
771 773 try:
772 774 node = id
773 775 r = self.rev(node) # quick search the index
774 776 return node
775 777 except LookupError:
776 778 pass # may be partial hex id
777 779 try:
778 780 # str(rev)
779 781 rev = int(id)
780 782 if str(rev) != id: raise ValueError
781 783 if rev < 0: rev = self.count() + rev
782 784 if rev < 0 or rev >= self.count(): raise ValueError
783 785 return self.node(rev)
784 786 except (ValueError, OverflowError):
785 787 pass
786 788 if len(id) == 40:
787 789 try:
788 790 # a full hex nodeid?
789 791 node = bin(id)
790 792 r = self.rev(node)
791 793 return node
792 794 except TypeError:
793 795 pass
794 796
795 797 def _partialmatch(self, id):
796 798 if len(id) < 40:
797 799 try:
798 800 # hex(node)[:...]
799 801 bin_id = bin(id[:len(id) & ~1]) # grab an even number of digits
800 802 node = None
801 803 for n in self.nodemap:
802 804 if n.startswith(bin_id) and hex(n).startswith(id):
803 805 if node is not None:
804 806 raise LookupError(_("Ambiguous identifier"))
805 807 node = n
806 808 if node is not None:
807 809 return node
808 810 except TypeError:
809 811 pass
810 812
811 813 def lookup(self, id):
812 814 """locate a node based on:
813 815 - revision number or str(revision number)
814 816 - nodeid or subset of hex nodeid
815 817 """
816 818
817 819 n = self._match(id)
818 820 if n is not None:
819 821 return n
820 822 n = self._partialmatch(id)
821 823 if n:
822 824 return n
823 825
824 826 raise LookupError(_("No match found"))
825 827
826 828 def cmp(self, node, text):
827 829 """compare text with a given file revision"""
828 830 p1, p2 = self.parents(node)
829 831 return hash(text, p1, p2) != node
830 832
831 833 def makenode(self, node, text):
832 834 """calculate a file nodeid for text, descended or possibly
833 835 unchanged from node"""
834 836
835 837 if self.cmp(node, text):
836 838 return hash(text, node, nullid)
837 839 return node
838 840
    def diff(self, a, b):
        """return a delta between two revisions (full texts a and b)"""
        return mdiff.textdiff(a, b)
842 844
    def patches(self, t, pl):
        """apply a list of patches to a string

        t is the base text; pl is a list of binary deltas applied in order.
        """
        return mdiff.patches(t, pl)
846 848
    def chunk(self, rev, df=None, cachelen=4096):
        """Return the decompressed raw chunk stored for revision *rev*.

        df may be an already-open data (or inline index) file; cachelen
        is the minimum window of data pulled into the chunk cache.
        """
        start, length = self.start(rev), self.length(rev)
        inline = self.inlinedata()
        if inline:
            # inline revlogs interleave index entries with the data,
            # so shift past the (rev + 1) entries preceding this chunk
            start += (rev + 1) * struct.calcsize(self.indexformat)
        end = start + length
        def loadcache(df):
            # refill the chunk cache window starting at this chunk
            cache_length = max(cachelen, length) # 4k
            if not df:
                if inline:
                    df = self.opener(self.indexfile)
                else:
                    df = self.opener(self.datafile)
            df.seek(start)
            self.chunkcache = (start, df.read(cache_length))

        if not self.chunkcache:
            loadcache(df)

        cache_start = self.chunkcache[0]
        cache_end = cache_start + len(self.chunkcache[1])
        if start >= cache_start and end <= cache_end:
            # it is cached
            offset = start - cache_start
        else:
            loadcache(df)
            offset = 0

        #def checkchunk():
        #    df = self.opener(self.datafile)
        #    df.seek(start)
        #    return df.read(length)
        #assert s == checkchunk()
        return decompress(self.chunkcache[1][offset:offset + length])
881 883
882 884 def delta(self, node):
883 885 """return or calculate a delta between a node and its predecessor"""
884 886 r = self.rev(node)
885 887 return self.revdiff(r - 1, r)
886 888
887 889 def revdiff(self, rev1, rev2):
888 890 """return or calculate a delta between two revisions"""
889 891 b1 = self.base(rev1)
890 892 b2 = self.base(rev2)
891 893 if b1 == b2 and rev1 + 1 == rev2:
892 894 return self.chunk(rev2)
893 895 else:
894 896 return self.diff(self.revision(self.node(rev1)),
895 897 self.revision(self.node(rev2)))
896 898
    def revision(self, node):
        """return an uncompressed revision of a given node"""
        if node == nullid: return ""
        if self.cache and self.cache[0] == node: return self.cache[2]

        # look up what we need to read
        text = None
        rev = self.rev(node)
        base = self.base(rev)

        if self.inlinedata():
            # we probably have the whole chunk cached
            df = None
        else:
            df = self.opener(self.datafile)

        # do we have useful data cached?
        if self.cache and self.cache[1] >= base and self.cache[1] < rev:
            base = self.cache[1]
            text = self.cache[2]
            self.loadindex(base, rev + 1)
        else:
            self.loadindex(base, rev + 1)
            text = self.chunk(base, df=df)

        # apply the chain of deltas from the base up to this revision
        bins = []
        for r in xrange(base + 1, rev + 1):
            bins.append(self.chunk(r, df=df))

        text = self.patches(text, bins)

        # validate the reconstructed text against the stored nodeid
        p1, p2 = self.parents(node)
        if node != hash(text, p1, p2):
            raise RevlogError(_("integrity check failed on %s:%d")
                              % (self.datafile, rev))

        self.cache = (node, rev, text)
        return text
935 937
    def checkinlinesize(self, tr, fp=None):
        """If an inline revlog has grown past 128k, migrate its data
        into a separate .d file and rewrite the index without the
        inline flag.  tr is the current transaction; fp, if given, is
        an open index file positioned at its end.
        """
        if not self.inlinedata():
            return
        if not fp:
            fp = self.opener(self.indexfile, 'r')
            fp.seek(0, 2)
        size = fp.tell()
        if size < 131072:
            return
        trinfo = tr.find(self.indexfile)
        if trinfo == None:
            raise RevlogError(_("%s not found in the transaction")
                              % self.indexfile)

        trindex = trinfo[2]
        dataoff = self.start(trindex)

        tr.add(self.datafile, dataoff)
        df = self.opener(self.datafile, 'w')
        # copy every revision's data chunk out of the inline index
        calc = struct.calcsize(self.indexformat)
        for r in xrange(self.count()):
            start = self.start(r) + (r + 1) * calc
            length = self.length(r)
            fp.seek(start)
            d = fp.read(length)
            df.write(d)
        fp.close()
        df.close()
        fp = self.opener(self.indexfile, 'w', atomictemp=True)
        # clear the inline flag before rewriting the version word
        self.version &= ~(REVLOGNGINLINEDATA)
        if self.count():
            x = self.index[0]
            e = struct.pack(self.indexformat, *x)[4:]
            l = struct.pack(versionformat, self.version)
            # the version word overlays the first 4 bytes of entry 0
            fp.write(l)
            fp.write(e)

        for i in xrange(1, self.count()):
            x = self.index[i]
            e = struct.pack(self.indexformat, *x)
            fp.write(e)

        # if we don't call rename, the temp file will never replace the
        # real index
        fp.rename()

        tr.replace(self.indexfile, trindex * calc)
        self.chunkcache = None
984 986
985 987 def addrevision(self, text, transaction, link, p1=None, p2=None, d=None):
986 988 """add a revision to the log
987 989
988 990 text - the revision data to add
989 991 transaction - the transaction object used for rollback
990 992 link - the linkrev data to add
991 993 p1, p2 - the parent nodeids of the revision
992 994 d - an optional precomputed delta
993 995 """
994 996 if not self.inlinedata():
995 997 dfh = self.opener(self.datafile, "a")
996 998 else:
997 999 dfh = None
998 1000 ifh = self.opener(self.indexfile, "a+")
999 1001 return self._addrevision(text, transaction, link, p1, p2, d, ifh, dfh)
1000 1002
    def _addrevision(self, text, transaction, link, p1, p2, d, ifh, dfh):
        """internal worker for addrevision/addgroup

        ifh/dfh are open index and data file handles (dfh is None for
        inline revlogs).  Returns the node of the new revision.
        """
        if text is None: text = ""
        if p1 is None: p1 = self.tip()
        if p2 is None: p2 = nullid

        node = hash(text, p1, p2)

        # the revision may already exist (e.g. the same change made on
        # two branches)
        if node in self.nodemap:
            return node

        n = self.count()
        t = n - 1

        if n:
            base = self.base(t)
            start = self.start(base)
            end = self.end(t)
            if not d:
                prev = self.revision(self.tip())
                d = self.diff(prev, text)
            data = compress(d)
            l = len(data[1]) + len(data[0])
            dist = end - start + l

        # full versions are inserted when the needed deltas
        # become comparable to the uncompressed text
        if not n or dist > len(text) * 2:
            data = compress(text)
            l = len(data[1]) + len(data[0])
            base = n
        else:
            base = self.base(t)

        offset = 0
        if t >= 0:
            offset = self.end(t)

        # build the on-disk index entry for the selected format
        if self.version == REVLOGV0:
            e = (offset, l, base, link, p1, p2, node)
        else:
            e = (self.offset_type(offset, 0), l, len(text),
                 base, link, self.rev(p1), self.rev(p2), node)

        self.index.append(e)
        self.nodemap[node] = n
        entry = struct.pack(self.indexformat, *e)

        if not self.inlinedata():
            transaction.add(self.datafile, offset)
            transaction.add(self.indexfile, n * len(entry))
            if data[0]:
                dfh.write(data[0])
            dfh.write(data[1])
            dfh.flush()
        else:
            # inline: data chunks follow their index entries in the
            # index file itself
            ifh.seek(0, 2)
            transaction.add(self.indexfile, ifh.tell(), self.count() - 1)

        if len(self.index) == 1 and self.version != REVLOGV0:
            # very first entry: emit the version header, which occupies
            # the first 4 bytes of the entry's slot
            l = struct.pack(versionformat, self.version)
            ifh.write(l)
            entry = entry[4:]

        ifh.write(entry)

        if self.inlinedata():
            ifh.write(data[0])
            ifh.write(data[1])
            # may migrate to a separate data file if we grew too big
            self.checkinlinesize(transaction, ifh)

        self.cache = (node, n, text)
        return node
1073 1075
1074 1076 def ancestor(self, a, b):
1075 1077 """calculate the least common ancestor of nodes a and b"""
1076 1078
1077 1079 def parents(rev):
1078 1080 return [p for p in self.parentrevs(rev) if p != nullrev]
1079 1081
1080 1082 c = ancestor.ancestor(self.rev(a), self.rev(b), parents)
1081 1083 if c is None:
1082 1084 return nullid
1083 1085
1084 1086 return self.node(c)
1085 1087
1086 1088 def group(self, nodelist, lookup, infocollect=None):
1087 1089 """calculate a delta group
1088 1090
1089 1091 Given a list of changeset revs, return a set of deltas and
1090 1092 metadata corresponding to nodes. the first delta is
1091 1093 parent(nodes[0]) -> nodes[0] the receiver is guaranteed to
1092 1094 have this parent as it has all history before these
1093 1095 changesets. parent is parent[0]
1094 1096 """
1095 1097 revs = [self.rev(n) for n in nodelist]
1096 1098
1097 1099 # if we don't have any revisions touched by these changesets, bail
1098 1100 if not revs:
1099 1101 yield changegroup.closechunk()
1100 1102 return
1101 1103
1102 1104 # add the parent of the first rev
1103 1105 p = self.parents(self.node(revs[0]))[0]
1104 1106 revs.insert(0, self.rev(p))
1105 1107
1106 1108 # build deltas
1107 1109 for d in xrange(0, len(revs) - 1):
1108 1110 a, b = revs[d], revs[d + 1]
1109 1111 nb = self.node(b)
1110 1112
1111 1113 if infocollect is not None:
1112 1114 infocollect(nb)
1113 1115
1114 1116 d = self.revdiff(a, b)
1115 1117 p = self.parents(nb)
1116 1118 meta = nb + p[0] + p[1] + lookup(nb)
1117 1119 yield changegroup.genchunk("%s%s" % (meta, d))
1118 1120
1119 1121 yield changegroup.closechunk()
1120 1122
    def addgroup(self, revs, linkmapper, transaction, unique=0):
        """
        add a delta group

        given a set of deltas, add them to the revision log. the
        first delta is against its parent, which should be in our
        log, the rest are against the previous delta.
        """

        #track the base of the current delta log
        r = self.count()
        t = r - 1
        node = None

        base = prev = nullrev
        start = end = textlen = 0
        if r:
            end = self.end(t)

        ifh = self.opener(self.indexfile, "a+")
        ifh.seek(0, 2)
        transaction.add(self.indexfile, ifh.tell(), self.count())
        if self.inlinedata():
            dfh = None
        else:
            transaction.add(self.datafile, end)
            dfh = self.opener(self.datafile, "a")

        # loop through our set of deltas
        chain = None
        for chunk in revs:
            # chunk layout: node, p1, p2, changeset link, then the delta
            node, p1, p2, cs = struct.unpack("20s20s20s20s", chunk[:80])
            link = linkmapper(cs)
            if node in self.nodemap:
                # this can happen if two branches make the same change
                # if unique:
                #    raise RevlogError(_("already have %s") % hex(node[:4]))
                chain = node
                continue
            delta = chunk[80:]

            for p in (p1, p2):
                if not p in self.nodemap:
                    raise LookupError(_("unknown parent %s") % short(p))

            if not chain:
                # retrieve the parent revision of the delta chain
                chain = p1
                if not chain in self.nodemap:
                    raise LookupError(_("unknown base %s") % short(chain[:4]))

            # full versions are inserted when the needed deltas become
            # comparable to the uncompressed text or when the previous
            # version is not the one we have a delta against. We use
            # the size of the previous full rev as a proxy for the
            # current size.

            if chain == prev:
                tempd = compress(delta)
                cdelta = tempd[0] + tempd[1]
                textlen = mdiff.patchedsize(textlen, delta)

            if chain != prev or (end - start + len(cdelta)) > textlen * 2:
                # flush our writes here so we can read it in revision
                if dfh:
                    dfh.flush()
                ifh.flush()
                # store a full text: rebuild it from the chain base and
                # let _addrevision choose the storage strategy
                text = self.revision(chain)
                text = self.patches(text, [delta])
                chk = self._addrevision(text, transaction, link, p1, p2, None,
                                        ifh, dfh)
                if not dfh and not self.inlinedata():
                    # addrevision switched from inline to conventional
                    # reopen the index
                    dfh = self.opener(self.datafile, "a")
                    ifh = self.opener(self.indexfile, "a")
                if chk != node:
                    raise RevlogError(_("consistency error adding group"))
                textlen = len(text)
            else:
                # store the delta directly against the previous revision
                if self.version == REVLOGV0:
                    e = (end, len(cdelta), base, link, p1, p2, node)
                else:
                    e = (self.offset_type(end, 0), len(cdelta), textlen, base,
                         link, self.rev(p1), self.rev(p2), node)
                self.index.append(e)
                self.nodemap[node] = r
                if self.inlinedata():
                    ifh.write(struct.pack(self.indexformat, *e))
                    ifh.write(cdelta)
                    self.checkinlinesize(transaction, ifh)
                    if not self.inlinedata():
                        # checkinlinesize migrated us off inline storage;
                        # reopen handles on the new layout
                        dfh = self.opener(self.datafile, "a")
                        ifh = self.opener(self.indexfile, "a")
                else:
                    dfh.write(cdelta)
                    ifh.write(struct.pack(self.indexformat, *e))

            t, r, chain, prev = r, r + 1, node, node
            base = self.base(t)
            start = self.start(base)
            end = self.end(t)

        return node
1225 1227
    def strip(self, rev, minlink):
        """remove revision rev and everything after it from the revlog

        minlink is the oldest changeset revision we are allowed to
        strip; rev is advanced past entries linked to older changesets.
        """
        if self.count() == 0 or rev >= self.count():
            return

        if isinstance(self.index, lazyindex):
            self.loadindexmap()

        # When stripping away a revision, we need to make sure it
        # does not actually belong to an older changeset.
        # The minlink parameter defines the oldest revision
        # we're allowed to strip away.
        while minlink > self.index[rev][-4]:
            rev += 1
            if rev >= self.count():
                return

        # first truncate the files on disk
        end = self.start(rev)
        if not self.inlinedata():
            df = self.opener(self.datafile, "a")
            df.truncate(end)
            end = rev * struct.calcsize(self.indexformat)
        else:
            # inline: index entries and data chunks are interleaved
            end += rev * struct.calcsize(self.indexformat)

        indexf = self.opener(self.indexfile, "a")
        indexf.truncate(end)

        # then reset internal state in memory to forget those revisions
        self.cache = None
        self.chunkcache = None
        for x in xrange(rev, self.count()):
            del self.nodemap[self.node(x)]

        del self.index[rev:]
1261 1263
1262 1264 def checksize(self):
1263 1265 expected = 0
1264 1266 if self.count():
1265 1267 expected = self.end(self.count() - 1)
1266 1268
1267 1269 try:
1268 1270 f = self.opener(self.datafile)
1269 1271 f.seek(0, 2)
1270 1272 actual = f.tell()
1271 1273 dd = actual - expected
1272 1274 except IOError, inst:
1273 1275 if inst.errno != errno.ENOENT:
1274 1276 raise
1275 1277 dd = 0
1276 1278
1277 1279 try:
1278 1280 f = self.opener(self.indexfile)
1279 1281 f.seek(0, 2)
1280 1282 actual = f.tell()
1281 1283 s = struct.calcsize(self.indexformat)
1282 1284 i = actual / s
1283 1285 di = actual - (i * s)
1284 1286 if self.inlinedata():
1285 1287 databytes = 0
1286 1288 for r in xrange(self.count()):
1287 1289 databytes += self.length(r)
1288 1290 dd = 0
1289 1291 di = actual - self.count() * s - databytes
1290 1292 except IOError, inst:
1291 1293 if inst.errno != errno.ENOENT:
1292 1294 raise
1293 1295 di = 0
1294 1296
1295 1297 return (dd, di)
1296 1298
1297 1299
@@ -1,204 +1,204
1 1 # sshserver.py - ssh protocol server support for mercurial
2 2 #
3 3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 5 #
6 6 # This software may be used and distributed according to the terms
7 7 # of the GNU General Public License, incorporated herein by reference.
8 8
9 9 from i18n import _
10 10 from node import *
11 11 import os, streamclone, sys, tempfile, util
12 12
class sshserver(object):
    """serve a repository over the ssh wire protocol on stdin/stdout"""

    def __init__(self, ui, repo):
        self.ui = ui
        self.repo = repo
        self.lock = None
        self.fin = sys.stdin
        self.fout = sys.stdout

        # anything printed must not corrupt the protocol stream, so
        # route stdout to stderr
        sys.stdout = sys.stderr

        # Prevent insertion/deletion of CRs
        util.set_binary(self.fin)
        util.set_binary(self.fout)

    def getarg(self):
        # read one "name length\n<value>" argument from the client
        argline = self.fin.readline()[:-1]
        arg, l = argline.split()
        val = self.fin.read(int(l))
        return arg, val

    def respond(self, v):
        # responses are length-prefixed
        self.fout.write("%d\n" % len(v))
        self.fout.write(v)
        self.fout.flush()

    def serve_forever(self):
        # serve commands until the client disconnects
        while self.serve_one(): pass
        sys.exit(0)

    def serve_one(self):
        # dispatch one command line to the matching do_* method;
        # unknown commands get an empty response
        cmd = self.fin.readline()[:-1]
        if cmd:
            impl = getattr(self, 'do_' + cmd, None)
            if impl: impl()
            else: self.respond("")
        return cmd != ''

    def do_lookup(self):
        # resolve a symbolic key to a hex node; respond "1 <node>" on
        # success, "0 <error message>" on failure
        arg, key = self.getarg()
        assert arg == 'key'
        try:
            r = hex(self.repo.lookup(key))
            success = 1
        except Exception,inst:
            r = str(inst)
            success = 0
        self.respond("%s %s\n" % (success, r))

    def do_heads(self):
        h = self.repo.heads()
        self.respond(" ".join(map(hex, h)) + "\n")

    def do_hello(self):
        '''the hello command returns a set of lines describing various
        interesting things about the server, in an RFC822-like format.
        Currently the only one defined is "capabilities", which
        consists of a line in the form:

        capabilities: space separated list of tokens
        '''

        caps = ['unbundle', 'lookup', 'changegroupsubset']
        if self.ui.configbool('server', 'uncompressed'):
            # advertise the changelog revlog version for streaming
            # clone compatibility
            caps.append('stream=%d' % self.repo.changelog.version)
        self.respond("capabilities: %s\n" % (' '.join(caps),))

    def do_lock(self):
        '''DEPRECATED - allowing remote client to lock repo is not safe'''

        self.lock = self.repo.lock()
        self.respond("")

    def do_unlock(self):
        '''DEPRECATED'''

        if self.lock:
            self.lock.release()
        self.lock = None
        self.respond("")

    def do_branches(self):
        # one response line per branch chain for the requested nodes
        arg, nodes = self.getarg()
        nodes = map(bin, nodes.split(" "))
        r = []
        for b in self.repo.branches(nodes):
            r.append(" ".join(map(hex, b)) + "\n")
        self.respond("".join(r))

    def do_between(self):
        arg, pairs = self.getarg()
        pairs = [map(bin, p.split("-")) for p in pairs.split(" ")]
        r = []
        for b in self.repo.between(pairs):
            r.append(" ".join(map(hex, b)) + "\n")
        self.respond("".join(r))

    def do_changegroup(self):
        # stream a changegroup rooted at the given nodes to the client
        nodes = []
        arg, roots = self.getarg()
        nodes = map(bin, roots.split(" "))

        cg = self.repo.changegroup(nodes, 'serve')
        while True:
            d = cg.read(4096)
            if not d:
                break
            self.fout.write(d)

        self.fout.flush()

    def do_changegroupsubset(self):
        bases = []
        heads = []
        argmap = dict([self.getarg(), self.getarg()])
        bases = [bin(n) for n in argmap['bases'].split(' ')]
        heads = [bin(n) for n in argmap['heads'].split(' ')]

        cg = self.repo.changegroupsubset(bases, heads, 'serve')
        while True:
            d = cg.read(4096)
            if not d:
                break
            self.fout.write(d)

        self.fout.flush()

    def do_addchangegroup(self):
        '''DEPRECATED'''

        if not self.lock:
            self.respond("not locked")
            return

        self.respond("")
        r = self.repo.addchangegroup(self.fin, 'serve', self.client_url())
        self.respond(str(r))

    def client_url(self):
        # identify the client by its SSH address, for hooks and logging
        client = os.environ.get('SSH_CLIENT', '').split(' ', 1)[0]
        return 'remote:ssh:' + client

    def do_unbundle(self):
        their_heads = self.getarg()[1].split()

        def check_heads():
            heads = map(hex, self.repo.heads())
            return their_heads == [hex('force')] or their_heads == heads

        # fail early if possible
        if not check_heads():
            self.respond(_('unsynced changes'))
            return

        self.respond('')

        # write bundle data to temporary file because it can be big

        try:
            fd, tempname = tempfile.mkstemp(prefix='hg-unbundle-')
            fp = os.fdopen(fd, 'wb+')

            count = int(self.fin.readline())
            while count:
                fp.write(self.fin.read(count))
                count = int(self.fin.readline())

            was_locked = self.lock is not None
            if not was_locked:
                self.lock = self.repo.lock()
            try:
                if not check_heads():
                    # someone else committed/pushed/unbundled while we
                    # were transferring data
                    self.respond(_('unsynced changes'))
                    return
                self.respond('')

                # push can proceed

                fp.seek(0)
                r = self.repo.addchangegroup(fp, 'serve', self.client_url())
                self.respond(str(r))
            finally:
                if not was_locked:
                    self.lock.release()
                    self.lock = None
        finally:
            fp.close()
            os.unlink(tempname)

    def do_stream_out(self):
        streamclone.stream_out(self.repo, self.fout)
@@ -1,85 +1,84
1 1 # statichttprepo.py - simple http repository class for mercurial
2 2 #
3 3 # This provides read-only repo access to repositories exported via static http
4 4 #
5 5 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
6 6 #
7 7 # This software may be used and distributed according to the terms
8 8 # of the GNU General Public License, incorporated herein by reference.
9 9
10 10 from i18n import _
11 11 import changelog, filelog, httprangereader
12 12 import repo, localrepo, manifest, os, urllib, urllib2, util
13 13
14 14 class rangereader(httprangereader.httprangereader):
15 15 def read(self, size=None):
16 16 try:
17 17 return httprangereader.httprangereader.read(self, size)
18 18 except urllib2.HTTPError, inst:
19 19 raise IOError(None, inst)
20 20 except urllib2.URLError, inst:
21 21 raise IOError(None, inst.reason[1])
22 22
def opener(base):
    """return a function that opens files over http"""
    prefix = base
    def o(path, mode="r"):
        # join the quoted path onto the repository base URL
        url = "/".join((prefix, urllib.quote(path)))
        return rangereader(url)
    return o
30 30
class statichttprepository(localrepo.localrepository):
    """read-only repository accessed over plain (static) HTTP"""

    def __init__(self, ui, path):
        self._url = path
        self.ui = ui

        self.path = (path + "/.hg")
        self.opener = opener(self.path)
        # find requirements
        try:
            requirements = self.opener("requires").read().splitlines()
        except IOError:
            # old repositories have no requires file
            requirements = []
        # check them
        for r in requirements:
            if r not in self.supported:
                raise repo.RepoError(_("requirement '%s' not supported") % r)

        # setup store
        if "store" in requirements:
            self.encodefn = util.encodefilename
            self.decodefn = util.decodefilename
            self.spath = self.path + "/store"
        else:
            # legacy layout: store files live directly under .hg
            self.encodefn = lambda x: x
            self.decodefn = lambda x: x
            self.spath = self.path
        self.sopener = util.encodedopener(opener(self.spath), self.encodefn)

        self.manifest = manifest.manifest(self.sopener)
        self.changelog = changelog.changelog(self.sopener)
        self.tagscache = None
        self.nodetagscache = None
        self.encodepats = None
        self.decodepats = None

    def url(self):
        return 'static-' + self._url

    def dev(self):
        # no local device backs this repository
        return -1

    def local(self):
        return False
75 74
def instance(ui, path, create):
    """open a statichttprepository for the given URL (read-only)"""
    if create:
        raise util.Abort(_('cannot create new static-http repository'))
    # accept the deprecated old-http:// scheme as an alias
    if not path.startswith('old-http:'):
        path = path[7:]
    else:
        ui.warn(_("old-http:// syntax is deprecated, "
                  "please use static-http:// instead\n"))
        path = path[4:]
    return statichttprepository(ui, path)
@@ -1,456 +1,450
1 1 # ui.py - user interface bits for mercurial
2 2 #
3 3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from i18n import _
9 9 import errno, getpass, os, re, socket, sys, tempfile
10 10 import ConfigParser, traceback, util
11 11
def dupconfig(orig):
    """return a new configparser with the same defaults and contents"""
    copy = util.configparser(orig.defaults())
    updateconfig(orig, copy)
    return copy
16 16
def updateconfig(source, dest, sections=None):
    """copy options from source to dest, creating sections as needed

    If sections is given, only those sections are copied; otherwise
    every section of source is.
    """
    for section in (sections or source.sections()):
        if not dest.has_section(section):
            dest.add_section(section)
        # raw=True: copy values verbatim, without interpolation
        for name, value in source.items(section, raw=True):
            dest.set(section, name, value)
25 25
26 26 class ui(object):
    def __init__(self, verbose=False, debug=False, quiet=False,
                 interactive=True, traceback=False, report_untrusted=True,
                 parentui=None):
        """create a ui object

        Without parentui this is the root ui: it reads the global
        config files and applies the flag arguments.  With parentui,
        state is copied from the (ultimate) parent instead.
        """
        self.overlay = None
        self.buffers = []
        if parentui is None:
            # this is the parent of all ui children
            self.parentui = None
            self.readhooks = []
            self.quiet = quiet
            self.verbose = verbose
            self.debugflag = debug
            self.interactive = interactive
            self.traceback = traceback
            self.report_untrusted = report_untrusted
            self.trusted_users = {}
            self.trusted_groups = {}
            # if ucdata is not None, its keys must be a superset of cdata's
            self.cdata = util.configparser()
            self.ucdata = None
            # we always trust global config files
            self.check_trusted = False
            self.readconfig(util.rcpath())
            self.check_trusted = True
            self.updateopts(verbose, debug, quiet, interactive)
        else:
            # parentui may point to an ui object which is already a child
            self.parentui = parentui.parentui or parentui
            self.readhooks = self.parentui.readhooks[:]
            self.trusted_users = parentui.trusted_users.copy()
            self.trusted_groups = parentui.trusted_groups.copy()
            self.cdata = dupconfig(self.parentui.cdata)
            if self.parentui.ucdata:
                self.ucdata = dupconfig(self.parentui.ucdata)
            if self.parentui.overlay:
                self.overlay = dupconfig(self.parentui.overlay)
63 63
    def __getattr__(self, key):
        # delegate attributes not set on a child ui to its parent
        return getattr(self.parentui, key)
66 66
67 67 def updateopts(self, verbose=False, debug=False, quiet=False,
68 68 interactive=True, traceback=False, config=[]):
69 69 for section, name, value in config:
70 70 self.setconfig(section, name, value)
71 71
72 72 if quiet or verbose or debug:
73 73 self.setconfig('ui', 'quiet', str(bool(quiet)))
74 74 self.setconfig('ui', 'verbose', str(bool(verbose)))
75 75 self.setconfig('ui', 'debug', str(bool(debug)))
76 76
77 77 self.verbosity_constraints()
78 78
79 79 if not interactive:
80 80 self.setconfig('ui', 'interactive', 'False')
81 81 self.interactive = False
82 82
83 83 self.traceback = self.traceback or traceback
84 84
85 85 def verbosity_constraints(self):
86 86 self.quiet = self.configbool('ui', 'quiet')
87 87 self.verbose = self.configbool('ui', 'verbose')
88 88 self.debugflag = self.configbool('ui', 'debug')
89 89
90 90 if self.debugflag:
91 91 self.verbose = True
92 92 self.quiet = False
93 93 elif self.verbose and self.quiet:
94 94 self.quiet = self.verbose = False
95 95
    def _is_trusted(self, fp, f, warn=True):
        """tell whether config file f (open as fp) may be trusted

        Trusted when trust checking is off, when we own the file, or
        when its owner/group appears in the trusted lists.
        """
        if not self.check_trusted:
            return True
        st = util.fstat(fp)
        if util.isowner(fp, st):
            return True
        tusers = self.trusted_users
        tgroups = self.trusted_groups
        if not tusers:
            # bootstrap: the current user is always trusted
            user = util.username()
            if user is not None:
                self.trusted_users[user] = 1
                self.fixconfig(section='trusted')
        if (tusers or tgroups) and '*' not in tusers and '*' not in tgroups:
            user = util.username(st.st_uid)
            group = util.groupname(st.st_gid)
            if user not in tusers and group not in tgroups:
                if warn and self.report_untrusted:
                    self.warn(_('Not trusting file %s from untrusted '
                                'user %s, group %s\n') % (f, user, group))
                return False
        return True
118 118
    def readconfig(self, fn, root=None):
        """read config file(s) fn

        Trusted settings are merged into cdata; all settings go into
        ucdata (once it exists).  root absolutizes relative [paths]
        entries.  Registered read hooks run afterwards.
        """
        if isinstance(fn, basestring):
            fn = [fn]
        for f in fn:
            try:
                fp = open(f)
            except IOError:
                # missing config files are silently skipped
                continue
            cdata = self.cdata
            trusted = self._is_trusted(fp, f)
            if not trusted:
                if self.ucdata is None:
                    self.ucdata = dupconfig(self.cdata)
                cdata = self.ucdata
            elif self.ucdata is not None:
                # use a separate configparser, so that we don't accidentally
                # override ucdata settings later on.
                cdata = util.configparser()

            try:
                cdata.readfp(fp, f)
            except ConfigParser.ParsingError, inst:
                msg = _("Failed to parse %s\n%s") % (f, inst)
                if trusted:
                    raise util.Abort(msg)
                self.warn(_("Ignored: %s\n") % msg)

            if trusted:
                if cdata != self.cdata:
                    updateconfig(cdata, self.cdata)
                if self.ucdata is not None:
                    updateconfig(cdata, self.ucdata)
        # override data from config files with data set with ui.setconfig
        if self.overlay:
            updateconfig(self.overlay, self.cdata)
        if root is None:
            root = os.path.expanduser('~')
        self.fixconfig(root=root)
        for hook in self.readhooks:
            hook(self)
159 159
    def addreadhook(self, hook):
        """register a callable invoked after each readconfig()"""
        self.readhooks.append(hook)
162 162
163 163 def readsections(self, filename, *sections):
164 164 """Read filename and add only the specified sections to the config data
165 165
166 166 The settings are added to the trusted config data.
167 167 """
168 168 if not sections:
169 169 return
170 170
171 171 cdata = util.configparser()
172 172 try:
173 173 cdata.read(filename)
174 174 except ConfigParser.ParsingError, inst:
175 175 raise util.Abort(_("failed to parse %s\n%s") % (filename,
176 176 inst))
177 177
178 178 for section in sections:
179 179 if not cdata.has_section(section):
180 180 cdata.add_section(section)
181 181
182 182 updateconfig(cdata, self.cdata, sections)
183 183 if self.ucdata:
184 184 updateconfig(cdata, self.ucdata, sections)
185 185
    def fixconfig(self, section=None, name=None, value=None, root=None):
        """normalize derived state after configuration changes

        With no section, everything is fixed up: [paths] entries are
        absolutized, ui verbosity/interactive flags refreshed and the
        trusted users/groups lists updated.
        """
        # translate paths relative to root (or home) into absolute paths
        if section is None or section == 'paths':
            if root is None:
                root = os.getcwd()
            items = section and [(name, value)] or []
            for cdata in self.cdata, self.ucdata, self.overlay:
                if not cdata: continue
                if not items and cdata.has_section('paths'):
                    pathsitems = cdata.items('paths')
                else:
                    pathsitems = items
                for n, path in pathsitems:
                    # URLs and already-absolute paths are left alone
                    if path and "://" not in path and not os.path.isabs(path):
                        cdata.set("paths", n, os.path.join(root, path))

        # update quiet/verbose/debug and interactive status
        if section is None or section == 'ui':
            if name is None or name in ('quiet', 'verbose', 'debug'):
                self.verbosity_constraints()

            if name is None or name == 'interactive':
                self.interactive = self.configbool("ui", "interactive", True)

        # update trust information
        if (section is None or section == 'trusted') and self.trusted_users:
            for user in self.configlist('trusted', 'users'):
                self.trusted_users[user] = 1
            for group in self.configlist('trusted', 'groups'):
                self.trusted_groups[group] = 1
216 216
217 217 def setconfig(self, section, name, value):
218 218 if not self.overlay:
219 219 self.overlay = util.configparser()
220 220 for cdata in (self.overlay, self.cdata, self.ucdata):
221 221 if not cdata: continue
222 222 if not cdata.has_section(section):
223 223 cdata.add_section(section)
224 224 cdata.set(section, name, value)
225 225 self.fixconfig(section, name, value)
226 226
227 227 def _get_cdata(self, untrusted):
228 228 if untrusted and self.ucdata:
229 229 return self.ucdata
230 230 return self.cdata
231 231
232 232 def _config(self, section, name, default, funcname, untrusted, abort):
233 233 cdata = self._get_cdata(untrusted)
234 234 if cdata.has_option(section, name):
235 235 try:
236 236 func = getattr(cdata, funcname)
237 237 return func(section, name)
238 238 except ConfigParser.InterpolationError, inst:
239 239 msg = _("Error in configuration section [%s] "
240 240 "parameter '%s':\n%s") % (section, name, inst)
241 241 if abort:
242 242 raise util.Abort(msg)
243 243 self.warn(_("Ignored: %s\n") % msg)
244 244 return default
245 245
246 246 def _configcommon(self, section, name, default, funcname, untrusted):
247 247 value = self._config(section, name, default, funcname,
248 248 untrusted, abort=True)
249 249 if self.debugflag and not untrusted and self.ucdata:
250 250 uvalue = self._config(section, name, None, funcname,
251 251 untrusted=True, abort=False)
252 252 if uvalue is not None and uvalue != value:
253 253 self.warn(_("Ignoring untrusted configuration option "
254 254 "%s.%s = %s\n") % (section, name, uvalue))
255 255 return value
256 256
    def config(self, section, name, default=None, untrusted=False):
        """return a config value as a string, or default if unset"""
        return self._configcommon(section, name, default, 'get', untrusted)
259 259
    def configbool(self, section, name, default=False, untrusted=False):
        """return a config value interpreted as a boolean"""
        return self._configcommon(section, name, default, 'getboolean',
                                  untrusted)
263 263
264 264 def configlist(self, section, name, default=None, untrusted=False):
265 265 """Return a list of comma/space separated strings"""
266 266 result = self.config(section, name, untrusted=untrusted)
267 267 if result is None:
268 268 result = default or []
269 269 if isinstance(result, basestring):
270 270 result = result.replace(",", " ").split()
271 271 return result
272 272
273 273 def has_config(self, section, untrusted=False):
274 274 '''tell whether section exists in config.'''
275 275 cdata = self._get_cdata(untrusted)
276 276 return cdata.has_section(section)
277 277
278 278 def _configitems(self, section, untrusted, abort):
279 279 items = {}
280 280 cdata = self._get_cdata(untrusted)
281 281 if cdata.has_section(section):
282 282 try:
283 283 items.update(dict(cdata.items(section)))
284 284 except ConfigParser.InterpolationError, inst:
285 285 msg = _("Error in configuration section [%s]:\n"
286 286 "%s") % (section, inst)
287 287 if abort:
288 288 raise util.Abort(msg)
289 289 self.warn(_("Ignored: %s\n") % msg)
290 290 return items
291 291
292 292 def configitems(self, section, untrusted=False):
293 293 items = self._configitems(section, untrusted=untrusted, abort=True)
294 294 if self.debugflag and not untrusted and self.ucdata:
295 295 uitems = self._configitems(section, untrusted=True, abort=False)
296 296 keys = uitems.keys()
297 297 keys.sort()
298 298 for k in keys:
299 299 if uitems[k] != items.get(k):
300 300 self.warn(_("Ignoring untrusted configuration option "
301 301 "%s.%s = %s\n") % (section, k, uitems[k]))
302 302 x = items.items()
303 303 x.sort()
304 304 return x
305 305
306 306 def walkconfig(self, untrusted=False):
307 307 cdata = self._get_cdata(untrusted)
308 308 sections = cdata.sections()
309 309 sections.sort()
310 310 for section in sections:
311 311 for name, value in self.configitems(section, untrusted):
312 312 yield section, name, str(value).replace('\n', '\\n')
313 313
def extensions(self):
    """Return [extensions] config items, expanding '~' in non-empty paths."""
    exts = self.configitems("extensions")
    for idx, (name, path) in enumerate(exts):
        if path:
            exts[idx] = (name, os.path.expanduser(path))
    return exts
320 320
def hgignorefiles(self):
    """Return expanded paths of all ui.ignore / ui.ignore.* files."""
    return [os.path.expanduser(value)
            for key, value in self.configitems("ui")
            if key == 'ignore' or key.startswith('ignore.')]
327 327
def configrevlog(self):
    """Return the [revlog] config section as a dict with lowercased keys."""
    opts = {}
    for name, value in self.configitems("revlog"):
        opts[name.lower()] = value
    return opts
333
def username(self):
    """Return default username to be used in commits.

    Searched in this order: $HGUSER, [ui] section of hgrcs, $EMAIL
    and stop searching if one of these is set.
    If not found, use ($LOGNAME or $USER or $LNAME or
    $USERNAME) +"@full.hostname".
    """
    # each source is consulted lazily, only when the previous one is unset
    user = os.environ.get("HGUSER")
    if user is None:
        user = self.config("ui", "username")
    if user is None:
        user = os.environ.get("EMAIL")
    if user is None:
        # last resort: synthesize login@fqdn; util.getuser raises
        # KeyError when no login name can be determined
        try:
            user = '%s@%s' % (util.getuser(), socket.getfqdn())
            self.warn(_("No username found, using '%s' instead\n") % user)
        except KeyError:
            pass
    if not user:
        raise util.Abort(_("Please specify a username."))
    return user
356 350
def shortuser(self, user):
    """Return a short representation of a user name or email address."""
    if self.verbose:
        return user
    return util.shortuser(user)
361 355
def expandpath(self, loc, default=None):
    """Return repository location relative to cwd or from [paths]"""
    # an URL or an existing local repository is taken literally
    if "://" in loc or os.path.isdir(os.path.join(loc, '.hg')):
        return loc

    path = self.config("paths", loc)
    if not path and default is not None:
        path = self.config("paths", default)
    return path or loc
371 365
def pushbuffer(self):
    """Start capturing output into a fresh buffer (see popbuffer)."""
    self.buffers.append([])
374 368
def popbuffer(self):
    """Stop capturing and return everything written since pushbuffer."""
    return "".join(self.buffers.pop())
377 371
def write(self, *args):
    """Write args (stringified) to stdout, or into the innermost
    capture buffer when one is active."""
    strings = [str(a) for a in args]
    if self.buffers:
        self.buffers[-1].extend(strings)
    else:
        for s in strings:
            sys.stdout.write(s)
384 378
385 379 def write_err(self, *args):
386 380 try:
387 381 if not sys.stdout.closed: sys.stdout.flush()
388 382 for a in args:
389 383 sys.stderr.write(str(a))
390 384 # stderr may be buffered under win32 when redirected to files,
391 385 # including stdout.
392 386 if not sys.stderr.closed: sys.stderr.flush()
393 387 except IOError, inst:
394 388 if inst.errno != errno.EPIPE:
395 389 raise
396 390
def flush(self):
    """Flush stdout and stderr, deliberately ignoring all errors
    (best effort -- the streams may already be closed)."""
    try:
        sys.stdout.flush()
    except:
        pass
    try:
        sys.stderr.flush()
    except:
        pass
402 396
def readline(self):
    """Read one line from stdin with the trailing newline stripped."""
    return sys.stdin.readline()[:-1]
def prompt(self, msg, pat=None, default="y"):
    """Ask the user msg and return the answer.

    Returns default immediately when not interactive.  With pat, keep
    asking until the answer matches the regular expression.
    """
    if not self.interactive:
        return default
    while 1:
        self.write(msg, " ")
        answer = self.readline()
        if not pat or re.match(pat, answer):
            return answer
        self.write(_("unrecognized response\n"))
def getpass(self, prompt=None, default=None):
    """Prompt for a password; return default when not interactive."""
    if not self.interactive:
        return default
    return getpass.getpass(prompt or _('password: '))
def status(self, *msg):
    """Write msg unless quiet mode is on."""
    if not self.quiet:
        self.write(*msg)

def warn(self, *msg):
    """Write msg to stderr unconditionally."""
    self.write_err(*msg)

def note(self, *msg):
    """Write msg only in verbose mode."""
    if self.verbose:
        self.write(*msg)

def debug(self, *msg):
    """Write msg only when debugging is enabled."""
    if self.debugflag:
        self.write(*msg)
def edit(self, text, user):
    """Run the user's editor on text and return the edited result.

    The editor is chosen from $HGEDITOR, ui.editor, then $EDITOR
    (defaulting to vi).  Lines starting with "HG:" are stripped from
    the result, and the temporary file is always removed.
    """
    fd, name = tempfile.mkstemp(prefix="hg-editor-", suffix=".txt",
                                text=True)
    try:
        fp = os.fdopen(fd, "w")
        fp.write(text)
        fp.close()

        editor = (os.environ.get("HGEDITOR") or
                  self.config("ui", "editor") or
                  os.environ.get("EDITOR", "vi"))

        # run with HGUSER exported so hooks in the editor see the committer
        util.system("%s \"%s\"" % (editor, name),
                    environ={'HGUSER': user},
                    onerr=util.Abort, errprefix=_("edit failed"))

        fp = open(name)
        edited = fp.read()
        fp.close()
        edited = re.sub("(?m)^HG:.*\n", "", edited)
    finally:
        os.unlink(name)

    return edited
449 443
def print_exc(self):
    """Print the current exception's traceback when traceback printing
    is enabled; return whether it was printed.  Only meaningful when
    called from inside an exception handler."""
    if self.traceback:
        traceback.print_exc()
    return self.traceback
@@ -1,206 +1,206
1 1 # verify.py - repository integrity checking for Mercurial
2 2 #
3 3 # Copyright 2006 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import *
9 9 from i18n import _
10 10 import revlog, mdiff
11 11
12 12 def verify(repo):
13 13 filelinkrevs = {}
14 14 filenodes = {}
15 15 changesets = revisions = files = 0
16 16 errors = [0]
17 17 warnings = [0]
18 18 neededmanifests = {}
19 19
20 20 def err(msg):
21 21 repo.ui.warn(msg + "\n")
22 22 errors[0] += 1
23 23
24 24 def warn(msg):
25 25 repo.ui.warn(msg + "\n")
26 26 warnings[0] += 1
27 27
28 28 def checksize(obj, name):
29 29 d = obj.checksize()
30 30 if d[0]:
31 31 err(_("%s data length off by %d bytes") % (name, d[0]))
32 32 if d[1]:
33 33 err(_("%s index contains %d extra bytes") % (name, d[1]))
34 34
35 35 def checkversion(obj, name):
36 36 if obj.version != revlog.REVLOGV0:
37 37 if not revlogv1:
38 38 warn(_("warning: `%s' uses revlog format 1") % name)
39 39 elif revlogv1:
40 40 warn(_("warning: `%s' uses revlog format 0") % name)
41 41
42 revlogv1 = repo.revlogversion != revlog.REVLOGV0
43 if repo.ui.verbose or revlogv1 != repo.revlogv1:
42 revlogv1 = repo.changelog.version != revlog.REVLOGV0
43 if repo.ui.verbose or not revlogv1:
44 44 repo.ui.status(_("repository uses revlog format %d\n") %
45 45 (revlogv1 and 1 or 0))
46 46
47 47 seen = {}
48 48 repo.ui.status(_("checking changesets\n"))
49 49 checksize(repo.changelog, "changelog")
50 50
51 51 for i in xrange(repo.changelog.count()):
52 52 changesets += 1
53 53 n = repo.changelog.node(i)
54 54 l = repo.changelog.linkrev(n)
55 55 if l != i:
56 56 err(_("incorrect link (%d) for changeset revision %d") %(l, i))
57 57 if n in seen:
58 58 err(_("duplicate changeset at revision %d") % i)
59 59 seen[n] = 1
60 60
61 61 for p in repo.changelog.parents(n):
62 62 if p not in repo.changelog.nodemap:
63 63 err(_("changeset %s has unknown parent %s") %
64 64 (short(n), short(p)))
65 65 try:
66 66 changes = repo.changelog.read(n)
67 67 except KeyboardInterrupt:
68 68 repo.ui.warn(_("interrupted"))
69 69 raise
70 70 except Exception, inst:
71 71 err(_("unpacking changeset %s: %s") % (short(n), inst))
72 72 continue
73 73
74 74 neededmanifests[changes[0]] = n
75 75
76 76 for f in changes[3]:
77 77 filelinkrevs.setdefault(f, []).append(i)
78 78
79 79 seen = {}
80 80 repo.ui.status(_("checking manifests\n"))
81 81 checkversion(repo.manifest, "manifest")
82 82 checksize(repo.manifest, "manifest")
83 83
84 84 for i in xrange(repo.manifest.count()):
85 85 n = repo.manifest.node(i)
86 86 l = repo.manifest.linkrev(n)
87 87
88 88 if l < 0 or l >= repo.changelog.count():
89 89 err(_("bad manifest link (%d) at revision %d") % (l, i))
90 90
91 91 if n in neededmanifests:
92 92 del neededmanifests[n]
93 93
94 94 if n in seen:
95 95 err(_("duplicate manifest at revision %d") % i)
96 96
97 97 seen[n] = 1
98 98
99 99 for p in repo.manifest.parents(n):
100 100 if p not in repo.manifest.nodemap:
101 101 err(_("manifest %s has unknown parent %s") %
102 102 (short(n), short(p)))
103 103
104 104 try:
105 105 for f, fn in repo.manifest.readdelta(n).iteritems():
106 106 filenodes.setdefault(f, {})[fn] = 1
107 107 except KeyboardInterrupt:
108 108 repo.ui.warn(_("interrupted"))
109 109 raise
110 110 except Exception, inst:
111 111 err(_("reading delta for manifest %s: %s") % (short(n), inst))
112 112 continue
113 113
114 114 repo.ui.status(_("crosschecking files in changesets and manifests\n"))
115 115
116 116 for m, c in neededmanifests.items():
117 117 err(_("Changeset %s refers to unknown manifest %s") %
118 118 (short(m), short(c)))
119 119 del neededmanifests
120 120
121 121 for f in filenodes:
122 122 if f not in filelinkrevs:
123 123 err(_("file %s in manifest but not in changesets") % f)
124 124
125 125 for f in filelinkrevs:
126 126 if f not in filenodes:
127 127 err(_("file %s in changeset but not in manifest") % f)
128 128
129 129 repo.ui.status(_("checking files\n"))
130 130 ff = filenodes.keys()
131 131 ff.sort()
132 132 for f in ff:
133 133 if f == "/dev/null":
134 134 continue
135 135 files += 1
136 136 if not f:
137 137 err(_("file without name in manifest %s") % short(n))
138 138 continue
139 139 fl = repo.file(f)
140 140 checkversion(fl, f)
141 141 checksize(fl, f)
142 142
143 143 nodes = {nullid: 1}
144 144 seen = {}
145 145 for i in xrange(fl.count()):
146 146 revisions += 1
147 147 n = fl.node(i)
148 148
149 149 if n in seen:
150 150 err(_("%s: duplicate revision %d") % (f, i))
151 151 if n not in filenodes[f]:
152 152 err(_("%s: %d:%s not in manifests") % (f, i, short(n)))
153 153 else:
154 154 del filenodes[f][n]
155 155
156 156 flr = fl.linkrev(n)
157 157 if flr not in filelinkrevs.get(f, []):
158 158 err(_("%s:%s points to unexpected changeset %d")
159 159 % (f, short(n), flr))
160 160 else:
161 161 filelinkrevs[f].remove(flr)
162 162
163 163 # verify contents
164 164 try:
165 165 t = fl.read(n)
166 166 except KeyboardInterrupt:
167 167 repo.ui.warn(_("interrupted"))
168 168 raise
169 169 except Exception, inst:
170 170 err(_("unpacking file %s %s: %s") % (f, short(n), inst))
171 171
172 172 # verify parents
173 173 (p1, p2) = fl.parents(n)
174 174 if p1 not in nodes:
175 175 err(_("file %s:%s unknown parent 1 %s") %
176 176 (f, short(n), short(p1)))
177 177 if p2 not in nodes:
178 178 err(_("file %s:%s unknown parent 2 %s") %
179 179 (f, short(n), short(p1)))
180 180 nodes[n] = 1
181 181
182 182 # check renames
183 183 try:
184 184 rp = fl.renamed(n)
185 185 if rp:
186 186 fl2 = repo.file(rp[0])
187 187 rev = fl2.rev(rp[1])
188 188 except KeyboardInterrupt:
189 189 repo.ui.warn(_("interrupted"))
190 190 raise
191 191 except Exception, inst:
192 192 err(_("checking rename on file %s %s: %s") % (f, short(n), inst))
193 193
194 194 # cross-check
195 195 for node in filenodes[f]:
196 196 err(_("node %s in manifests not in %s") % (hex(node), f))
197 197
198 198 repo.ui.status(_("%d files, %d changesets, %d total revisions\n") %
199 199 (files, changesets, revisions))
200 200
201 201 if warnings[0]:
202 202 repo.ui.warn(_("%d warnings encountered!\n") % warnings[0])
203 203 if errors[0]:
204 204 repo.ui.warn(_("%d integrity errors encountered!\n") % errors[0])
205 205 return 1
206 206
General Comments 0
You need to be logged in to leave comments. Login now