##// END OF EJS Templates
revlog: simplify revlog version handling...
Matt Mackall -
r4258:b11a2fb5 default
parent child Browse files
Show More
@@ -1,161 +1,161 b''
1 # appendfile.py - special classes to make repo updates atomic
1 # appendfile.py - special classes to make repo updates atomic
2 #
2 #
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import cStringIO, changelog, errno, manifest, os, tempfile, util
8 import cStringIO, changelog, errno, manifest, os, tempfile, util
9
9
10 # writes to metadata files are ordered. reads: changelog, manifest,
10 # writes to metadata files are ordered. reads: changelog, manifest,
11 # normal files. writes: normal files, manifest, changelog.
11 # normal files. writes: normal files, manifest, changelog.
12
12
13 # manifest contains pointers to offsets in normal files. changelog
13 # manifest contains pointers to offsets in normal files. changelog
14 # contains pointers to offsets in manifest. if reader reads old
14 # contains pointers to offsets in manifest. if reader reads old
15 # changelog while manifest or normal files are written, it has no
15 # changelog while manifest or normal files are written, it has no
16 # pointers into new parts of those files that are maybe not consistent
16 # pointers into new parts of those files that are maybe not consistent
17 # yet, so will not read them.
17 # yet, so will not read them.
18
18
19 # localrepo.addchangegroup thinks it writes changelog first, then
19 # localrepo.addchangegroup thinks it writes changelog first, then
20 # manifest, then normal files (this is order they are available, and
20 # manifest, then normal files (this is order they are available, and
21 # needed for computing linkrev fields), but uses appendfile to hide
21 # needed for computing linkrev fields), but uses appendfile to hide
22 # updates from readers. data not written to manifest or changelog
22 # updates from readers. data not written to manifest or changelog
23 # until all normal files updated. write manifest first, then
23 # until all normal files updated. write manifest first, then
24 # changelog.
24 # changelog.
25
25
26 # with this write ordering, readers cannot see inconsistent view of
26 # with this write ordering, readers cannot see inconsistent view of
27 # repo during update.
27 # repo during update.
28
28
class appendfile(object):
    '''Implement enough of the file protocol to append to a revlog file.

    Appended data is written to a temp file.  Reads and seeks span the
    real file followed by the temp file, so readers of the real file
    cannot see appended data until it is copied back (writedata).
    '''

    def __init__(self, fp, tmpname):
        # reuse an existing temp file when a name is supplied,
        # otherwise create a fresh one
        if not tmpname:
            fd, tmpname = tempfile.mkstemp(prefix="hg-appendfile-")
            os.close(fd)
        self.tmpname = tmpname
        self.tmpfp = util.posixfile(self.tmpname, 'ab+')
        self.realfp = fp
        self.offset = fp.tell()
        # the real file is not written by anyone else, so cache its
        # size to make seek and read fast
        self.realsize = util.fstat(fp).st_size
        self.name = fp.name

    def end(self):
        '''return the virtual end offset: real size + temp size.'''
        self.tmpfp.flush()  # make sure the stat below is correct
        return self.realsize + util.fstat(self.tmpfp).st_size

    def tell(self):
        '''return the current virtual offset.'''
        return self.offset

    def flush(self):
        self.tmpfp.flush()

    def close(self):
        self.realfp.close()
        self.tmpfp.close()

    def seek(self, offset, whence=0):
        '''seek within the virtual file spanning real and temp files.'''
        if whence == 1:
            self.offset += offset
        elif whence == 2:
            self.offset = self.end() + offset
        elif whence == 0:
            self.offset = offset
        # position whichever underlying file the new offset lands in
        if self.offset < self.realsize:
            self.realfp.seek(self.offset)
        else:
            self.tmpfp.seek(self.offset - self.realsize)

    def read(self, count=-1):
        '''read bytes; the only trick is reads spanning both files.'''
        buf = cStringIO.StringIO()
        start_offset = self.offset
        if self.offset < self.realsize:
            data = self.realfp.read(count)
            buf.write(data)
            self.offset += len(data)
            if count > 0:
                count -= len(data)
        if count != 0:
            # re-position the temp file if the offset moved above
            if start_offset != self.offset:
                self.tmpfp.seek(self.offset - self.realsize)
            data = self.tmpfp.read(count)
            buf.write(data)
            self.offset += len(data)
        return buf.getvalue()

    def write(self, s):
        '''append s to the temp file.'''
        self.tmpfp.seek(0, 2)
        self.tmpfp.write(s)
        # all writes are appends, so offset must go to end of file
        self.offset = self.realsize + self.tmpfp.tell()
102
102
103 class appendopener(object):
103 class appendopener(object):
104 '''special opener for files that only read or append.'''
104 '''special opener for files that only read or append.'''
105
105
106 def __init__(self, opener):
106 def __init__(self, opener):
107 self.realopener = opener
107 self.realopener = opener
108 # key: file name, value: appendfile name
108 # key: file name, value: appendfile name
109 self.tmpnames = {}
109 self.tmpnames = {}
110
110
111 def __call__(self, name, mode='r'):
111 def __call__(self, name, mode='r'):
112 '''open file.'''
112 '''open file.'''
113
113
114 assert mode in 'ra+'
114 assert mode in 'ra+'
115 try:
115 try:
116 realfp = self.realopener(name, 'r')
116 realfp = self.realopener(name, 'r')
117 except IOError, err:
117 except IOError, err:
118 if err.errno != errno.ENOENT: raise
118 if err.errno != errno.ENOENT: raise
119 realfp = self.realopener(name, 'w+')
119 realfp = self.realopener(name, 'w+')
120 tmpname = self.tmpnames.get(name)
120 tmpname = self.tmpnames.get(name)
121 fp = appendfile(realfp, tmpname)
121 fp = appendfile(realfp, tmpname)
122 if tmpname is None:
122 if tmpname is None:
123 self.tmpnames[name] = fp.tmpname
123 self.tmpnames[name] = fp.tmpname
124 return fp
124 return fp
125
125
126 def writedata(self):
126 def writedata(self):
127 '''copy data from temp files to real files.'''
127 '''copy data from temp files to real files.'''
128 # write .d file before .i file.
128 # write .d file before .i file.
129 tmpnames = self.tmpnames.items()
129 tmpnames = self.tmpnames.items()
130 tmpnames.sort()
130 tmpnames.sort()
131 for name, tmpname in tmpnames:
131 for name, tmpname in tmpnames:
132 ifp = open(tmpname, 'rb')
132 ifp = open(tmpname, 'rb')
133 ofp = self.realopener(name, 'a')
133 ofp = self.realopener(name, 'a')
134 for chunk in util.filechunkiter(ifp):
134 for chunk in util.filechunkiter(ifp):
135 ofp.write(chunk)
135 ofp.write(chunk)
136 ifp.close()
136 ifp.close()
137 os.unlink(tmpname)
137 os.unlink(tmpname)
138 del self.tmpnames[name]
138 del self.tmpnames[name]
139 ofp.close()
139 ofp.close()
140
140
141 def cleanup(self):
141 def cleanup(self):
142 '''delete temp files (this discards unwritten data!)'''
142 '''delete temp files (this discards unwritten data!)'''
143 for tmpname in self.tmpnames.values():
143 for tmpname in self.tmpnames.values():
144 os.unlink(tmpname)
144 os.unlink(tmpname)
145
145
146 # files for changelog and manifest are in different appendopeners, so
146 # files for changelog and manifest are in different appendopeners, so
147 # not mixed up together.
147 # not mixed up together.
148
148
class appendchangelog(changelog.changelog, appendopener):
    '''changelog whose revlog I/O is routed through an appendopener.'''

    def __init__(self, opener):
        appendopener.__init__(self, opener)
        # pass ourselves as the opener so reads/writes go through
        # the appendfile machinery
        changelog.changelog.__init__(self, self)

    def checkinlinesize(self, fp, tr):
        # deliberate no-op: inline-size conversion is disabled while
        # updates are hidden behind appendfiles
        return
155
155
class appendmanifest(manifest.manifest, appendopener):
    '''manifest whose revlog I/O is routed through an appendopener.'''

    def __init__(self, opener):
        appendopener.__init__(self, opener)
        # pass ourselves as the opener so reads/writes go through
        # the appendfile machinery
        manifest.manifest.__init__(self, self)

    def checkinlinesize(self, fp, tr):
        # deliberate no-op: inline-size conversion is disabled while
        # updates are hidden behind appendfiles
        return
@@ -1,105 +1,105 b''
1 # changelog.py - changelog class for mercurial
1 # changelog.py - changelog class for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from revlog import *
8 from revlog import *
9 from i18n import _
9 from i18n import _
10 import os, time, util
10 import os, time, util
11
11
12 def _string_escape(text):
12 def _string_escape(text):
13 """
13 """
14 >>> d = {'nl': chr(10), 'bs': chr(92), 'cr': chr(13), 'nul': chr(0)}
14 >>> d = {'nl': chr(10), 'bs': chr(92), 'cr': chr(13), 'nul': chr(0)}
15 >>> s = "ab%(nl)scd%(bs)s%(bs)sn%(nul)sab%(cr)scd%(bs)s%(nl)s" % d
15 >>> s = "ab%(nl)scd%(bs)s%(bs)sn%(nul)sab%(cr)scd%(bs)s%(nl)s" % d
16 >>> s
16 >>> s
17 'ab\\ncd\\\\\\\\n\\x00ab\\rcd\\\\\\n'
17 'ab\\ncd\\\\\\\\n\\x00ab\\rcd\\\\\\n'
18 >>> res = _string_escape(s)
18 >>> res = _string_escape(s)
19 >>> s == _string_unescape(res)
19 >>> s == _string_unescape(res)
20 True
20 True
21 """
21 """
22 # subset of the string_escape codec
22 # subset of the string_escape codec
23 text = text.replace('\\', '\\\\').replace('\n', '\\n').replace('\r', '\\r')
23 text = text.replace('\\', '\\\\').replace('\n', '\\n').replace('\r', '\\r')
24 return text.replace('\0', '\\0')
24 return text.replace('\0', '\\0')
25
25
def _string_unescape(text):
    """Reverse _string_escape() via the string_escape codec."""
    return text.decode('string_escape')
28
28
class changelog(revlog):
    def __init__(self, opener):
        revlog.__init__(self, opener, "00changelog.i")

    def decode_extra(self, text):
        '''decode a \0-separated list of escaped "key:value" pairs.'''
        extra = {}
        for pair in text.split('\0'):
            if not pair:
                continue
            key, value = _string_unescape(pair).split(':', 1)
            extra[key] = value
        return extra

    def encode_extra(self, d):
        '''encode a dict as a \0-separated list of escaped pairs.'''
        items = [_string_escape(":".join(kv)) for kv in d.iteritems()]
        return "\0".join(items)

    def extract(self, text):
        """
        format used:
        nodeid\n        : manifest node in ascii
        user\n          : user, no \n or \r allowed
        time tz extra\n : date (time is int or float, timezone is int)
                        : extra is metadatas, encoded and separated by '\0'
                        : older versions ignore it
        files\n\n       : files modified by the cset, no \n or \r allowed
        (.*)            : comment (free text, ideally utf-8)

        changelog v0 doesn't use extra
        """
        if not text:
            return (nullid, "", (0, 0), [], "", {'branch': 'default'})
        last = text.index("\n\n")
        desc = util.tolocal(text[last + 2:])
        lines = text[:last].split('\n')
        manifest = bin(lines[0])
        user = util.tolocal(lines[1])

        date_fields = lines[2].split(' ', 2)
        if len(date_fields) != 3:
            # old-style date line without extra metadata
            time = float(date_fields.pop(0))
            try:
                # various tools did silly things with the time zone field.
                timezone = int(date_fields[0])
            except:
                timezone = 0
            extra = {}
        else:
            time, timezone, extra = date_fields
            time, timezone = float(time), int(timezone)
            extra = self.decode_extra(extra)
        if not extra.get('branch'):
            extra['branch'] = 'default'
        files = lines[3:]
        return (manifest, user, (time, timezone), files, desc, extra)

    def read(self, node):
        '''return the extracted fields of the changeset at node.'''
        return self.extract(self.revision(node))

    def add(self, manifest, list, desc, transaction, p1=None, p2=None,
            user=None, date=None, extra={}):
        # NOTE(review): `extra={}` is a shared mutable default, and a
        # caller-supplied dict may have its 'branch' key deleted below;
        # confirm callers expect that mutation.

        user, desc = util.fromlocal(user), util.fromlocal(desc)

        if date:
            parseddate = "%d %d" % util.parsedate(date)
        else:
            parseddate = "%d %d" % util.makedate()
        # the default branch is implicit, so do not record it
        if extra and extra.get("branch") in ("default", ""):
            del extra["branch"]
        if extra:
            extra = self.encode_extra(extra)
            parseddate = "%s %s" % (parseddate, extra)
        list.sort()
        text = "\n".join([hex(manifest), user, parseddate] +
                         list + ["", desc])
        return self.addrevision(text, transaction, self.count(), p1, p2)
@@ -1,3342 +1,3341 b''
1 # commands.py - command processing for mercurial
1 # commands.py - command processing for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import demandimport; demandimport.enable()
8 import demandimport; demandimport.enable()
9 from node import *
9 from node import *
10 from i18n import _
10 from i18n import _
11 import bisect, os, re, sys, signal, imp, urllib, pdb, shlex, stat
11 import bisect, os, re, sys, signal, imp, urllib, pdb, shlex, stat
12 import fancyopts, ui, hg, util, lock, revlog, bundlerepo
12 import fancyopts, ui, hg, util, lock, revlog, bundlerepo
13 import difflib, patch, time, help, mdiff, tempfile
13 import difflib, patch, time, help, mdiff, tempfile
14 import traceback, errno, version, atexit, socket
14 import traceback, errno, version, atexit, socket
15 import archival, changegroup, cmdutil, hgweb.server, sshserver
15 import archival, changegroup, cmdutil, hgweb.server, sshserver
16
16
class UnknownCommand(Exception):
    """Raised when a command is not found in the command table."""
class AmbiguousCommand(Exception):
    """Raised when a command shortcut matches more than one command."""
21
21
def bail_if_changed(repo):
    '''abort if the working directory has uncommitted changes.'''
    # only the first four status fields (modified, added, removed,
    # deleted) count as outstanding changes
    for changed in repo.status()[:4]:
        if changed:
            raise util.Abort(_("outstanding uncommitted changes"))
26
26
27 def logmessage(opts):
27 def logmessage(opts):
28 """ get the log message according to -m and -l option """
28 """ get the log message according to -m and -l option """
29 message = opts['message']
29 message = opts['message']
30 logfile = opts['logfile']
30 logfile = opts['logfile']
31
31
32 if message and logfile:
32 if message and logfile:
33 raise util.Abort(_('options --message and --logfile are mutually '
33 raise util.Abort(_('options --message and --logfile are mutually '
34 'exclusive'))
34 'exclusive'))
35 if not message and logfile:
35 if not message and logfile:
36 try:
36 try:
37 if logfile == '-':
37 if logfile == '-':
38 message = sys.stdin.read()
38 message = sys.stdin.read()
39 else:
39 else:
40 message = open(logfile).read()
40 message = open(logfile).read()
41 except IOError, inst:
41 except IOError, inst:
42 raise util.Abort(_("can't read commit message '%s': %s") %
42 raise util.Abort(_("can't read commit message '%s': %s") %
43 (logfile, inst.strerror))
43 (logfile, inst.strerror))
44 return message
44 return message
45
45
def setremoteconfig(ui, opts):
    "copy remote options to ui tree"
    # only copy options that were actually supplied
    for key in ('ssh', 'remotecmd'):
        if opts.get(key):
            ui.setconfig("ui", key, opts[key])
52
52
53 # Commands start here, listed alphabetically
53 # Commands start here, listed alphabetically
54
54
def add(ui, repo, *pats, **opts):
    """add the specified files on the next commit

    Schedule files to be version controlled and added to the repository.

    The files will be added to the repository at the next commit. To
    undo an add before that, see hg revert.

    If no names are given, add all files in the repository.
    """
    # docstring above is user-visible help text: kept verbatim
    names = []
    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
        # skip files that were neither named exactly nor unknown
        if not exact and repo.dirstate.state(abs) != '?':
            continue
        if exact:
            # exact matches are only announced in verbose mode
            if ui.verbose:
                ui.status(_('adding %s\n') % rel)
        else:
            ui.status(_('adding %s\n') % rel)
        names.append(abs)
    if not opts.get('dry_run'):
        repo.add(names)
77
77
def addremove(ui, repo, *pats, **opts):
    """add all new files, delete all missing files

    Add all new files and remove all missing files from the repository.

    New files are ignored if they match any of the patterns in .hgignore. As
    with add, these changes take effect at the next commit.

    Use the -s option to detect renamed files. With a parameter > 0,
    this compares every removed file with every added file and records
    those similar enough as renames. This option takes a percentage
    between 0 (disabled) and 100 (files must be identical) as its
    parameter. Detecting renamed files this way can be expensive.
    """
    # docstring above is user-visible help text: kept verbatim
    sim = float(opts.get('similarity') or 0)
    if sim > 100 or sim < 0:
        raise util.Abort(_('similarity must be between 0 and 100'))
    # cmdutil expects the similarity as a fraction, not a percentage
    return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)
96
96
def annotate(ui, repo, *pats, **opts):
    """show changeset information per file line

    List changes in files, showing the revision id responsible for each line

    This command is useful to discover who did a change or when a change took
    place.

    Without the -a option, annotate will avoid processing files it
    detects as binary. With -a, annotate will generate an annotation
    anyway, probably with undesirable results.
    """
    # docstring above is user-visible help text: kept verbatim
    getdate = util.cachefunc(lambda x: util.datestr(x.date()))

    if not pats:
        raise util.Abort(_('at least one file name or pattern required'))

    # option name -> function extracting that column from a file context
    opmap = [['user', lambda x: ui.shortuser(x.user())],
             ['number', lambda x: str(x.rev())],
             ['changeset', lambda x: short(x.node())],
             ['date', getdate], ['follow', lambda x: x.path()]]
    # default to revision numbers when no column was requested
    if (not opts['user'] and not opts['changeset'] and not opts['date']
        and not opts['follow']):
        opts['number'] = 1

    ctx = repo.changectx(opts['rev'])

    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
                                             node=ctx.node()):
        fctx = ctx.filectx(abs)
        if not opts['text'] and util.binary(fctx.data()):
            ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
            continue

        lines = fctx.annotate(follow=opts.get('follow'))
        pieces = []

        for name, getter in opmap:
            if opts[name]:
                vals = [getter(fc) for fc, dummy in lines]
                if vals:
                    # right-align each column to its widest entry
                    width = max(map(len, vals))
                    pieces.append(["%*s" % (width, v) for v in vals])

        if pieces:
            for cols, line in zip(zip(*pieces), lines):
                ui.write("%s: %s" % (" ".join(cols), line[1]))
144
144
def archive(ui, repo, dest, **opts):
    '''create unversioned archive of a repository revision

    By default, the revision used is the parent of the working
    directory; use "-r" to specify a different revision.

    To specify the type of archive to create, use "-t". Valid
    types are:

    "files" (default): a directory full of files
    "tar": tar archive, uncompressed
    "tbz2": tar archive, compressed using bzip2
    "tgz": tar archive, compressed using gzip
    "uzip": zip archive, uncompressed
    "zip": zip archive, compressed using deflate

    The exact name of the destination archive or directory is given
    using a format string; see "hg help export" for details.

    Each member added to an archive file has a directory prefix
    prepended. Use "-p" to specify a format string for the prefix.
    The default is the basename of the archive, with suffixes removed.
    '''
    # docstring above is user-visible help text: kept verbatim

    node = repo.changectx(opts['rev']).node()
    dest = cmdutil.make_filename(repo, dest, node)
    if os.path.realpath(dest) == repo.root:
        raise util.Abort(_('repository root cannot be destination'))
    dummy, matchfn, dummy = cmdutil.matchpats(repo, [], opts)
    kind = opts.get('type') or 'files'
    prefix = opts['prefix']
    if dest == '-':
        # archiving to stdout only makes sense for single-file formats
        if kind == 'files':
            raise util.Abort(_('cannot archive plain files to stdout'))
        dest = sys.stdout
        if not prefix:
            prefix = os.path.basename(repo.root) + '-%h'
    prefix = cmdutil.make_filename(repo, prefix, node)
    archival.archive(repo, dest, node, kind, not opts['no_decode'],
                     matchfn, prefix)
184
184
def backout(ui, repo, rev, **opts):
    '''reverse effect of earlier changeset

    Commit the backed out changes as a new changeset.  The new
    changeset is a child of the backed out changeset.

    If you back out a changeset other than the tip, a new head is
    created.  This head is the parent of the working directory.  If
    you back out an old changeset, your working directory will appear
    old after the backout.  You should merge the backout changeset
    with another head.

    The --merge option remembers the parent of the working directory
    before starting the backout, then merges the new head with that
    changeset afterwards.  This saves you from doing the merge by
    hand.  The result of this merge is not committed, as for a normal
    merge.'''

    bail_if_changed(repo)
    op1, op2 = repo.dirstate.parents()
    if op2 != nullid:
        raise util.Abort(_('outstanding uncommitted merge'))
    node = repo.lookup(rev)
    p1, p2 = repo.changelog.parents(node)
    if p1 == nullid:
        raise util.Abort(_('cannot back out a change with no parents'))
    if p2 != nullid:
        # backing out a merge: the user must say which parent to diff against
        if not opts['parent']:
            raise util.Abort(_('cannot back out a merge changeset without '
                               '--parent'))
        p = repo.lookup(opts['parent'])
        if p not in (p1, p2):
            raise util.Abort(_('%s is not a parent of %s') %
                             (short(p), short(node)))
        parent = p
    else:
        if opts['parent']:
            raise util.Abort(_('cannot use --parent on non-merge changeset'))
        parent = p1

    # check out the backed-out revision, then revert it to its parent
    # and commit the result as the backout changeset
    hg.clean(repo, node, show_stats=False)
    revert_opts = opts.copy()
    revert_opts['date'] = None
    revert_opts['all'] = True
    revert_opts['rev'] = hex(parent)
    revert(ui, repo, **revert_opts)

    commit_opts = opts.copy()
    commit_opts['addremove'] = False
    if not commit_opts['message'] and not commit_opts['logfile']:
        commit_opts['message'] = _("Backed out changeset %s") % (hex(node))
        commit_opts['force_editor'] = True
    commit(ui, repo, **commit_opts)

    def nice(n):
        return '%d:%s' % (repo.changelog.rev(n), short(n))
    ui.status(_('changeset %s backs out changeset %s\n') %
              (nice(repo.changelog.tip()), nice(node)))

    if op1 != node:
        if opts['merge']:
            # reunite the new head with the pre-backout working parent
            ui.status(_('merging with changeset %s\n') % nice(op1))
            hg.merge(repo, hex(op1))
        else:
            ui.status(_('the backout changeset is a new head - '
                        'do not forget to merge\n'))
            ui.status(_('(use "backout --merge" '
                        'if you want to auto-merge)\n'))
249
249
def branch(ui, repo, label=None, **opts):
    """set or show the current branch name

    With <name>, set the current branch name. Otherwise, show the
    current branch name.

    Unless --force is specified, branch will not let you set a
    branch name that shadows an existing branch.
    """

    if not label:
        # no name given: just print the working directory's branch
        ui.write("%s\n" % util.tolocal(repo.dirstate.branch()))
        return
    if not opts.get('force') and label in repo.branchtags():
        # refuse to shadow an existing branch unless a working-dir
        # parent is already on it
        parent_branches = [p.branch() for p in repo.workingctx().parents()]
        if label not in parent_branches:
            raise util.Abort(_('a branch of the same name already exists'
                               ' (use --force to override)'))
    repo.dirstate.setbranch(util.fromlocal(label))
268
268
def branches(ui, repo):
    """list repository named branches

    List the repository's named branches.
    """
    b = repo.branchtags()
    # sort most-recent first by negating the revision number
    l = [(-repo.changelog.rev(n), n, t) for t, n in b.items()]
    l.sort()
    # hexfunc is loop-invariant; compute it once instead of per branch
    hexfunc = ui.debugflag and hex or short
    for r, n, t in l:
        if ui.quiet:
            ui.write("%s\n" % t)
        else:
            spaces = " " * (30 - util.locallen(t))
            ui.write("%s%s %s:%s\n" % (t, spaces, -r, hexfunc(n)))
284
284
def bundle(ui, repo, fname, dest=None, **opts):
    """create a changegroup file

    Generate a compressed changegroup file collecting changesets not
    found in the other repository.

    If no destination repository is specified the destination is assumed
    to have all the nodes specified by one or more --base parameters.

    The bundle file can then be transferred using conventional means and
    applied to another repository with the unbundle or pull command.
    This is useful when direct push and pull are not available or when
    exporting an entire repository is undesirable.

    Applying bundles preserves all changeset contents including
    permissions, copy/rename information, and revision history.
    """
    revs = opts.get('rev') or None
    if revs:
        revs = [repo.lookup(rev) for rev in revs]
    base = opts.get('base')
    if base:
        if dest:
            # fixed typo in message: was "specifiying"
            raise util.Abort(_("--base is incompatible with specifying "
                               "a destination"))
        base = [repo.lookup(rev) for rev in base]
        # create the right base
        # XXX: nodesbetween / changegroup* should be "fixed" instead
        o = []
        has = {nullid: None}
        # everything reachable from the --base nodes is assumed present
        for n in base:
            has.update(repo.changelog.reachable(n))
        if revs:
            visit = list(revs)
        else:
            visit = repo.changelog.heads()
        seen = {}
        # walk ancestors until we hit the "has" frontier; roots of the
        # missing region are collected (in topological-ish order) in o
        while visit:
            n = visit.pop(0)
            parents = [p for p in repo.changelog.parents(n) if p not in has]
            if len(parents) == 0:
                o.insert(0, n)
            else:
                for p in parents:
                    if p not in seen:
                        seen[p] = 1
                        visit.append(p)
    else:
        setremoteconfig(ui, opts)
        dest = ui.expandpath(dest or 'default-push', dest or 'default')
        other = hg.repository(ui, dest)
        o = repo.findoutgoing(other, force=opts['force'])

    if revs:
        cg = repo.changegroupsubset(o, revs, 'bundle')
    else:
        cg = repo.changegroup(o, 'bundle')
    changegroup.writebundle(cg, fname, "HG10BZ")
343
343
def cat(ui, repo, file1, *pats, **opts):
    """output the current or given revision of files

    Print the specified files as they were at the given revision.
    If no revision is given, the parent of the working directory is used,
    or tip if no revision is checked out.

    Output may be to a file, in which case the name of the file is
    given using a format string.  The formatting rules are the same as
    for the export command, with the following additions:

    %s   basename of file being printed
    %d   dirname of file being printed, or '.' if in repo root
    %p   root-relative path name of file being printed
    """
    ctx = repo.changectx(opts['rev'])
    walker = cmdutil.walk(repo, (file1,) + pats, opts, ctx.node())
    for src, abs, rel, exact in walker:
        # one output file per matched path (may be stdout or a
        # format-string-derived filename)
        fp = cmdutil.make_file(repo, opts['output'], ctx.node(), pathname=abs)
        fp.write(ctx.filectx(abs).data())
364
364
def clone(ui, source, dest=None, **opts):
    """make a copy of an existing repository

    Create a copy of an existing repository in a new directory.

    If no destination directory name is specified, it defaults to the
    basename of the source.

    The location of the source is added to the new repository's
    .hg/hgrc file, as the default to be used for future pulls.

    For efficiency, hardlinks are used for cloning whenever the source
    and destination are on the same filesystem (note this applies only
    to the repository data, not to the checked out files).  Some
    filesystems, such as AFS, implement hardlinking incorrectly, but
    do not report errors.  In these cases, use the --pull option to
    avoid hardlinking.

    You can safely clone repositories and checked out files using full
    hardlinks with

      $ cp -al REPO REPOCLONE

    which is the fastest way to clone. However, the operation is not
    atomic (making sure REPO is not modified during the operation is
    up to you) and you have to make sure your editor breaks hardlinks
    (Emacs and most Linux Kernel tools do so).

    If you use the -r option to clone up to a specific revision, no
    subsequent revisions will be present in the cloned repository.
    This option implies --pull, even on local repositories.

    See pull for valid source format details.

    It is possible to specify an ssh:// URL as the destination, but no
    .hg/hgrc and working directory will be created on the remote side.
    Look at the help text for the pull command for important details
    about ssh:// URLs.
    """
    setremoteconfig(ui, opts)
    # all real work is delegated to the hg module
    hg.clone(ui, ui.expandpath(source), dest,
             pull=opts['pull'],
             stream=opts['uncompressed'],
             rev=opts['rev'],
             update=not opts['noupdate'])
410
410
411 def commit(ui, repo, *pats, **opts):
411 def commit(ui, repo, *pats, **opts):
412 """commit the specified files or all outstanding changes
412 """commit the specified files or all outstanding changes
413
413
414 Commit changes to the given files into the repository.
414 Commit changes to the given files into the repository.
415
415
416 If a list of files is omitted, all changes reported by "hg status"
416 If a list of files is omitted, all changes reported by "hg status"
417 will be committed.
417 will be committed.
418
418
419 If no commit message is specified, the editor configured in your hgrc
419 If no commit message is specified, the editor configured in your hgrc
420 or in the EDITOR environment variable is started to enter a message.
420 or in the EDITOR environment variable is started to enter a message.
421 """
421 """
422 message = logmessage(opts)
422 message = logmessage(opts)
423
423
424 if opts['addremove']:
424 if opts['addremove']:
425 cmdutil.addremove(repo, pats, opts)
425 cmdutil.addremove(repo, pats, opts)
426 fns, match, anypats = cmdutil.matchpats(repo, pats, opts)
426 fns, match, anypats = cmdutil.matchpats(repo, pats, opts)
427 if pats:
427 if pats:
428 status = repo.status(files=fns, match=match)
428 status = repo.status(files=fns, match=match)
429 modified, added, removed, deleted, unknown = status[:5]
429 modified, added, removed, deleted, unknown = status[:5]
430 files = modified + added + removed
430 files = modified + added + removed
431 slist = None
431 slist = None
432 for f in fns:
432 for f in fns:
433 if f == '.':
433 if f == '.':
434 continue
434 continue
435 if f not in files:
435 if f not in files:
436 rf = repo.wjoin(f)
436 rf = repo.wjoin(f)
437 if f in unknown:
437 if f in unknown:
438 raise util.Abort(_("file %s not tracked!") % rf)
438 raise util.Abort(_("file %s not tracked!") % rf)
439 try:
439 try:
440 mode = os.lstat(rf)[stat.ST_MODE]
440 mode = os.lstat(rf)[stat.ST_MODE]
441 except OSError:
441 except OSError:
442 raise util.Abort(_("file %s not found!") % rf)
442 raise util.Abort(_("file %s not found!") % rf)
443 if stat.S_ISDIR(mode):
443 if stat.S_ISDIR(mode):
444 name = f + '/'
444 name = f + '/'
445 if slist is None:
445 if slist is None:
446 slist = list(files)
446 slist = list(files)
447 slist.sort()
447 slist.sort()
448 i = bisect.bisect(slist, name)
448 i = bisect.bisect(slist, name)
449 if i >= len(slist) or not slist[i].startswith(name):
449 if i >= len(slist) or not slist[i].startswith(name):
450 raise util.Abort(_("no match under directory %s!")
450 raise util.Abort(_("no match under directory %s!")
451 % rf)
451 % rf)
452 elif not stat.S_ISREG(mode):
452 elif not stat.S_ISREG(mode):
453 raise util.Abort(_("can't commit %s: "
453 raise util.Abort(_("can't commit %s: "
454 "unsupported file type!") % rf)
454 "unsupported file type!") % rf)
455 else:
455 else:
456 files = []
456 files = []
457 try:
457 try:
458 repo.commit(files, message, opts['user'], opts['date'], match,
458 repo.commit(files, message, opts['user'], opts['date'], match,
459 force_editor=opts.get('force_editor'))
459 force_editor=opts.get('force_editor'))
460 except ValueError, inst:
460 except ValueError, inst:
461 raise util.Abort(str(inst))
461 raise util.Abort(str(inst))
462
462
463 def docopy(ui, repo, pats, opts, wlock):
463 def docopy(ui, repo, pats, opts, wlock):
464 # called with the repo lock held
464 # called with the repo lock held
465 #
465 #
466 # hgsep => pathname that uses "/" to separate directories
466 # hgsep => pathname that uses "/" to separate directories
467 # ossep => pathname that uses os.sep to separate directories
467 # ossep => pathname that uses os.sep to separate directories
468 cwd = repo.getcwd()
468 cwd = repo.getcwd()
469 errors = 0
469 errors = 0
470 copied = []
470 copied = []
471 targets = {}
471 targets = {}
472
472
473 # abs: hgsep
473 # abs: hgsep
474 # rel: ossep
474 # rel: ossep
475 # return: hgsep
475 # return: hgsep
476 def okaytocopy(abs, rel, exact):
476 def okaytocopy(abs, rel, exact):
477 reasons = {'?': _('is not managed'),
477 reasons = {'?': _('is not managed'),
478 'a': _('has been marked for add'),
478 'a': _('has been marked for add'),
479 'r': _('has been marked for remove')}
479 'r': _('has been marked for remove')}
480 state = repo.dirstate.state(abs)
480 state = repo.dirstate.state(abs)
481 reason = reasons.get(state)
481 reason = reasons.get(state)
482 if reason:
482 if reason:
483 if state == 'a':
483 if state == 'a':
484 origsrc = repo.dirstate.copied(abs)
484 origsrc = repo.dirstate.copied(abs)
485 if origsrc is not None:
485 if origsrc is not None:
486 return origsrc
486 return origsrc
487 if exact:
487 if exact:
488 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
488 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
489 else:
489 else:
490 return abs
490 return abs
491
491
492 # origsrc: hgsep
492 # origsrc: hgsep
493 # abssrc: hgsep
493 # abssrc: hgsep
494 # relsrc: ossep
494 # relsrc: ossep
495 # target: ossep
495 # target: ossep
496 def copy(origsrc, abssrc, relsrc, target, exact):
496 def copy(origsrc, abssrc, relsrc, target, exact):
497 abstarget = util.canonpath(repo.root, cwd, target)
497 abstarget = util.canonpath(repo.root, cwd, target)
498 reltarget = util.pathto(repo.root, cwd, abstarget)
498 reltarget = util.pathto(repo.root, cwd, abstarget)
499 prevsrc = targets.get(abstarget)
499 prevsrc = targets.get(abstarget)
500 if prevsrc is not None:
500 if prevsrc is not None:
501 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
501 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
502 (reltarget, util.localpath(abssrc),
502 (reltarget, util.localpath(abssrc),
503 util.localpath(prevsrc)))
503 util.localpath(prevsrc)))
504 return
504 return
505 if (not opts['after'] and os.path.exists(reltarget) or
505 if (not opts['after'] and os.path.exists(reltarget) or
506 opts['after'] and repo.dirstate.state(abstarget) not in '?r'):
506 opts['after'] and repo.dirstate.state(abstarget) not in '?r'):
507 if not opts['force']:
507 if not opts['force']:
508 ui.warn(_('%s: not overwriting - file exists\n') %
508 ui.warn(_('%s: not overwriting - file exists\n') %
509 reltarget)
509 reltarget)
510 return
510 return
511 if not opts['after'] and not opts.get('dry_run'):
511 if not opts['after'] and not opts.get('dry_run'):
512 os.unlink(reltarget)
512 os.unlink(reltarget)
513 if opts['after']:
513 if opts['after']:
514 if not os.path.exists(reltarget):
514 if not os.path.exists(reltarget):
515 return
515 return
516 else:
516 else:
517 targetdir = os.path.dirname(reltarget) or '.'
517 targetdir = os.path.dirname(reltarget) or '.'
518 if not os.path.isdir(targetdir) and not opts.get('dry_run'):
518 if not os.path.isdir(targetdir) and not opts.get('dry_run'):
519 os.makedirs(targetdir)
519 os.makedirs(targetdir)
520 try:
520 try:
521 restore = repo.dirstate.state(abstarget) == 'r'
521 restore = repo.dirstate.state(abstarget) == 'r'
522 if restore and not opts.get('dry_run'):
522 if restore and not opts.get('dry_run'):
523 repo.undelete([abstarget], wlock)
523 repo.undelete([abstarget], wlock)
524 try:
524 try:
525 if not opts.get('dry_run'):
525 if not opts.get('dry_run'):
526 util.copyfile(relsrc, reltarget)
526 util.copyfile(relsrc, reltarget)
527 restore = False
527 restore = False
528 finally:
528 finally:
529 if restore:
529 if restore:
530 repo.remove([abstarget], wlock)
530 repo.remove([abstarget], wlock)
531 except IOError, inst:
531 except IOError, inst:
532 if inst.errno == errno.ENOENT:
532 if inst.errno == errno.ENOENT:
533 ui.warn(_('%s: deleted in working copy\n') % relsrc)
533 ui.warn(_('%s: deleted in working copy\n') % relsrc)
534 else:
534 else:
535 ui.warn(_('%s: cannot copy - %s\n') %
535 ui.warn(_('%s: cannot copy - %s\n') %
536 (relsrc, inst.strerror))
536 (relsrc, inst.strerror))
537 errors += 1
537 errors += 1
538 return
538 return
539 if ui.verbose or not exact:
539 if ui.verbose or not exact:
540 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
540 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
541 targets[abstarget] = abssrc
541 targets[abstarget] = abssrc
542 if abstarget != origsrc and not opts.get('dry_run'):
542 if abstarget != origsrc and not opts.get('dry_run'):
543 repo.copy(origsrc, abstarget, wlock)
543 repo.copy(origsrc, abstarget, wlock)
544 copied.append((abssrc, relsrc, exact))
544 copied.append((abssrc, relsrc, exact))
545
545
546 # pat: ossep
546 # pat: ossep
547 # dest ossep
547 # dest ossep
548 # srcs: list of (hgsep, hgsep, ossep, bool)
548 # srcs: list of (hgsep, hgsep, ossep, bool)
549 # return: function that takes hgsep and returns ossep
549 # return: function that takes hgsep and returns ossep
550 def targetpathfn(pat, dest, srcs):
550 def targetpathfn(pat, dest, srcs):
551 if os.path.isdir(pat):
551 if os.path.isdir(pat):
552 abspfx = util.canonpath(repo.root, cwd, pat)
552 abspfx = util.canonpath(repo.root, cwd, pat)
553 abspfx = util.localpath(abspfx)
553 abspfx = util.localpath(abspfx)
554 if destdirexists:
554 if destdirexists:
555 striplen = len(os.path.split(abspfx)[0])
555 striplen = len(os.path.split(abspfx)[0])
556 else:
556 else:
557 striplen = len(abspfx)
557 striplen = len(abspfx)
558 if striplen:
558 if striplen:
559 striplen += len(os.sep)
559 striplen += len(os.sep)
560 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
560 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
561 elif destdirexists:
561 elif destdirexists:
562 res = lambda p: os.path.join(dest,
562 res = lambda p: os.path.join(dest,
563 os.path.basename(util.localpath(p)))
563 os.path.basename(util.localpath(p)))
564 else:
564 else:
565 res = lambda p: dest
565 res = lambda p: dest
566 return res
566 return res
567
567
568 # pat: ossep
568 # pat: ossep
569 # dest ossep
569 # dest ossep
570 # srcs: list of (hgsep, hgsep, ossep, bool)
570 # srcs: list of (hgsep, hgsep, ossep, bool)
571 # return: function that takes hgsep and returns ossep
571 # return: function that takes hgsep and returns ossep
572 def targetpathafterfn(pat, dest, srcs):
572 def targetpathafterfn(pat, dest, srcs):
573 if util.patkind(pat, None)[0]:
573 if util.patkind(pat, None)[0]:
574 # a mercurial pattern
574 # a mercurial pattern
575 res = lambda p: os.path.join(dest,
575 res = lambda p: os.path.join(dest,
576 os.path.basename(util.localpath(p)))
576 os.path.basename(util.localpath(p)))
577 else:
577 else:
578 abspfx = util.canonpath(repo.root, cwd, pat)
578 abspfx = util.canonpath(repo.root, cwd, pat)
579 if len(abspfx) < len(srcs[0][0]):
579 if len(abspfx) < len(srcs[0][0]):
580 # A directory. Either the target path contains the last
580 # A directory. Either the target path contains the last
581 # component of the source path or it does not.
581 # component of the source path or it does not.
582 def evalpath(striplen):
582 def evalpath(striplen):
583 score = 0
583 score = 0
584 for s in srcs:
584 for s in srcs:
585 t = os.path.join(dest, util.localpath(s[0])[striplen:])
585 t = os.path.join(dest, util.localpath(s[0])[striplen:])
586 if os.path.exists(t):
586 if os.path.exists(t):
587 score += 1
587 score += 1
588 return score
588 return score
589
589
590 abspfx = util.localpath(abspfx)
590 abspfx = util.localpath(abspfx)
591 striplen = len(abspfx)
591 striplen = len(abspfx)
592 if striplen:
592 if striplen:
593 striplen += len(os.sep)
593 striplen += len(os.sep)
594 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
594 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
595 score = evalpath(striplen)
595 score = evalpath(striplen)
596 striplen1 = len(os.path.split(abspfx)[0])
596 striplen1 = len(os.path.split(abspfx)[0])
597 if striplen1:
597 if striplen1:
598 striplen1 += len(os.sep)
598 striplen1 += len(os.sep)
599 if evalpath(striplen1) > score:
599 if evalpath(striplen1) > score:
600 striplen = striplen1
600 striplen = striplen1
601 res = lambda p: os.path.join(dest,
601 res = lambda p: os.path.join(dest,
602 util.localpath(p)[striplen:])
602 util.localpath(p)[striplen:])
603 else:
603 else:
604 # a file
604 # a file
605 if destdirexists:
605 if destdirexists:
606 res = lambda p: os.path.join(dest,
606 res = lambda p: os.path.join(dest,
607 os.path.basename(util.localpath(p)))
607 os.path.basename(util.localpath(p)))
608 else:
608 else:
609 res = lambda p: dest
609 res = lambda p: dest
610 return res
610 return res
611
611
612
612
613 pats = util.expand_glob(pats)
613 pats = util.expand_glob(pats)
614 if not pats:
614 if not pats:
615 raise util.Abort(_('no source or destination specified'))
615 raise util.Abort(_('no source or destination specified'))
616 if len(pats) == 1:
616 if len(pats) == 1:
617 raise util.Abort(_('no destination specified'))
617 raise util.Abort(_('no destination specified'))
618 dest = pats.pop()
618 dest = pats.pop()
619 destdirexists = os.path.isdir(dest)
619 destdirexists = os.path.isdir(dest)
620 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
620 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
621 raise util.Abort(_('with multiple sources, destination must be an '
621 raise util.Abort(_('with multiple sources, destination must be an '
622 'existing directory'))
622 'existing directory'))
623 if opts['after']:
623 if opts['after']:
624 tfn = targetpathafterfn
624 tfn = targetpathafterfn
625 else:
625 else:
626 tfn = targetpathfn
626 tfn = targetpathfn
627 copylist = []
627 copylist = []
628 for pat in pats:
628 for pat in pats:
629 srcs = []
629 srcs = []
630 for tag, abssrc, relsrc, exact in cmdutil.walk(repo, [pat], opts,
630 for tag, abssrc, relsrc, exact in cmdutil.walk(repo, [pat], opts,
631 globbed=True):
631 globbed=True):
632 origsrc = okaytocopy(abssrc, relsrc, exact)
632 origsrc = okaytocopy(abssrc, relsrc, exact)
633 if origsrc:
633 if origsrc:
634 srcs.append((origsrc, abssrc, relsrc, exact))
634 srcs.append((origsrc, abssrc, relsrc, exact))
635 if not srcs:
635 if not srcs:
636 continue
636 continue
637 copylist.append((tfn(pat, dest, srcs), srcs))
637 copylist.append((tfn(pat, dest, srcs), srcs))
638 if not copylist:
638 if not copylist:
639 raise util.Abort(_('no files to copy'))
639 raise util.Abort(_('no files to copy'))
640
640
641 for targetpath, srcs in copylist:
641 for targetpath, srcs in copylist:
642 for origsrc, abssrc, relsrc, exact in srcs:
642 for origsrc, abssrc, relsrc, exact in srcs:
643 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
643 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
644
644
645 if errors:
645 if errors:
646 ui.warn(_('(consider using --after)\n'))
646 ui.warn(_('(consider using --after)\n'))
647 return errors, copied
647 return errors, copied
648
648
def copy(ui, repo, *pats, **opts):
    """mark files as copied for the next commit

    Mark dest as having copies of source files.  If dest is a
    directory, copies are put in that directory.  If dest is a file,
    there can only be one source.

    By default, this command copies the contents of files as they
    stand in the working directory.  If invoked with --after, the
    operation is recorded, but no copying is performed.

    This command takes effect in the next commit. To undo a copy
    before that, see hg revert.
    """
    # grab the working-dir lock and delegate to the shared helper;
    # only the error count matters to the caller here
    wlock = repo.wlock(0)
    errs, copied = docopy(ui, repo, pats, opts, wlock)
    return errs
666
666
def debugancestor(ui, index, rev1, rev2):
    """find the ancestor revision of two revisions in a given index"""
    opener = util.opener(os.getcwd(), audit=False)
    r = revlog.revlog(opener, index)
    ancestor = r.ancestor(r.lookup(rev1), r.lookup(rev2))
    ui.write("%d:%s\n" % (r.rev(ancestor), hex(ancestor)))
672
672
def debugcomplete(ui, cmd='', **opts):
    """returns the completion list associated with the given command"""

    if opts['options']:
        # list the short and long options of the command (plus globals)
        options = []
        otables = [globalopts]
        if cmd:
            aliases, entry = findcmd(ui, cmd)
            otables.append(entry[1])
        for table in otables:
            for opt in table:
                if opt[0]:
                    options.append('-%s' % opt[0])
                options.append('--%s' % opt[1])
        ui.write("%s\n" % "\n".join(options))
        return

    # otherwise list command names matching the (possibly empty) prefix
    clist = findpossible(ui, cmd).keys()
    clist.sort()
    ui.write("%s\n" % "\n".join(clist))
693
693
694 def debugrebuildstate(ui, repo, rev=""):
694 def debugrebuildstate(ui, repo, rev=""):
695 """rebuild the dirstate as it would look like for the given revision"""
695 """rebuild the dirstate as it would look like for the given revision"""
696 if rev == "":
696 if rev == "":
697 rev = repo.changelog.tip()
697 rev = repo.changelog.tip()
698 ctx = repo.changectx(rev)
698 ctx = repo.changectx(rev)
699 files = ctx.manifest()
699 files = ctx.manifest()
700 wlock = repo.wlock()
700 wlock = repo.wlock()
701 repo.dirstate.rebuild(rev, files)
701 repo.dirstate.rebuild(rev, files)
702
702
703 def debugcheckstate(ui, repo):
703 def debugcheckstate(ui, repo):
704 """validate the correctness of the current dirstate"""
704 """validate the correctness of the current dirstate"""
705 parent1, parent2 = repo.dirstate.parents()
705 parent1, parent2 = repo.dirstate.parents()
706 repo.dirstate.read()
706 repo.dirstate.read()
707 dc = repo.dirstate.map
707 dc = repo.dirstate.map
708 keys = dc.keys()
708 keys = dc.keys()
709 keys.sort()
709 keys.sort()
710 m1 = repo.changectx(parent1).manifest()
710 m1 = repo.changectx(parent1).manifest()
711 m2 = repo.changectx(parent2).manifest()
711 m2 = repo.changectx(parent2).manifest()
712 errors = 0
712 errors = 0
713 for f in dc:
713 for f in dc:
714 state = repo.dirstate.state(f)
714 state = repo.dirstate.state(f)
715 if state in "nr" and f not in m1:
715 if state in "nr" and f not in m1:
716 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
716 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
717 errors += 1
717 errors += 1
718 if state in "a" and f in m1:
718 if state in "a" and f in m1:
719 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
719 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
720 errors += 1
720 errors += 1
721 if state in "m" and f not in m1 and f not in m2:
721 if state in "m" and f not in m1 and f not in m2:
722 ui.warn(_("%s in state %s, but not in either manifest\n") %
722 ui.warn(_("%s in state %s, but not in either manifest\n") %
723 (f, state))
723 (f, state))
724 errors += 1
724 errors += 1
725 for f in m1:
725 for f in m1:
726 state = repo.dirstate.state(f)
726 state = repo.dirstate.state(f)
727 if state not in "nrm":
727 if state not in "nrm":
728 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
728 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
729 errors += 1
729 errors += 1
730 if errors:
730 if errors:
731 error = _(".hg/dirstate inconsistent with current parent's manifest")
731 error = _(".hg/dirstate inconsistent with current parent's manifest")
732 raise util.Abort(error)
732 raise util.Abort(error)
733
733
734 def showconfig(ui, repo, *values, **opts):
734 def showconfig(ui, repo, *values, **opts):
735 """show combined config settings from all hgrc files
735 """show combined config settings from all hgrc files
736
736
737 With no args, print names and values of all config items.
737 With no args, print names and values of all config items.
738
738
739 With one arg of the form section.name, print just the value of
739 With one arg of the form section.name, print just the value of
740 that config item.
740 that config item.
741
741
742 With multiple args, print names and values of all config items
742 With multiple args, print names and values of all config items
743 with matching section names."""
743 with matching section names."""
744
744
745 untrusted = bool(opts.get('untrusted'))
745 untrusted = bool(opts.get('untrusted'))
746 if values:
746 if values:
747 if len([v for v in values if '.' in v]) > 1:
747 if len([v for v in values if '.' in v]) > 1:
748 raise util.Abort(_('only one config item permitted'))
748 raise util.Abort(_('only one config item permitted'))
749 for section, name, value in ui.walkconfig(untrusted=untrusted):
749 for section, name, value in ui.walkconfig(untrusted=untrusted):
750 sectname = section + '.' + name
750 sectname = section + '.' + name
751 if values:
751 if values:
752 for v in values:
752 for v in values:
753 if v == section:
753 if v == section:
754 ui.write('%s=%s\n' % (sectname, value))
754 ui.write('%s=%s\n' % (sectname, value))
755 elif v == sectname:
755 elif v == sectname:
756 ui.write(value, '\n')
756 ui.write(value, '\n')
757 else:
757 else:
758 ui.write('%s=%s\n' % (sectname, value))
758 ui.write('%s=%s\n' % (sectname, value))
759
759
760 def debugsetparents(ui, repo, rev1, rev2=None):
760 def debugsetparents(ui, repo, rev1, rev2=None):
761 """manually set the parents of the current working directory
761 """manually set the parents of the current working directory
762
762
763 This is useful for writing repository conversion tools, but should
763 This is useful for writing repository conversion tools, but should
764 be used with care.
764 be used with care.
765 """
765 """
766
766
767 if not rev2:
767 if not rev2:
768 rev2 = hex(nullid)
768 rev2 = hex(nullid)
769
769
770 wlock = repo.wlock()
770 wlock = repo.wlock()
771 try:
771 try:
772 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
772 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
773 finally:
773 finally:
774 wlock.release()
774 wlock.release()
775
775
776 def debugstate(ui, repo):
776 def debugstate(ui, repo):
777 """show the contents of the current dirstate"""
777 """show the contents of the current dirstate"""
778 repo.dirstate.read()
778 repo.dirstate.read()
779 dc = repo.dirstate.map
779 dc = repo.dirstate.map
780 keys = dc.keys()
780 keys = dc.keys()
781 keys.sort()
781 keys.sort()
782 for file_ in keys:
782 for file_ in keys:
783 if dc[file_][3] == -1:
783 if dc[file_][3] == -1:
784 # Pad or slice to locale representation
784 # Pad or slice to locale representation
785 locale_len = len(time.strftime("%x %X", time.localtime(0)))
785 locale_len = len(time.strftime("%x %X", time.localtime(0)))
786 timestr = 'unset'
786 timestr = 'unset'
787 timestr = timestr[:locale_len] + ' '*(locale_len - len(timestr))
787 timestr = timestr[:locale_len] + ' '*(locale_len - len(timestr))
788 else:
788 else:
789 timestr = time.strftime("%x %X", time.localtime(dc[file_][3]))
789 timestr = time.strftime("%x %X", time.localtime(dc[file_][3]))
790 ui.write("%c %3o %10d %s %s\n"
790 ui.write("%c %3o %10d %s %s\n"
791 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
791 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
792 timestr, file_))
792 timestr, file_))
793 for f in repo.dirstate.copies():
793 for f in repo.dirstate.copies():
794 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
794 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
795
795
796 def debugdata(ui, file_, rev):
796 def debugdata(ui, file_, rev):
797 """dump the contents of an data file revision"""
797 """dump the contents of a data file revision"""
798 r = revlog.revlog(util.opener(os.getcwd(), audit=False),
798 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_[:-2] + ".i")
799 file_[:-2] + ".i", 0)
800 try:
799 try:
801 ui.write(r.revision(r.lookup(rev)))
800 ui.write(r.revision(r.lookup(rev)))
802 except KeyError:
801 except KeyError:
803 raise util.Abort(_('invalid revision identifier %s') % rev)
802 raise util.Abort(_('invalid revision identifier %s') % rev)
804
803
805 def debugdate(ui, date, range=None, **opts):
804 def debugdate(ui, date, range=None, **opts):
806 """parse and display a date"""
805 """parse and display a date"""
807 if opts["extended"]:
806 if opts["extended"]:
808 d = util.parsedate(date, util.extendeddateformats)
807 d = util.parsedate(date, util.extendeddateformats)
809 else:
808 else:
810 d = util.parsedate(date)
809 d = util.parsedate(date)
811 ui.write("internal: %s %s\n" % d)
810 ui.write("internal: %s %s\n" % d)
812 ui.write("standard: %s\n" % util.datestr(d))
811 ui.write("standard: %s\n" % util.datestr(d))
813 if range:
812 if range:
814 m = util.matchdate(range)
813 m = util.matchdate(range)
815 ui.write("match: %s\n" % m(d[0]))
814 ui.write("match: %s\n" % m(d[0]))
816
815
817 def debugindex(ui, file_):
816 def debugindex(ui, file_):
818 """dump the contents of an index file"""
817 """dump the contents of an index file"""
819 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, 0)
818 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
820 ui.write(" rev offset length base linkrev" +
819 ui.write(" rev offset length base linkrev" +
821 " nodeid p1 p2\n")
820 " nodeid p1 p2\n")
822 for i in xrange(r.count()):
821 for i in xrange(r.count()):
823 node = r.node(i)
822 node = r.node(i)
824 pp = r.parents(node)
823 pp = r.parents(node)
825 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
824 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
826 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
825 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
827 short(node), short(pp[0]), short(pp[1])))
826 short(node), short(pp[0]), short(pp[1])))
828
827
829 def debugindexdot(ui, file_):
828 def debugindexdot(ui, file_):
830 """dump an index DAG as a .dot file"""
829 """dump an index DAG as a .dot file"""
831 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, 0)
830 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
832 ui.write("digraph G {\n")
831 ui.write("digraph G {\n")
833 for i in xrange(r.count()):
832 for i in xrange(r.count()):
834 node = r.node(i)
833 node = r.node(i)
835 pp = r.parents(node)
834 pp = r.parents(node)
836 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
835 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
837 if pp[1] != nullid:
836 if pp[1] != nullid:
838 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
837 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
839 ui.write("}\n")
838 ui.write("}\n")
840
839
841 def debuginstall(ui):
840 def debuginstall(ui):
842 '''test Mercurial installation'''
841 '''test Mercurial installation'''
843
842
844 def writetemp(contents):
843 def writetemp(contents):
845 (fd, name) = tempfile.mkstemp()
844 (fd, name) = tempfile.mkstemp()
846 f = os.fdopen(fd, "wb")
845 f = os.fdopen(fd, "wb")
847 f.write(contents)
846 f.write(contents)
848 f.close()
847 f.close()
849 return name
848 return name
850
849
851 problems = 0
850 problems = 0
852
851
853 # encoding
852 # encoding
854 ui.status(_("Checking encoding (%s)...\n") % util._encoding)
853 ui.status(_("Checking encoding (%s)...\n") % util._encoding)
855 try:
854 try:
856 util.fromlocal("test")
855 util.fromlocal("test")
857 except util.Abort, inst:
856 except util.Abort, inst:
858 ui.write(" %s\n" % inst)
857 ui.write(" %s\n" % inst)
859 ui.write(_(" (check that your locale is properly set)\n"))
858 ui.write(_(" (check that your locale is properly set)\n"))
860 problems += 1
859 problems += 1
861
860
862 # compiled modules
861 # compiled modules
863 ui.status(_("Checking extensions...\n"))
862 ui.status(_("Checking extensions...\n"))
864 try:
863 try:
865 import bdiff, mpatch, base85
864 import bdiff, mpatch, base85
866 except Exception, inst:
865 except Exception, inst:
867 ui.write(" %s\n" % inst)
866 ui.write(" %s\n" % inst)
868 ui.write(_(" One or more extensions could not be found"))
867 ui.write(_(" One or more extensions could not be found"))
869 ui.write(_(" (check that you compiled the extensions)\n"))
868 ui.write(_(" (check that you compiled the extensions)\n"))
870 problems += 1
869 problems += 1
871
870
872 # templates
871 # templates
873 ui.status(_("Checking templates...\n"))
872 ui.status(_("Checking templates...\n"))
874 try:
873 try:
875 import templater
874 import templater
876 t = templater.templater(templater.templatepath("map-cmdline.default"))
875 t = templater.templater(templater.templatepath("map-cmdline.default"))
877 except Exception, inst:
876 except Exception, inst:
878 ui.write(" %s\n" % inst)
877 ui.write(" %s\n" % inst)
879 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
878 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
880 problems += 1
879 problems += 1
881
880
882 # patch
881 # patch
883 ui.status(_("Checking patch...\n"))
882 ui.status(_("Checking patch...\n"))
884 path = os.environ.get('PATH', '')
883 path = os.environ.get('PATH', '')
885 patcher = util.find_in_path('gpatch', path,
884 patcher = util.find_in_path('gpatch', path,
886 util.find_in_path('patch', path, None))
885 util.find_in_path('patch', path, None))
887 if not patcher:
886 if not patcher:
888 ui.write(_(" Can't find patch or gpatch in PATH\n"))
887 ui.write(_(" Can't find patch or gpatch in PATH\n"))
889 ui.write(_(" (specify a patch utility in your .hgrc file)\n"))
888 ui.write(_(" (specify a patch utility in your .hgrc file)\n"))
890 problems += 1
889 problems += 1
891 else:
890 else:
892 # actually attempt a patch here
891 # actually attempt a patch here
893 a = "1\n2\n3\n4\n"
892 a = "1\n2\n3\n4\n"
894 b = "1\n2\n3\ninsert\n4\n"
893 b = "1\n2\n3\ninsert\n4\n"
895 d = mdiff.unidiff(a, None, b, None, "a")
894 d = mdiff.unidiff(a, None, b, None, "a")
896 fa = writetemp(a)
895 fa = writetemp(a)
897 fd = writetemp(d)
896 fd = writetemp(d)
898 fp = os.popen('%s %s %s' % (patcher, fa, fd))
897 fp = os.popen('%s %s %s' % (patcher, fa, fd))
899 files = []
898 files = []
900 output = ""
899 output = ""
901 for line in fp:
900 for line in fp:
902 output += line
901 output += line
903 if line.startswith('patching file '):
902 if line.startswith('patching file '):
904 pf = util.parse_patch_output(line.rstrip())
903 pf = util.parse_patch_output(line.rstrip())
905 files.append(pf)
904 files.append(pf)
906 if files != [fa]:
905 if files != [fa]:
907 ui.write(_(" unexpected patch output!"))
906 ui.write(_(" unexpected patch output!"))
908 ui.write(_(" (you may have an incompatible version of patch)\n"))
907 ui.write(_(" (you may have an incompatible version of patch)\n"))
909 ui.write(output)
908 ui.write(output)
910 problems += 1
909 problems += 1
911 a = file(fa).read()
910 a = file(fa).read()
912 if a != b:
911 if a != b:
913 ui.write(_(" patch test failed!"))
912 ui.write(_(" patch test failed!"))
914 ui.write(_(" (you may have an incompatible version of patch)\n"))
913 ui.write(_(" (you may have an incompatible version of patch)\n"))
915 problems += 1
914 problems += 1
916 os.unlink(fa)
915 os.unlink(fa)
917 os.unlink(fd)
916 os.unlink(fd)
918
917
919 # merge helper
918 # merge helper
920 ui.status(_("Checking merge helper...\n"))
919 ui.status(_("Checking merge helper...\n"))
921 cmd = (os.environ.get("HGMERGE") or ui.config("ui", "merge")
920 cmd = (os.environ.get("HGMERGE") or ui.config("ui", "merge")
922 or "hgmerge")
921 or "hgmerge")
923 cmdpath = util.find_in_path(cmd, path)
922 cmdpath = util.find_in_path(cmd, path)
924 if not cmdpath:
923 if not cmdpath:
925 cmdpath = util.find_in_path(cmd.split()[0], path)
924 cmdpath = util.find_in_path(cmd.split()[0], path)
926 if not cmdpath:
925 if not cmdpath:
927 if cmd == 'hgmerge':
926 if cmd == 'hgmerge':
928 ui.write(_(" No merge helper set and can't find default"
927 ui.write(_(" No merge helper set and can't find default"
929 " hgmerge script in PATH\n"))
928 " hgmerge script in PATH\n"))
930 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
929 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
931 else:
930 else:
932 ui.write(_(" Can't find merge helper '%s' in PATH\n") % cmd)
931 ui.write(_(" Can't find merge helper '%s' in PATH\n") % cmd)
933 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
932 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
934 problems += 1
933 problems += 1
935 else:
934 else:
936 # actually attempt a patch here
935 # actually attempt a patch here
937 fa = writetemp("1\n2\n3\n4\n")
936 fa = writetemp("1\n2\n3\n4\n")
938 fl = writetemp("1\n2\n3\ninsert\n4\n")
937 fl = writetemp("1\n2\n3\ninsert\n4\n")
939 fr = writetemp("begin\n1\n2\n3\n4\n")
938 fr = writetemp("begin\n1\n2\n3\n4\n")
940 r = os.system('%s %s %s %s' % (cmd, fl, fa, fr))
939 r = os.system('%s %s %s %s' % (cmd, fl, fa, fr))
941 if r:
940 if r:
942 ui.write(_(" got unexpected merge error %d!") % r)
941 ui.write(_(" got unexpected merge error %d!") % r)
943 problems += 1
942 problems += 1
944 m = file(fl).read()
943 m = file(fl).read()
945 if m != "begin\n1\n2\n3\ninsert\n4\n":
944 if m != "begin\n1\n2\n3\ninsert\n4\n":
946 ui.write(_(" got unexpected merge results!") % r)
945 ui.write(_(" got unexpected merge results!") % r)
947 ui.write(_(" (your merge helper may have the"
946 ui.write(_(" (your merge helper may have the"
948 " wrong argument order)\n"))
947 " wrong argument order)\n"))
949 ui.write(m)
948 ui.write(m)
950 os.unlink(fa)
949 os.unlink(fa)
951 os.unlink(fl)
950 os.unlink(fl)
952 os.unlink(fr)
951 os.unlink(fr)
953
952
954 # editor
953 # editor
955 ui.status(_("Checking commit editor...\n"))
954 ui.status(_("Checking commit editor...\n"))
956 editor = (os.environ.get("HGEDITOR") or
955 editor = (os.environ.get("HGEDITOR") or
957 ui.config("ui", "editor") or
956 ui.config("ui", "editor") or
958 os.environ.get("EDITOR", "vi"))
957 os.environ.get("EDITOR", "vi"))
959 cmdpath = util.find_in_path(editor, path)
958 cmdpath = util.find_in_path(editor, path)
960 if not cmdpath:
959 if not cmdpath:
961 cmdpath = util.find_in_path(editor.split()[0], path)
960 cmdpath = util.find_in_path(editor.split()[0], path)
962 if not cmdpath:
961 if not cmdpath:
963 if editor == 'vi':
962 if editor == 'vi':
964 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
963 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
965 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
964 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
966 else:
965 else:
967 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
966 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
968 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
967 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
969 problems += 1
968 problems += 1
970
969
971 # check username
970 # check username
972 ui.status(_("Checking username...\n"))
971 ui.status(_("Checking username...\n"))
973 user = os.environ.get("HGUSER")
972 user = os.environ.get("HGUSER")
974 if user is None:
973 if user is None:
975 user = ui.config("ui", "username")
974 user = ui.config("ui", "username")
976 if user is None:
975 if user is None:
977 user = os.environ.get("EMAIL")
976 user = os.environ.get("EMAIL")
978 if not user:
977 if not user:
979 ui.warn(" ")
978 ui.warn(" ")
980 ui.username()
979 ui.username()
981 ui.write(_(" (specify a username in your .hgrc file)\n"))
980 ui.write(_(" (specify a username in your .hgrc file)\n"))
982
981
983 if not problems:
982 if not problems:
984 ui.status(_("No problems detected\n"))
983 ui.status(_("No problems detected\n"))
985 else:
984 else:
986 ui.write(_("%s problems detected,"
985 ui.write(_("%s problems detected,"
987 " please check your install!\n") % problems)
986 " please check your install!\n") % problems)
988
987
989 return problems
988 return problems
990
989
991 def debugrename(ui, repo, file1, *pats, **opts):
990 def debugrename(ui, repo, file1, *pats, **opts):
992 """dump rename information"""
991 """dump rename information"""
993
992
994 ctx = repo.changectx(opts.get('rev', 'tip'))
993 ctx = repo.changectx(opts.get('rev', 'tip'))
995 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
994 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
996 ctx.node()):
995 ctx.node()):
997 m = ctx.filectx(abs).renamed()
996 m = ctx.filectx(abs).renamed()
998 if m:
997 if m:
999 ui.write(_("%s renamed from %s:%s\n") % (rel, m[0], hex(m[1])))
998 ui.write(_("%s renamed from %s:%s\n") % (rel, m[0], hex(m[1])))
1000 else:
999 else:
1001 ui.write(_("%s not renamed\n") % rel)
1000 ui.write(_("%s not renamed\n") % rel)
1002
1001
1003 def debugwalk(ui, repo, *pats, **opts):
1002 def debugwalk(ui, repo, *pats, **opts):
1004 """show how files match on given patterns"""
1003 """show how files match on given patterns"""
1005 items = list(cmdutil.walk(repo, pats, opts))
1004 items = list(cmdutil.walk(repo, pats, opts))
1006 if not items:
1005 if not items:
1007 return
1006 return
1008 fmt = '%%s %%-%ds %%-%ds %%s' % (
1007 fmt = '%%s %%-%ds %%-%ds %%s' % (
1009 max([len(abs) for (src, abs, rel, exact) in items]),
1008 max([len(abs) for (src, abs, rel, exact) in items]),
1010 max([len(rel) for (src, abs, rel, exact) in items]))
1009 max([len(rel) for (src, abs, rel, exact) in items]))
1011 for src, abs, rel, exact in items:
1010 for src, abs, rel, exact in items:
1012 line = fmt % (src, abs, rel, exact and 'exact' or '')
1011 line = fmt % (src, abs, rel, exact and 'exact' or '')
1013 ui.write("%s\n" % line.rstrip())
1012 ui.write("%s\n" % line.rstrip())
1014
1013
1015 def diff(ui, repo, *pats, **opts):
1014 def diff(ui, repo, *pats, **opts):
1016 """diff repository (or selected files)
1015 """diff repository (or selected files)
1017
1016
1018 Show differences between revisions for the specified files.
1017 Show differences between revisions for the specified files.
1019
1018
1020 Differences between files are shown using the unified diff format.
1019 Differences between files are shown using the unified diff format.
1021
1020
1022 NOTE: diff may generate unexpected results for merges, as it will
1021 NOTE: diff may generate unexpected results for merges, as it will
1023 default to comparing against the working directory's first parent
1022 default to comparing against the working directory's first parent
1024 changeset if no revisions are specified.
1023 changeset if no revisions are specified.
1025
1024
1026 When two revision arguments are given, then changes are shown
1025 When two revision arguments are given, then changes are shown
1027 between those revisions. If only one revision is specified then
1026 between those revisions. If only one revision is specified then
1028 that revision is compared to the working directory, and, when no
1027 that revision is compared to the working directory, and, when no
1029 revisions are specified, the working directory files are compared
1028 revisions are specified, the working directory files are compared
1030 to its parent.
1029 to its parent.
1031
1030
1032 Without the -a option, diff will avoid generating diffs of files
1031 Without the -a option, diff will avoid generating diffs of files
1033 it detects as binary. With -a, diff will generate a diff anyway,
1032 it detects as binary. With -a, diff will generate a diff anyway,
1034 probably with undesirable results.
1033 probably with undesirable results.
1035 """
1034 """
1036 node1, node2 = cmdutil.revpair(repo, opts['rev'])
1035 node1, node2 = cmdutil.revpair(repo, opts['rev'])
1037
1036
1038 fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
1037 fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
1039
1038
1040 patch.diff(repo, node1, node2, fns, match=matchfn,
1039 patch.diff(repo, node1, node2, fns, match=matchfn,
1041 opts=patch.diffopts(ui, opts))
1040 opts=patch.diffopts(ui, opts))
1042
1041
1043 def export(ui, repo, *changesets, **opts):
1042 def export(ui, repo, *changesets, **opts):
1044 """dump the header and diffs for one or more changesets
1043 """dump the header and diffs for one or more changesets
1045
1044
1046 Print the changeset header and diffs for one or more revisions.
1045 Print the changeset header and diffs for one or more revisions.
1047
1046
1048 The information shown in the changeset header is: author,
1047 The information shown in the changeset header is: author,
1049 changeset hash, parent(s) and commit comment.
1048 changeset hash, parent(s) and commit comment.
1050
1049
1051 NOTE: export may generate unexpected diff output for merge changesets,
1050 NOTE: export may generate unexpected diff output for merge changesets,
1052 as it will compare the merge changeset against its first parent only.
1051 as it will compare the merge changeset against its first parent only.
1053
1052
1054 Output may be to a file, in which case the name of the file is
1053 Output may be to a file, in which case the name of the file is
1055 given using a format string. The formatting rules are as follows:
1054 given using a format string. The formatting rules are as follows:
1056
1055
1057 %% literal "%" character
1056 %% literal "%" character
1058 %H changeset hash (40 bytes of hexadecimal)
1057 %H changeset hash (40 bytes of hexadecimal)
1059 %N number of patches being generated
1058 %N number of patches being generated
1060 %R changeset revision number
1059 %R changeset revision number
1061 %b basename of the exporting repository
1060 %b basename of the exporting repository
1062 %h short-form changeset hash (12 bytes of hexadecimal)
1061 %h short-form changeset hash (12 bytes of hexadecimal)
1063 %n zero-padded sequence number, starting at 1
1062 %n zero-padded sequence number, starting at 1
1064 %r zero-padded changeset revision number
1063 %r zero-padded changeset revision number
1065
1064
1066 Without the -a option, export will avoid generating diffs of files
1065 Without the -a option, export will avoid generating diffs of files
1067 it detects as binary. With -a, export will generate a diff anyway,
1066 it detects as binary. With -a, export will generate a diff anyway,
1068 probably with undesirable results.
1067 probably with undesirable results.
1069
1068
1070 With the --switch-parent option, the diff will be against the second
1069 With the --switch-parent option, the diff will be against the second
1071 parent. It can be useful to review a merge.
1070 parent. It can be useful to review a merge.
1072 """
1071 """
1073 if not changesets:
1072 if not changesets:
1074 raise util.Abort(_("export requires at least one changeset"))
1073 raise util.Abort(_("export requires at least one changeset"))
1075 revs = cmdutil.revrange(repo, changesets)
1074 revs = cmdutil.revrange(repo, changesets)
1076 if len(revs) > 1:
1075 if len(revs) > 1:
1077 ui.note(_('exporting patches:\n'))
1076 ui.note(_('exporting patches:\n'))
1078 else:
1077 else:
1079 ui.note(_('exporting patch:\n'))
1078 ui.note(_('exporting patch:\n'))
1080 patch.export(repo, revs, template=opts['output'],
1079 patch.export(repo, revs, template=opts['output'],
1081 switch_parent=opts['switch_parent'],
1080 switch_parent=opts['switch_parent'],
1082 opts=patch.diffopts(ui, opts))
1081 opts=patch.diffopts(ui, opts))
1083
1082
1084 def grep(ui, repo, pattern, *pats, **opts):
1083 def grep(ui, repo, pattern, *pats, **opts):
1085 """search for a pattern in specified files and revisions
1084 """search for a pattern in specified files and revisions
1086
1085
1087 Search revisions of files for a regular expression.
1086 Search revisions of files for a regular expression.
1088
1087
1089 This command behaves differently than Unix grep. It only accepts
1088 This command behaves differently than Unix grep. It only accepts
1090 Python/Perl regexps. It searches repository history, not the
1089 Python/Perl regexps. It searches repository history, not the
1091 working directory. It always prints the revision number in which
1090 working directory. It always prints the revision number in which
1092 a match appears.
1091 a match appears.
1093
1092
1094 By default, grep only prints output for the first revision of a
1093 By default, grep only prints output for the first revision of a
1095 file in which it finds a match. To get it to print every revision
1094 file in which it finds a match. To get it to print every revision
1096 that contains a change in match status ("-" for a match that
1095 that contains a change in match status ("-" for a match that
1097 becomes a non-match, or "+" for a non-match that becomes a match),
1096 becomes a non-match, or "+" for a non-match that becomes a match),
1098 use the --all flag.
1097 use the --all flag.
1099 """
1098 """
1100 reflags = 0
1099 reflags = 0
1101 if opts['ignore_case']:
1100 if opts['ignore_case']:
1102 reflags |= re.I
1101 reflags |= re.I
1103 regexp = re.compile(pattern, reflags)
1102 regexp = re.compile(pattern, reflags)
1104 sep, eol = ':', '\n'
1103 sep, eol = ':', '\n'
1105 if opts['print0']:
1104 if opts['print0']:
1106 sep = eol = '\0'
1105 sep = eol = '\0'
1107
1106
1108 fcache = {}
1107 fcache = {}
1109 def getfile(fn):
1108 def getfile(fn):
1110 if fn not in fcache:
1109 if fn not in fcache:
1111 fcache[fn] = repo.file(fn)
1110 fcache[fn] = repo.file(fn)
1112 return fcache[fn]
1111 return fcache[fn]
1113
1112
1114 def matchlines(body):
1113 def matchlines(body):
1115 begin = 0
1114 begin = 0
1116 linenum = 0
1115 linenum = 0
1117 while True:
1116 while True:
1118 match = regexp.search(body, begin)
1117 match = regexp.search(body, begin)
1119 if not match:
1118 if not match:
1120 break
1119 break
1121 mstart, mend = match.span()
1120 mstart, mend = match.span()
1122 linenum += body.count('\n', begin, mstart) + 1
1121 linenum += body.count('\n', begin, mstart) + 1
1123 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1122 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1124 lend = body.find('\n', mend)
1123 lend = body.find('\n', mend)
1125 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1124 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1126 begin = lend + 1
1125 begin = lend + 1
1127
1126
1128 class linestate(object):
1127 class linestate(object):
1129 def __init__(self, line, linenum, colstart, colend):
1128 def __init__(self, line, linenum, colstart, colend):
1130 self.line = line
1129 self.line = line
1131 self.linenum = linenum
1130 self.linenum = linenum
1132 self.colstart = colstart
1131 self.colstart = colstart
1133 self.colend = colend
1132 self.colend = colend
1134
1133
1135 def __eq__(self, other):
1134 def __eq__(self, other):
1136 return self.line == other.line
1135 return self.line == other.line
1137
1136
1138 matches = {}
1137 matches = {}
1139 copies = {}
1138 copies = {}
1140 def grepbody(fn, rev, body):
1139 def grepbody(fn, rev, body):
1141 matches[rev].setdefault(fn, [])
1140 matches[rev].setdefault(fn, [])
1142 m = matches[rev][fn]
1141 m = matches[rev][fn]
1143 for lnum, cstart, cend, line in matchlines(body):
1142 for lnum, cstart, cend, line in matchlines(body):
1144 s = linestate(line, lnum, cstart, cend)
1143 s = linestate(line, lnum, cstart, cend)
1145 m.append(s)
1144 m.append(s)
1146
1145
1147 def difflinestates(a, b):
1146 def difflinestates(a, b):
1148 sm = difflib.SequenceMatcher(None, a, b)
1147 sm = difflib.SequenceMatcher(None, a, b)
1149 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1148 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1150 if tag == 'insert':
1149 if tag == 'insert':
1151 for i in xrange(blo, bhi):
1150 for i in xrange(blo, bhi):
1152 yield ('+', b[i])
1151 yield ('+', b[i])
1153 elif tag == 'delete':
1152 elif tag == 'delete':
1154 for i in xrange(alo, ahi):
1153 for i in xrange(alo, ahi):
1155 yield ('-', a[i])
1154 yield ('-', a[i])
1156 elif tag == 'replace':
1155 elif tag == 'replace':
1157 for i in xrange(alo, ahi):
1156 for i in xrange(alo, ahi):
1158 yield ('-', a[i])
1157 yield ('-', a[i])
1159 for i in xrange(blo, bhi):
1158 for i in xrange(blo, bhi):
1160 yield ('+', b[i])
1159 yield ('+', b[i])
1161
1160
1162 prev = {}
1161 prev = {}
1163 def display(fn, rev, states, prevstates):
1162 def display(fn, rev, states, prevstates):
1164 found = False
1163 found = False
1165 filerevmatches = {}
1164 filerevmatches = {}
1166 r = prev.get(fn, -1)
1165 r = prev.get(fn, -1)
1167 if opts['all']:
1166 if opts['all']:
1168 iter = difflinestates(states, prevstates)
1167 iter = difflinestates(states, prevstates)
1169 else:
1168 else:
1170 iter = [('', l) for l in prevstates]
1169 iter = [('', l) for l in prevstates]
1171 for change, l in iter:
1170 for change, l in iter:
1172 cols = [fn, str(r)]
1171 cols = [fn, str(r)]
1173 if opts['line_number']:
1172 if opts['line_number']:
1174 cols.append(str(l.linenum))
1173 cols.append(str(l.linenum))
1175 if opts['all']:
1174 if opts['all']:
1176 cols.append(change)
1175 cols.append(change)
1177 if opts['user']:
1176 if opts['user']:
1178 cols.append(ui.shortuser(get(r)[1]))
1177 cols.append(ui.shortuser(get(r)[1]))
1179 if opts['files_with_matches']:
1178 if opts['files_with_matches']:
1180 c = (fn, r)
1179 c = (fn, r)
1181 if c in filerevmatches:
1180 if c in filerevmatches:
1182 continue
1181 continue
1183 filerevmatches[c] = 1
1182 filerevmatches[c] = 1
1184 else:
1183 else:
1185 cols.append(l.line)
1184 cols.append(l.line)
1186 ui.write(sep.join(cols), eol)
1185 ui.write(sep.join(cols), eol)
1187 found = True
1186 found = True
1188 return found
1187 return found
1189
1188
1190 fstate = {}
1189 fstate = {}
1191 skip = {}
1190 skip = {}
1192 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1191 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1193 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1192 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1194 found = False
1193 found = False
1195 follow = opts.get('follow')
1194 follow = opts.get('follow')
1196 for st, rev, fns in changeiter:
1195 for st, rev, fns in changeiter:
1197 if st == 'window':
1196 if st == 'window':
1198 matches.clear()
1197 matches.clear()
1199 elif st == 'add':
1198 elif st == 'add':
1200 mf = repo.changectx(rev).manifest()
1199 mf = repo.changectx(rev).manifest()
1201 matches[rev] = {}
1200 matches[rev] = {}
1202 for fn in fns:
1201 for fn in fns:
1203 if fn in skip:
1202 if fn in skip:
1204 continue
1203 continue
1205 fstate.setdefault(fn, {})
1204 fstate.setdefault(fn, {})
1206 try:
1205 try:
1207 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1206 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1208 if follow:
1207 if follow:
1209 copied = getfile(fn).renamed(mf[fn])
1208 copied = getfile(fn).renamed(mf[fn])
1210 if copied:
1209 if copied:
1211 copies.setdefault(rev, {})[fn] = copied[0]
1210 copies.setdefault(rev, {})[fn] = copied[0]
1212 except KeyError:
1211 except KeyError:
1213 pass
1212 pass
1214 elif st == 'iter':
1213 elif st == 'iter':
1215 states = matches[rev].items()
1214 states = matches[rev].items()
1216 states.sort()
1215 states.sort()
1217 for fn, m in states:
1216 for fn, m in states:
1218 copy = copies.get(rev, {}).get(fn)
1217 copy = copies.get(rev, {}).get(fn)
1219 if fn in skip:
1218 if fn in skip:
1220 if copy:
1219 if copy:
1221 skip[copy] = True
1220 skip[copy] = True
1222 continue
1221 continue
1223 if fn in prev or fstate[fn]:
1222 if fn in prev or fstate[fn]:
1224 r = display(fn, rev, m, fstate[fn])
1223 r = display(fn, rev, m, fstate[fn])
1225 found = found or r
1224 found = found or r
1226 if r and not opts['all']:
1225 if r and not opts['all']:
1227 skip[fn] = True
1226 skip[fn] = True
1228 if copy:
1227 if copy:
1229 skip[copy] = True
1228 skip[copy] = True
1230 fstate[fn] = m
1229 fstate[fn] = m
1231 if copy:
1230 if copy:
1232 fstate[copy] = m
1231 fstate[copy] = m
1233 prev[fn] = rev
1232 prev[fn] = rev
1234
1233
1235 fstate = fstate.items()
1234 fstate = fstate.items()
1236 fstate.sort()
1235 fstate.sort()
1237 for fn, state in fstate:
1236 for fn, state in fstate:
1238 if fn in skip:
1237 if fn in skip:
1239 continue
1238 continue
1240 if fn not in copies.get(prev[fn], {}):
1239 if fn not in copies.get(prev[fn], {}):
1241 found = display(fn, rev, {}, state) or found
1240 found = display(fn, rev, {}, state) or found
1242 return (not found and 1) or 0
1241 return (not found and 1) or 0
1243
1242
def heads(ui, repo, **opts):
    """show current repository heads

    Show all repository head changesets.

    Repository "heads" are changesets that don't have children
    changesets. They are where development generally takes place and
    are the usual targets for update and merge operations.
    """
    # start from a specific revision when -r/--rev was given,
    # otherwise show every head in the repository
    rev = opts['rev']
    if rev:
        nodes = repo.heads(repo.lookup(rev))
    else:
        nodes = repo.heads()
    displayer = cmdutil.show_changeset(ui, repo, opts)
    for node in nodes:
        displayer.show(changenode=node)
1260
1259
1261 def help_(ui, name=None, with_version=False):
1260 def help_(ui, name=None, with_version=False):
1262 """show help for a command, extension, or list of commands
1261 """show help for a command, extension, or list of commands
1263
1262
1264 With no arguments, print a list of commands and short help.
1263 With no arguments, print a list of commands and short help.
1265
1264
1266 Given a command name, print help for that command.
1265 Given a command name, print help for that command.
1267
1266
1268 Given an extension name, print help for that extension, and the
1267 Given an extension name, print help for that extension, and the
1269 commands it provides."""
1268 commands it provides."""
1270 option_lists = []
1269 option_lists = []
1271
1270
1272 def helpcmd(name):
1271 def helpcmd(name):
1273 if with_version:
1272 if with_version:
1274 version_(ui)
1273 version_(ui)
1275 ui.write('\n')
1274 ui.write('\n')
1276 aliases, i = findcmd(ui, name)
1275 aliases, i = findcmd(ui, name)
1277 # synopsis
1276 # synopsis
1278 ui.write("%s\n\n" % i[2])
1277 ui.write("%s\n\n" % i[2])
1279
1278
1280 # description
1279 # description
1281 doc = i[0].__doc__
1280 doc = i[0].__doc__
1282 if not doc:
1281 if not doc:
1283 doc = _("(No help text available)")
1282 doc = _("(No help text available)")
1284 if ui.quiet:
1283 if ui.quiet:
1285 doc = doc.splitlines(0)[0]
1284 doc = doc.splitlines(0)[0]
1286 ui.write("%s\n" % doc.rstrip())
1285 ui.write("%s\n" % doc.rstrip())
1287
1286
1288 if not ui.quiet:
1287 if not ui.quiet:
1289 # aliases
1288 # aliases
1290 if len(aliases) > 1:
1289 if len(aliases) > 1:
1291 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
1290 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
1292
1291
1293 # options
1292 # options
1294 if i[1]:
1293 if i[1]:
1295 option_lists.append(("options", i[1]))
1294 option_lists.append(("options", i[1]))
1296
1295
1297 def helplist(select=None):
1296 def helplist(select=None):
1298 h = {}
1297 h = {}
1299 cmds = {}
1298 cmds = {}
1300 for c, e in table.items():
1299 for c, e in table.items():
1301 f = c.split("|", 1)[0]
1300 f = c.split("|", 1)[0]
1302 if select and not select(f):
1301 if select and not select(f):
1303 continue
1302 continue
1304 if name == "shortlist" and not f.startswith("^"):
1303 if name == "shortlist" and not f.startswith("^"):
1305 continue
1304 continue
1306 f = f.lstrip("^")
1305 f = f.lstrip("^")
1307 if not ui.debugflag and f.startswith("debug"):
1306 if not ui.debugflag and f.startswith("debug"):
1308 continue
1307 continue
1309 doc = e[0].__doc__
1308 doc = e[0].__doc__
1310 if not doc:
1309 if not doc:
1311 doc = _("(No help text available)")
1310 doc = _("(No help text available)")
1312 h[f] = doc.splitlines(0)[0].rstrip()
1311 h[f] = doc.splitlines(0)[0].rstrip()
1313 cmds[f] = c.lstrip("^")
1312 cmds[f] = c.lstrip("^")
1314
1313
1315 fns = h.keys()
1314 fns = h.keys()
1316 fns.sort()
1315 fns.sort()
1317 m = max(map(len, fns))
1316 m = max(map(len, fns))
1318 for f in fns:
1317 for f in fns:
1319 if ui.verbose:
1318 if ui.verbose:
1320 commands = cmds[f].replace("|",", ")
1319 commands = cmds[f].replace("|",", ")
1321 ui.write(" %s:\n %s\n"%(commands, h[f]))
1320 ui.write(" %s:\n %s\n"%(commands, h[f]))
1322 else:
1321 else:
1323 ui.write(' %-*s %s\n' % (m, f, h[f]))
1322 ui.write(' %-*s %s\n' % (m, f, h[f]))
1324
1323
1325 def helptopic(name):
1324 def helptopic(name):
1326 v = None
1325 v = None
1327 for i in help.helptable:
1326 for i in help.helptable:
1328 l = i.split('|')
1327 l = i.split('|')
1329 if name in l:
1328 if name in l:
1330 v = i
1329 v = i
1331 header = l[-1]
1330 header = l[-1]
1332 if not v:
1331 if not v:
1333 raise UnknownCommand(name)
1332 raise UnknownCommand(name)
1334
1333
1335 # description
1334 # description
1336 doc = help.helptable[v]
1335 doc = help.helptable[v]
1337 if not doc:
1336 if not doc:
1338 doc = _("(No help text available)")
1337 doc = _("(No help text available)")
1339 if callable(doc):
1338 if callable(doc):
1340 doc = doc()
1339 doc = doc()
1341
1340
1342 ui.write("%s\n" % header)
1341 ui.write("%s\n" % header)
1343 ui.write("%s\n" % doc.rstrip())
1342 ui.write("%s\n" % doc.rstrip())
1344
1343
1345 def helpext(name):
1344 def helpext(name):
1346 try:
1345 try:
1347 mod = findext(name)
1346 mod = findext(name)
1348 except KeyError:
1347 except KeyError:
1349 raise UnknownCommand(name)
1348 raise UnknownCommand(name)
1350
1349
1351 doc = (mod.__doc__ or _('No help text available')).splitlines(0)
1350 doc = (mod.__doc__ or _('No help text available')).splitlines(0)
1352 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
1351 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
1353 for d in doc[1:]:
1352 for d in doc[1:]:
1354 ui.write(d, '\n')
1353 ui.write(d, '\n')
1355
1354
1356 ui.status('\n')
1355 ui.status('\n')
1357
1356
1358 try:
1357 try:
1359 ct = mod.cmdtable
1358 ct = mod.cmdtable
1360 except AttributeError:
1359 except AttributeError:
1361 ui.status(_('no commands defined\n'))
1360 ui.status(_('no commands defined\n'))
1362 return
1361 return
1363
1362
1364 if ui.verbose:
1363 if ui.verbose:
1365 ui.status(_('list of commands:\n\n'))
1364 ui.status(_('list of commands:\n\n'))
1366 else:
1365 else:
1367 ui.status(_('list of commands (use "hg help -v %s" '
1366 ui.status(_('list of commands (use "hg help -v %s" '
1368 'to show aliases and global options):\n\n') % name)
1367 'to show aliases and global options):\n\n') % name)
1369
1368
1370 modcmds = dict.fromkeys([c.split('|', 1)[0] for c in ct])
1369 modcmds = dict.fromkeys([c.split('|', 1)[0] for c in ct])
1371 helplist(modcmds.has_key)
1370 helplist(modcmds.has_key)
1372
1371
1373 if name and name != 'shortlist':
1372 if name and name != 'shortlist':
1374 i = None
1373 i = None
1375 for f in (helpcmd, helptopic, helpext):
1374 for f in (helpcmd, helptopic, helpext):
1376 try:
1375 try:
1377 f(name)
1376 f(name)
1378 i = None
1377 i = None
1379 break
1378 break
1380 except UnknownCommand, inst:
1379 except UnknownCommand, inst:
1381 i = inst
1380 i = inst
1382 if i:
1381 if i:
1383 raise i
1382 raise i
1384
1383
1385 else:
1384 else:
1386 # program name
1385 # program name
1387 if ui.verbose or with_version:
1386 if ui.verbose or with_version:
1388 version_(ui)
1387 version_(ui)
1389 else:
1388 else:
1390 ui.status(_("Mercurial Distributed SCM\n"))
1389 ui.status(_("Mercurial Distributed SCM\n"))
1391 ui.status('\n')
1390 ui.status('\n')
1392
1391
1393 # list of commands
1392 # list of commands
1394 if name == "shortlist":
1393 if name == "shortlist":
1395 ui.status(_('basic commands (use "hg help" '
1394 ui.status(_('basic commands (use "hg help" '
1396 'for the full list or option "-v" for details):\n\n'))
1395 'for the full list or option "-v" for details):\n\n'))
1397 elif ui.verbose:
1396 elif ui.verbose:
1398 ui.status(_('list of commands:\n\n'))
1397 ui.status(_('list of commands:\n\n'))
1399 else:
1398 else:
1400 ui.status(_('list of commands (use "hg help -v" '
1399 ui.status(_('list of commands (use "hg help -v" '
1401 'to show aliases and global options):\n\n'))
1400 'to show aliases and global options):\n\n'))
1402
1401
1403 helplist()
1402 helplist()
1404
1403
1405 # global options
1404 # global options
1406 if ui.verbose:
1405 if ui.verbose:
1407 option_lists.append(("global options", globalopts))
1406 option_lists.append(("global options", globalopts))
1408
1407
1409 # list all option lists
1408 # list all option lists
1410 opt_output = []
1409 opt_output = []
1411 for title, options in option_lists:
1410 for title, options in option_lists:
1412 opt_output.append(("\n%s:\n" % title, None))
1411 opt_output.append(("\n%s:\n" % title, None))
1413 for shortopt, longopt, default, desc in options:
1412 for shortopt, longopt, default, desc in options:
1414 if "DEPRECATED" in desc and not ui.verbose: continue
1413 if "DEPRECATED" in desc and not ui.verbose: continue
1415 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
1414 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
1416 longopt and " --%s" % longopt),
1415 longopt and " --%s" % longopt),
1417 "%s%s" % (desc,
1416 "%s%s" % (desc,
1418 default
1417 default
1419 and _(" (default: %s)") % default
1418 and _(" (default: %s)") % default
1420 or "")))
1419 or "")))
1421
1420
1422 if opt_output:
1421 if opt_output:
1423 opts_len = max([len(line[0]) for line in opt_output if line[1]])
1422 opts_len = max([len(line[0]) for line in opt_output if line[1]])
1424 for first, second in opt_output:
1423 for first, second in opt_output:
1425 if second:
1424 if second:
1426 ui.write(" %-*s %s\n" % (opts_len, first, second))
1425 ui.write(" %-*s %s\n" % (opts_len, first, second))
1427 else:
1426 else:
1428 ui.write("%s\n" % first)
1427 ui.write("%s\n" % first)
1429
1428
def identify(ui, repo):
    """print information about the working copy

    Print a short summary of the current state of the repo.

    This summary identifies the repository state using one or two parent
    hash identifiers, followed by a "+" if there are uncommitted changes
    in the working directory, followed by a list of tags for this revision.
    """
    # working directory parents, ignoring the null revision
    parents = [p for p in repo.dirstate.parents() if p != nullid]
    if not parents:
        ui.write(_("unknown\n"))
        return

    # full hashes with --debug, short form otherwise
    hexfunc = ui.debugflag and hex or short
    modified, added, removed, deleted = repo.status()[:4]
    dirty = (modified or added or removed or deleted) and "+" or ""
    joined = '+'.join([hexfunc(p) for p in parents])
    output = ["%s%s" % (joined, dirty)]

    if not ui.quiet:

        # only mention the branch when it is not the default one
        branch = util.tolocal(repo.workingctx().branch())
        if branch != 'default':
            output.append("(%s)" % branch)

        # multiple tags for a single parent separated by '/'
        parenttags = ['/'.join(tags)
                      for tags in map(repo.nodetags, parents) if tags]
        # tags for multiple parents separated by ' + '
        if parenttags:
            output.append(' + '.join(parenttags))

    ui.write("%s\n" % ' '.join(output))
1464
1463
def import_(ui, repo, patch1, *patches, **opts):
    """import an ordered set of patches

    Import a list of patches and commit them individually.

    If there are outstanding changes in the working directory, import
    will abort unless given the -f flag.

    You can import a patch straight from a mail message.  Even patches
    as attachments work (body part must be type text/plain or
    text/x-patch to be used).  From and Subject headers of email
    message are used as default committer and commit message.  All
    text/plain body parts before first diff are added to commit
    message.

    If imported patch was generated by hg export, user and description
    from patch override values from message headers and body.  Values
    given on command line with -m and -u override these.

    To read a patch from standard input, use patch name "-".
    """
    patches = (patch1,) + patches

    if not opts['force']:
        bail_if_changed(repo)

    basedir = opts["base"]
    striplevel = opts["strip"]

    # patching touches both the working directory and history,
    # so take both locks for the whole run
    wlock = repo.wlock()
    lock = repo.lock()

    for patchname in patches:
        pf = os.path.join(basedir, patchname)

        if pf == '-':
            ui.status(_("applying patch from stdin\n"))
            tmpname, message, user, date = patch.extract(ui, sys.stdin)
        else:
            ui.status(_("applying %s\n") % patchname)
            tmpname, message, user, date = patch.extract(ui, file(pf))

        if tmpname is None:
            raise util.Abort(_('no diffs found'))

        try:
            # commit message priority: -m/-l option, then the message
            # extracted from the patch, then the interactive editor
            cmdline_message = logmessage(opts)
            if cmdline_message:
                # pickup the cmdline msg
                message = cmdline_message
            elif message:
                # pickup the patch msg
                message = message.strip()
            else:
                # launch the editor
                message = None
            ui.debug(_('message:\n%s\n') % message)

            files = {}
            try:
                fuzz = patch.patch(tmpname, ui, strip=striplevel,
                                   cwd=repo.root, files=files)
            finally:
                # always record what the patch touched, even on failure
                files = patch.updatedir(ui, repo, files, wlock=wlock)
            repo.commit(files, message, user, date, wlock=wlock, lock=lock)
        finally:
            os.unlink(tmpname)
1532
1531
def incoming(ui, repo, source="default", **opts):
    """show new changesets found in source

    Show new changesets found in the specified path/URL or the default
    pull location. These are the changesets that would be pulled if a pull
    was requested.

    For remote repository, using --bundle avoids downloading the changesets
    twice if the incoming is followed by a pull.

    See pull for valid source format details.
    """
    source = ui.expandpath(source)
    setremoteconfig(ui, opts)

    other = hg.repository(ui, source)
    ui.status(_('comparing with %s\n') % source)
    incoming = repo.findincoming(other, force=opts["force"])
    if not incoming:
        # nothing new; best-effort removal of a stale bundle file
        try:
            os.unlink(opts["bundle"])
        except:
            pass
        ui.status(_("no changes found\n"))
        return 1

    cleanup = None
    try:
        fname = opts["bundle"]
        if fname or not other.local():
            # create a bundle (uncompressed if other repo is not local)
            cg = other.changegroup(incoming, "incoming")
            bundletype = other.local() and "HG10BZ" or "HG10UN"
            fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
            # keep written bundle?
            if opts["bundle"]:
                cleanup = None
            if not other.local():
                # use the created uncompressed bundlerepo
                other = bundlerepo.bundlerepository(ui, repo.root, fname)

        revs = None
        if opts['rev']:
            revs = [other.lookup(r) for r in opts['rev']]
        nodes = other.changelog.nodesbetween(incoming, revs)[0]
        if opts['newest_first']:
            nodes.reverse()
        displayer = cmdutil.show_changeset(ui, other, opts)
        for n in nodes:
            parents = [p for p in other.changelog.parents(n) if p != nullid]
            if opts['no_merges'] and len(parents) == 2:
                continue
            displayer.show(changenode=n)
    finally:
        if hasattr(other, 'close'):
            other.close()
        if cleanup:
            # remove the temporary bundle unless the user asked to keep it
            os.unlink(cleanup)
1591
1590
def init(ui, dest=".", **opts):
    """create a new repository in the given directory

    Initialize a new repository in the given directory.  If the given
    directory does not exist, it is created.

    If no directory is given, the current directory is used.

    It is possible to specify an ssh:// URL as the destination.
    Look at the help text for the pull command for important details
    about ssh:// URLs.
    """
    # apply --ssh/--remotecmd overrides before touching the destination
    setremoteconfig(ui, opts)
    hg.repository(ui, dest, create=1)
1606
1605
def locate(ui, repo, *pats, **opts):
    """locate files matching specific patterns

    Print all files under Mercurial control whose names match the
    given patterns.

    This command searches the entire repository by default.  To search
    just the current directory and its subdirectories, use "--include .".

    If no patterns are given to match, this command prints all file
    names.

    If you want to feed the output of this command into the "xargs"
    command, use the "-0" option to both this command and "xargs".
    This will avoid the problem of "xargs" treating single filenames
    that contain white space as multiple filenames.
    """
    # -0/--print0 uses NUL terminators for safe xargs consumption
    end = opts['print0'] and '\0' or '\n'
    rev = opts['rev']
    if rev:
        node = repo.lookup(rev)
    else:
        node = None

    ret = 1
    walker = cmdutil.walk(repo, pats, opts, node=node, default='relglob')
    for src, abs, rel, exact in walker:
        # without a revision, skip files unknown to the dirstate
        if not node and repo.dirstate.state(abs) == '?':
            continue
        if opts['fullpath']:
            ui.write(os.path.join(repo.root, abs), end)
        else:
            ui.write(((pats and rel) or abs), end)
        # at least one file was printed
        ret = 0

    return ret
1643
1642
1644 def log(ui, repo, *pats, **opts):
1643 def log(ui, repo, *pats, **opts):
1645 """show revision history of entire repository or files
1644 """show revision history of entire repository or files
1646
1645
1647 Print the revision history of the specified files or the entire
1646 Print the revision history of the specified files or the entire
1648 project.
1647 project.
1649
1648
1650 File history is shown without following rename or copy history of
1649 File history is shown without following rename or copy history of
1651 files. Use -f/--follow with a file name to follow history across
1650 files. Use -f/--follow with a file name to follow history across
1652 renames and copies. --follow without a file name will only show
1651 renames and copies. --follow without a file name will only show
1653 ancestors or descendants of the starting revision. --follow-first
1652 ancestors or descendants of the starting revision. --follow-first
1654 only follows the first parent of merge revisions.
1653 only follows the first parent of merge revisions.
1655
1654
1656 If no revision range is specified, the default is tip:0 unless
1655 If no revision range is specified, the default is tip:0 unless
1657 --follow is set, in which case the working directory parent is
1656 --follow is set, in which case the working directory parent is
1658 used as the starting revision.
1657 used as the starting revision.
1659
1658
1660 By default this command outputs: changeset id and hash, tags,
1659 By default this command outputs: changeset id and hash, tags,
1661 non-trivial parents, user, date and time, and a summary for each
1660 non-trivial parents, user, date and time, and a summary for each
1662 commit. When the -v/--verbose switch is used, the list of changed
1661 commit. When the -v/--verbose switch is used, the list of changed
1663 files and full commit message is shown.
1662 files and full commit message is shown.
1664
1663
1665 NOTE: log -p may generate unexpected diff output for merge
1664 NOTE: log -p may generate unexpected diff output for merge
1666 changesets, as it will compare the merge changeset against its
1665 changesets, as it will compare the merge changeset against its
1667 first parent only. Also, the files: list will only reflect files
1666 first parent only. Also, the files: list will only reflect files
1668 that are different from BOTH parents.
1667 that are different from BOTH parents.
1669
1668
1670 """
1669 """
1671
1670
1672 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1671 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1673 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1672 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1674
1673
1675 if opts['limit']:
1674 if opts['limit']:
1676 try:
1675 try:
1677 limit = int(opts['limit'])
1676 limit = int(opts['limit'])
1678 except ValueError:
1677 except ValueError:
1679 raise util.Abort(_('limit must be a positive integer'))
1678 raise util.Abort(_('limit must be a positive integer'))
1680 if limit <= 0: raise util.Abort(_('limit must be positive'))
1679 if limit <= 0: raise util.Abort(_('limit must be positive'))
1681 else:
1680 else:
1682 limit = sys.maxint
1681 limit = sys.maxint
1683 count = 0
1682 count = 0
1684
1683
1685 if opts['copies'] and opts['rev']:
1684 if opts['copies'] and opts['rev']:
1686 endrev = max(cmdutil.revrange(repo, opts['rev'])) + 1
1685 endrev = max(cmdutil.revrange(repo, opts['rev'])) + 1
1687 else:
1686 else:
1688 endrev = repo.changelog.count()
1687 endrev = repo.changelog.count()
1689 rcache = {}
1688 rcache = {}
1690 ncache = {}
1689 ncache = {}
1691 dcache = []
1690 dcache = []
1692 def getrenamed(fn, rev, man):
1691 def getrenamed(fn, rev, man):
1693 '''looks up all renames for a file (up to endrev) the first
1692 '''looks up all renames for a file (up to endrev) the first
1694 time the file is given. It indexes on the changerev and only
1693 time the file is given. It indexes on the changerev and only
1695 parses the manifest if linkrev != changerev.
1694 parses the manifest if linkrev != changerev.
1696 Returns rename info for fn at changerev rev.'''
1695 Returns rename info for fn at changerev rev.'''
1697 if fn not in rcache:
1696 if fn not in rcache:
1698 rcache[fn] = {}
1697 rcache[fn] = {}
1699 ncache[fn] = {}
1698 ncache[fn] = {}
1700 fl = repo.file(fn)
1699 fl = repo.file(fn)
1701 for i in xrange(fl.count()):
1700 for i in xrange(fl.count()):
1702 node = fl.node(i)
1701 node = fl.node(i)
1703 lr = fl.linkrev(node)
1702 lr = fl.linkrev(node)
1704 renamed = fl.renamed(node)
1703 renamed = fl.renamed(node)
1705 rcache[fn][lr] = renamed
1704 rcache[fn][lr] = renamed
1706 if renamed:
1705 if renamed:
1707 ncache[fn][node] = renamed
1706 ncache[fn][node] = renamed
1708 if lr >= endrev:
1707 if lr >= endrev:
1709 break
1708 break
1710 if rev in rcache[fn]:
1709 if rev in rcache[fn]:
1711 return rcache[fn][rev]
1710 return rcache[fn][rev]
1712 mr = repo.manifest.rev(man)
1711 mr = repo.manifest.rev(man)
1713 if repo.manifest.parentrevs(mr) != (mr - 1, nullrev):
1712 if repo.manifest.parentrevs(mr) != (mr - 1, nullrev):
1714 return ncache[fn].get(repo.manifest.find(man, fn)[0])
1713 return ncache[fn].get(repo.manifest.find(man, fn)[0])
1715 if not dcache or dcache[0] != man:
1714 if not dcache or dcache[0] != man:
1716 dcache[:] = [man, repo.manifest.readdelta(man)]
1715 dcache[:] = [man, repo.manifest.readdelta(man)]
1717 if fn in dcache[1]:
1716 if fn in dcache[1]:
1718 return ncache[fn].get(dcache[1][fn])
1717 return ncache[fn].get(dcache[1][fn])
1719 return None
1718 return None
1720
1719
1721 df = False
1720 df = False
1722 if opts["date"]:
1721 if opts["date"]:
1723 df = util.matchdate(opts["date"])
1722 df = util.matchdate(opts["date"])
1724
1723
1725 displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
1724 displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
1726 for st, rev, fns in changeiter:
1725 for st, rev, fns in changeiter:
1727 if st == 'add':
1726 if st == 'add':
1728 changenode = repo.changelog.node(rev)
1727 changenode = repo.changelog.node(rev)
1729 parents = [p for p in repo.changelog.parentrevs(rev)
1728 parents = [p for p in repo.changelog.parentrevs(rev)
1730 if p != nullrev]
1729 if p != nullrev]
1731 if opts['no_merges'] and len(parents) == 2:
1730 if opts['no_merges'] and len(parents) == 2:
1732 continue
1731 continue
1733 if opts['only_merges'] and len(parents) != 2:
1732 if opts['only_merges'] and len(parents) != 2:
1734 continue
1733 continue
1735
1734
1736 if df:
1735 if df:
1737 changes = get(rev)
1736 changes = get(rev)
1738 if not df(changes[2][0]):
1737 if not df(changes[2][0]):
1739 continue
1738 continue
1740
1739
1741 if opts['keyword']:
1740 if opts['keyword']:
1742 changes = get(rev)
1741 changes = get(rev)
1743 miss = 0
1742 miss = 0
1744 for k in [kw.lower() for kw in opts['keyword']]:
1743 for k in [kw.lower() for kw in opts['keyword']]:
1745 if not (k in changes[1].lower() or
1744 if not (k in changes[1].lower() or
1746 k in changes[4].lower() or
1745 k in changes[4].lower() or
1747 k in " ".join(changes[3][:20]).lower()):
1746 k in " ".join(changes[3][:20]).lower()):
1748 miss = 1
1747 miss = 1
1749 break
1748 break
1750 if miss:
1749 if miss:
1751 continue
1750 continue
1752
1751
1753 copies = []
1752 copies = []
1754 if opts.get('copies') and rev:
1753 if opts.get('copies') and rev:
1755 mf = get(rev)[0]
1754 mf = get(rev)[0]
1756 for fn in get(rev)[3]:
1755 for fn in get(rev)[3]:
1757 rename = getrenamed(fn, rev, mf)
1756 rename = getrenamed(fn, rev, mf)
1758 if rename:
1757 if rename:
1759 copies.append((fn, rename[0]))
1758 copies.append((fn, rename[0]))
1760 displayer.show(rev, changenode, copies=copies)
1759 displayer.show(rev, changenode, copies=copies)
1761 elif st == 'iter':
1760 elif st == 'iter':
1762 if count == limit: break
1761 if count == limit: break
1763 if displayer.flush(rev):
1762 if displayer.flush(rev):
1764 count += 1
1763 count += 1
1765
1764
1766 def manifest(ui, repo, rev=None):
1765 def manifest(ui, repo, rev=None):
1767 """output the current or given revision of the project manifest
1766 """output the current or given revision of the project manifest
1768
1767
1769 Print a list of version controlled files for the given revision.
1768 Print a list of version controlled files for the given revision.
1770 If no revision is given, the parent of the working directory is used,
1769 If no revision is given, the parent of the working directory is used,
1771 or tip if no revision is checked out.
1770 or tip if no revision is checked out.
1772
1771
1773 The manifest is the list of files being version controlled. If no revision
1772 The manifest is the list of files being version controlled. If no revision
1774 is given then the first parent of the working directory is used.
1773 is given then the first parent of the working directory is used.
1775
1774
1776 With -v flag, print file permissions. With --debug flag, print
1775 With -v flag, print file permissions. With --debug flag, print
1777 file revision hashes.
1776 file revision hashes.
1778 """
1777 """
1779
1778
1780 m = repo.changectx(rev).manifest()
1779 m = repo.changectx(rev).manifest()
1781 files = m.keys()
1780 files = m.keys()
1782 files.sort()
1781 files.sort()
1783
1782
1784 for f in files:
1783 for f in files:
1785 if ui.debugflag:
1784 if ui.debugflag:
1786 ui.write("%40s " % hex(m[f]))
1785 ui.write("%40s " % hex(m[f]))
1787 if ui.verbose:
1786 if ui.verbose:
1788 ui.write("%3s " % (m.execf(f) and "755" or "644"))
1787 ui.write("%3s " % (m.execf(f) and "755" or "644"))
1789 ui.write("%s\n" % f)
1788 ui.write("%s\n" % f)
1790
1789
1791 def merge(ui, repo, node=None, force=None):
1790 def merge(ui, repo, node=None, force=None):
1792 """merge working directory with another revision
1791 """merge working directory with another revision
1793
1792
1794 Merge the contents of the current working directory and the
1793 Merge the contents of the current working directory and the
1795 requested revision. Files that changed between either parent are
1794 requested revision. Files that changed between either parent are
1796 marked as changed for the next commit and a commit must be
1795 marked as changed for the next commit and a commit must be
1797 performed before any further updates are allowed.
1796 performed before any further updates are allowed.
1798
1797
1799 If no revision is specified, the working directory's parent is a
1798 If no revision is specified, the working directory's parent is a
1800 head revision, and the repository contains exactly one other head,
1799 head revision, and the repository contains exactly one other head,
1801 the other head is merged with by default. Otherwise, an explicit
1800 the other head is merged with by default. Otherwise, an explicit
1802 revision to merge with must be provided.
1801 revision to merge with must be provided.
1803 """
1802 """
1804
1803
1805 if not node:
1804 if not node:
1806 heads = repo.heads()
1805 heads = repo.heads()
1807 if len(heads) > 2:
1806 if len(heads) > 2:
1808 raise util.Abort(_('repo has %d heads - '
1807 raise util.Abort(_('repo has %d heads - '
1809 'please merge with an explicit rev') %
1808 'please merge with an explicit rev') %
1810 len(heads))
1809 len(heads))
1811 if len(heads) == 1:
1810 if len(heads) == 1:
1812 raise util.Abort(_('there is nothing to merge - '
1811 raise util.Abort(_('there is nothing to merge - '
1813 'use "hg update" instead'))
1812 'use "hg update" instead'))
1814 parent = repo.dirstate.parents()[0]
1813 parent = repo.dirstate.parents()[0]
1815 if parent not in heads:
1814 if parent not in heads:
1816 raise util.Abort(_('working dir not at a head rev - '
1815 raise util.Abort(_('working dir not at a head rev - '
1817 'use "hg update" or merge with an explicit rev'))
1816 'use "hg update" or merge with an explicit rev'))
1818 node = parent == heads[0] and heads[-1] or heads[0]
1817 node = parent == heads[0] and heads[-1] or heads[0]
1819 return hg.merge(repo, node, force=force)
1818 return hg.merge(repo, node, force=force)
1820
1819
1821 def outgoing(ui, repo, dest=None, **opts):
1820 def outgoing(ui, repo, dest=None, **opts):
1822 """show changesets not found in destination
1821 """show changesets not found in destination
1823
1822
1824 Show changesets not found in the specified destination repository or
1823 Show changesets not found in the specified destination repository or
1825 the default push location. These are the changesets that would be pushed
1824 the default push location. These are the changesets that would be pushed
1826 if a push was requested.
1825 if a push was requested.
1827
1826
1828 See pull for valid destination format details.
1827 See pull for valid destination format details.
1829 """
1828 """
1830 dest = ui.expandpath(dest or 'default-push', dest or 'default')
1829 dest = ui.expandpath(dest or 'default-push', dest or 'default')
1831 setremoteconfig(ui, opts)
1830 setremoteconfig(ui, opts)
1832 revs = None
1831 revs = None
1833 if opts['rev']:
1832 if opts['rev']:
1834 revs = [repo.lookup(rev) for rev in opts['rev']]
1833 revs = [repo.lookup(rev) for rev in opts['rev']]
1835
1834
1836 other = hg.repository(ui, dest)
1835 other = hg.repository(ui, dest)
1837 ui.status(_('comparing with %s\n') % dest)
1836 ui.status(_('comparing with %s\n') % dest)
1838 o = repo.findoutgoing(other, force=opts['force'])
1837 o = repo.findoutgoing(other, force=opts['force'])
1839 if not o:
1838 if not o:
1840 ui.status(_("no changes found\n"))
1839 ui.status(_("no changes found\n"))
1841 return 1
1840 return 1
1842 o = repo.changelog.nodesbetween(o, revs)[0]
1841 o = repo.changelog.nodesbetween(o, revs)[0]
1843 if opts['newest_first']:
1842 if opts['newest_first']:
1844 o.reverse()
1843 o.reverse()
1845 displayer = cmdutil.show_changeset(ui, repo, opts)
1844 displayer = cmdutil.show_changeset(ui, repo, opts)
1846 for n in o:
1845 for n in o:
1847 parents = [p for p in repo.changelog.parents(n) if p != nullid]
1846 parents = [p for p in repo.changelog.parents(n) if p != nullid]
1848 if opts['no_merges'] and len(parents) == 2:
1847 if opts['no_merges'] and len(parents) == 2:
1849 continue
1848 continue
1850 displayer.show(changenode=n)
1849 displayer.show(changenode=n)
1851
1850
1852 def parents(ui, repo, file_=None, **opts):
1851 def parents(ui, repo, file_=None, **opts):
1853 """show the parents of the working dir or revision
1852 """show the parents of the working dir or revision
1854
1853
1855 Print the working directory's parent revisions.
1854 Print the working directory's parent revisions.
1856 """
1855 """
1857 rev = opts.get('rev')
1856 rev = opts.get('rev')
1858 if rev:
1857 if rev:
1859 if file_:
1858 if file_:
1860 ctx = repo.filectx(file_, changeid=rev)
1859 ctx = repo.filectx(file_, changeid=rev)
1861 else:
1860 else:
1862 ctx = repo.changectx(rev)
1861 ctx = repo.changectx(rev)
1863 p = [cp.node() for cp in ctx.parents()]
1862 p = [cp.node() for cp in ctx.parents()]
1864 else:
1863 else:
1865 p = repo.dirstate.parents()
1864 p = repo.dirstate.parents()
1866
1865
1867 displayer = cmdutil.show_changeset(ui, repo, opts)
1866 displayer = cmdutil.show_changeset(ui, repo, opts)
1868 for n in p:
1867 for n in p:
1869 if n != nullid:
1868 if n != nullid:
1870 displayer.show(changenode=n)
1869 displayer.show(changenode=n)
1871
1870
1872 def paths(ui, repo, search=None):
1871 def paths(ui, repo, search=None):
1873 """show definition of symbolic path names
1872 """show definition of symbolic path names
1874
1873
1875 Show definition of symbolic path name NAME. If no name is given, show
1874 Show definition of symbolic path name NAME. If no name is given, show
1876 definition of available names.
1875 definition of available names.
1877
1876
1878 Path names are defined in the [paths] section of /etc/mercurial/hgrc
1877 Path names are defined in the [paths] section of /etc/mercurial/hgrc
1879 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
1878 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
1880 """
1879 """
1881 if search:
1880 if search:
1882 for name, path in ui.configitems("paths"):
1881 for name, path in ui.configitems("paths"):
1883 if name == search:
1882 if name == search:
1884 ui.write("%s\n" % path)
1883 ui.write("%s\n" % path)
1885 return
1884 return
1886 ui.warn(_("not found!\n"))
1885 ui.warn(_("not found!\n"))
1887 return 1
1886 return 1
1888 else:
1887 else:
1889 for name, path in ui.configitems("paths"):
1888 for name, path in ui.configitems("paths"):
1890 ui.write("%s = %s\n" % (name, path))
1889 ui.write("%s = %s\n" % (name, path))
1891
1890
1892 def postincoming(ui, repo, modheads, optupdate):
1891 def postincoming(ui, repo, modheads, optupdate):
1893 if modheads == 0:
1892 if modheads == 0:
1894 return
1893 return
1895 if optupdate:
1894 if optupdate:
1896 if modheads == 1:
1895 if modheads == 1:
1897 return hg.update(repo, repo.changelog.tip()) # update
1896 return hg.update(repo, repo.changelog.tip()) # update
1898 else:
1897 else:
1899 ui.status(_("not updating, since new heads added\n"))
1898 ui.status(_("not updating, since new heads added\n"))
1900 if modheads > 1:
1899 if modheads > 1:
1901 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
1900 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
1902 else:
1901 else:
1903 ui.status(_("(run 'hg update' to get a working copy)\n"))
1902 ui.status(_("(run 'hg update' to get a working copy)\n"))
1904
1903
1905 def pull(ui, repo, source="default", **opts):
1904 def pull(ui, repo, source="default", **opts):
1906 """pull changes from the specified source
1905 """pull changes from the specified source
1907
1906
1908 Pull changes from a remote repository to a local one.
1907 Pull changes from a remote repository to a local one.
1909
1908
1910 This finds all changes from the repository at the specified path
1909 This finds all changes from the repository at the specified path
1911 or URL and adds them to the local repository. By default, this
1910 or URL and adds them to the local repository. By default, this
1912 does not update the copy of the project in the working directory.
1911 does not update the copy of the project in the working directory.
1913
1912
1914 Valid URLs are of the form:
1913 Valid URLs are of the form:
1915
1914
1916 local/filesystem/path (or file://local/filesystem/path)
1915 local/filesystem/path (or file://local/filesystem/path)
1917 http://[user@]host[:port]/[path]
1916 http://[user@]host[:port]/[path]
1918 https://[user@]host[:port]/[path]
1917 https://[user@]host[:port]/[path]
1919 ssh://[user@]host[:port]/[path]
1918 ssh://[user@]host[:port]/[path]
1920 static-http://host[:port]/[path]
1919 static-http://host[:port]/[path]
1921
1920
1922 Paths in the local filesystem can either point to Mercurial
1921 Paths in the local filesystem can either point to Mercurial
1923 repositories or to bundle files (as created by 'hg bundle' or
1922 repositories or to bundle files (as created by 'hg bundle' or
1924 'hg incoming --bundle'). The static-http:// protocol, albeit slow,
1923 'hg incoming --bundle'). The static-http:// protocol, albeit slow,
1925 allows access to a Mercurial repository where you simply use a web
1924 allows access to a Mercurial repository where you simply use a web
1926 server to publish the .hg directory as static content.
1925 server to publish the .hg directory as static content.
1927
1926
1928 Some notes about using SSH with Mercurial:
1927 Some notes about using SSH with Mercurial:
1929 - SSH requires an accessible shell account on the destination machine
1928 - SSH requires an accessible shell account on the destination machine
1930 and a copy of hg in the remote path or specified with as remotecmd.
1929 and a copy of hg in the remote path or specified with as remotecmd.
1931 - path is relative to the remote user's home directory by default.
1930 - path is relative to the remote user's home directory by default.
1932 Use an extra slash at the start of a path to specify an absolute path:
1931 Use an extra slash at the start of a path to specify an absolute path:
1933 ssh://example.com//tmp/repository
1932 ssh://example.com//tmp/repository
1934 - Mercurial doesn't use its own compression via SSH; the right thing
1933 - Mercurial doesn't use its own compression via SSH; the right thing
1935 to do is to configure it in your ~/.ssh/config, e.g.:
1934 to do is to configure it in your ~/.ssh/config, e.g.:
1936 Host *.mylocalnetwork.example.com
1935 Host *.mylocalnetwork.example.com
1937 Compression no
1936 Compression no
1938 Host *
1937 Host *
1939 Compression yes
1938 Compression yes
1940 Alternatively specify "ssh -C" as your ssh command in your hgrc or
1939 Alternatively specify "ssh -C" as your ssh command in your hgrc or
1941 with the --ssh command line option.
1940 with the --ssh command line option.
1942 """
1941 """
1943 source = ui.expandpath(source)
1942 source = ui.expandpath(source)
1944 setremoteconfig(ui, opts)
1943 setremoteconfig(ui, opts)
1945
1944
1946 other = hg.repository(ui, source)
1945 other = hg.repository(ui, source)
1947 ui.status(_('pulling from %s\n') % (source))
1946 ui.status(_('pulling from %s\n') % (source))
1948 revs = None
1947 revs = None
1949 if opts['rev']:
1948 if opts['rev']:
1950 if 'lookup' in other.capabilities:
1949 if 'lookup' in other.capabilities:
1951 revs = [other.lookup(rev) for rev in opts['rev']]
1950 revs = [other.lookup(rev) for rev in opts['rev']]
1952 else:
1951 else:
1953 error = _("Other repository doesn't support revision lookup, so a rev cannot be specified.")
1952 error = _("Other repository doesn't support revision lookup, so a rev cannot be specified.")
1954 raise util.Abort(error)
1953 raise util.Abort(error)
1955 modheads = repo.pull(other, heads=revs, force=opts['force'])
1954 modheads = repo.pull(other, heads=revs, force=opts['force'])
1956 return postincoming(ui, repo, modheads, opts['update'])
1955 return postincoming(ui, repo, modheads, opts['update'])
1957
1956
1958 def push(ui, repo, dest=None, **opts):
1957 def push(ui, repo, dest=None, **opts):
1959 """push changes to the specified destination
1958 """push changes to the specified destination
1960
1959
1961 Push changes from the local repository to the given destination.
1960 Push changes from the local repository to the given destination.
1962
1961
1963 This is the symmetrical operation for pull. It helps to move
1962 This is the symmetrical operation for pull. It helps to move
1964 changes from the current repository to a different one. If the
1963 changes from the current repository to a different one. If the
1965 destination is local this is identical to a pull in that directory
1964 destination is local this is identical to a pull in that directory
1966 from the current one.
1965 from the current one.
1967
1966
1968 By default, push will refuse to run if it detects the result would
1967 By default, push will refuse to run if it detects the result would
1969 increase the number of remote heads. This generally indicates the
1968 increase the number of remote heads. This generally indicates the
1970 the client has forgotten to sync and merge before pushing.
1969 the client has forgotten to sync and merge before pushing.
1971
1970
1972 Valid URLs are of the form:
1971 Valid URLs are of the form:
1973
1972
1974 local/filesystem/path (or file://local/filesystem/path)
1973 local/filesystem/path (or file://local/filesystem/path)
1975 ssh://[user@]host[:port]/[path]
1974 ssh://[user@]host[:port]/[path]
1976 http://[user@]host[:port]/[path]
1975 http://[user@]host[:port]/[path]
1977 https://[user@]host[:port]/[path]
1976 https://[user@]host[:port]/[path]
1978
1977
1979 Look at the help text for the pull command for important details
1978 Look at the help text for the pull command for important details
1980 about ssh:// URLs.
1979 about ssh:// URLs.
1981
1980
1982 Pushing to http:// and https:// URLs is only possible, if this
1981 Pushing to http:// and https:// URLs is only possible, if this
1983 feature is explicitly enabled on the remote Mercurial server.
1982 feature is explicitly enabled on the remote Mercurial server.
1984 """
1983 """
1985 dest = ui.expandpath(dest or 'default-push', dest or 'default')
1984 dest = ui.expandpath(dest or 'default-push', dest or 'default')
1986 setremoteconfig(ui, opts)
1985 setremoteconfig(ui, opts)
1987
1986
1988 other = hg.repository(ui, dest)
1987 other = hg.repository(ui, dest)
1989 ui.status('pushing to %s\n' % (dest))
1988 ui.status('pushing to %s\n' % (dest))
1990 revs = None
1989 revs = None
1991 if opts['rev']:
1990 if opts['rev']:
1992 revs = [repo.lookup(rev) for rev in opts['rev']]
1991 revs = [repo.lookup(rev) for rev in opts['rev']]
1993 r = repo.push(other, opts['force'], revs=revs)
1992 r = repo.push(other, opts['force'], revs=revs)
1994 return r == 0
1993 return r == 0
1995
1994
1996 def rawcommit(ui, repo, *pats, **opts):
1995 def rawcommit(ui, repo, *pats, **opts):
1997 """raw commit interface (DEPRECATED)
1996 """raw commit interface (DEPRECATED)
1998
1997
1999 (DEPRECATED)
1998 (DEPRECATED)
2000 Lowlevel commit, for use in helper scripts.
1999 Lowlevel commit, for use in helper scripts.
2001
2000
2002 This command is not intended to be used by normal users, as it is
2001 This command is not intended to be used by normal users, as it is
2003 primarily useful for importing from other SCMs.
2002 primarily useful for importing from other SCMs.
2004
2003
2005 This command is now deprecated and will be removed in a future
2004 This command is now deprecated and will be removed in a future
2006 release, please use debugsetparents and commit instead.
2005 release, please use debugsetparents and commit instead.
2007 """
2006 """
2008
2007
2009 ui.warn(_("(the rawcommit command is deprecated)\n"))
2008 ui.warn(_("(the rawcommit command is deprecated)\n"))
2010
2009
2011 message = logmessage(opts)
2010 message = logmessage(opts)
2012
2011
2013 files, match, anypats = cmdutil.matchpats(repo, pats, opts)
2012 files, match, anypats = cmdutil.matchpats(repo, pats, opts)
2014 if opts['files']:
2013 if opts['files']:
2015 files += open(opts['files']).read().splitlines()
2014 files += open(opts['files']).read().splitlines()
2016
2015
2017 parents = [repo.lookup(p) for p in opts['parent']]
2016 parents = [repo.lookup(p) for p in opts['parent']]
2018
2017
2019 try:
2018 try:
2020 repo.rawcommit(files, message, opts['user'], opts['date'], *parents)
2019 repo.rawcommit(files, message, opts['user'], opts['date'], *parents)
2021 except ValueError, inst:
2020 except ValueError, inst:
2022 raise util.Abort(str(inst))
2021 raise util.Abort(str(inst))
2023
2022
2024 def recover(ui, repo):
2023 def recover(ui, repo):
2025 """roll back an interrupted transaction
2024 """roll back an interrupted transaction
2026
2025
2027 Recover from an interrupted commit or pull.
2026 Recover from an interrupted commit or pull.
2028
2027
2029 This command tries to fix the repository status after an interrupted
2028 This command tries to fix the repository status after an interrupted
2030 operation. It should only be necessary when Mercurial suggests it.
2029 operation. It should only be necessary when Mercurial suggests it.
2031 """
2030 """
2032 if repo.recover():
2031 if repo.recover():
2033 return hg.verify(repo)
2032 return hg.verify(repo)
2034 return 1
2033 return 1
2035
2034
2036 def remove(ui, repo, *pats, **opts):
2035 def remove(ui, repo, *pats, **opts):
2037 """remove the specified files on the next commit
2036 """remove the specified files on the next commit
2038
2037
2039 Schedule the indicated files for removal from the repository.
2038 Schedule the indicated files for removal from the repository.
2040
2039
2041 This only removes files from the current branch, not from the
2040 This only removes files from the current branch, not from the
2042 entire project history. If the files still exist in the working
2041 entire project history. If the files still exist in the working
2043 directory, they will be deleted from it. If invoked with --after,
2042 directory, they will be deleted from it. If invoked with --after,
2044 files that have been manually deleted are marked as removed.
2043 files that have been manually deleted are marked as removed.
2045
2044
2046 This command schedules the files to be removed at the next commit.
2045 This command schedules the files to be removed at the next commit.
2047 To undo a remove before that, see hg revert.
2046 To undo a remove before that, see hg revert.
2048
2047
2049 Modified files and added files are not removed by default. To
2048 Modified files and added files are not removed by default. To
2050 remove them, use the -f/--force option.
2049 remove them, use the -f/--force option.
2051 """
2050 """
2052 names = []
2051 names = []
2053 if not opts['after'] and not pats:
2052 if not opts['after'] and not pats:
2054 raise util.Abort(_('no files specified'))
2053 raise util.Abort(_('no files specified'))
2055 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2054 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2056 exact = dict.fromkeys(files)
2055 exact = dict.fromkeys(files)
2057 mardu = map(dict.fromkeys, repo.status(files=files, match=matchfn))[:5]
2056 mardu = map(dict.fromkeys, repo.status(files=files, match=matchfn))[:5]
2058 modified, added, removed, deleted, unknown = mardu
2057 modified, added, removed, deleted, unknown = mardu
2059 remove, forget = [], []
2058 remove, forget = [], []
2060 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
2059 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
2061 reason = None
2060 reason = None
2062 if abs not in deleted and opts['after']:
2061 if abs not in deleted and opts['after']:
2063 reason = _('is still present')
2062 reason = _('is still present')
2064 elif abs in modified and not opts['force']:
2063 elif abs in modified and not opts['force']:
2065 reason = _('is modified (use -f to force removal)')
2064 reason = _('is modified (use -f to force removal)')
2066 elif abs in added:
2065 elif abs in added:
2067 if opts['force']:
2066 if opts['force']:
2068 forget.append(abs)
2067 forget.append(abs)
2069 continue
2068 continue
2070 reason = _('has been marked for add (use -f to force removal)')
2069 reason = _('has been marked for add (use -f to force removal)')
2071 elif abs in unknown:
2070 elif abs in unknown:
2072 reason = _('is not managed')
2071 reason = _('is not managed')
2073 elif abs in removed:
2072 elif abs in removed:
2074 continue
2073 continue
2075 if reason:
2074 if reason:
2076 if exact:
2075 if exact:
2077 ui.warn(_('not removing %s: file %s\n') % (rel, reason))
2076 ui.warn(_('not removing %s: file %s\n') % (rel, reason))
2078 else:
2077 else:
2079 if ui.verbose or not exact:
2078 if ui.verbose or not exact:
2080 ui.status(_('removing %s\n') % rel)
2079 ui.status(_('removing %s\n') % rel)
2081 remove.append(abs)
2080 remove.append(abs)
2082 repo.forget(forget)
2081 repo.forget(forget)
2083 repo.remove(remove, unlink=not opts['after'])
2082 repo.remove(remove, unlink=not opts['after'])
2084
2083
2085 def rename(ui, repo, *pats, **opts):
2084 def rename(ui, repo, *pats, **opts):
2086 """rename files; equivalent of copy + remove
2085 """rename files; equivalent of copy + remove
2087
2086
2088 Mark dest as copies of sources; mark sources for deletion. If
2087 Mark dest as copies of sources; mark sources for deletion. If
2089 dest is a directory, copies are put in that directory. If dest is
2088 dest is a directory, copies are put in that directory. If dest is
2090 a file, there can only be one source.
2089 a file, there can only be one source.
2091
2090
2092 By default, this command copies the contents of files as they
2091 By default, this command copies the contents of files as they
2093 stand in the working directory. If invoked with --after, the
2092 stand in the working directory. If invoked with --after, the
2094 operation is recorded, but no copying is performed.
2093 operation is recorded, but no copying is performed.
2095
2094
2096 This command takes effect in the next commit. To undo a rename
2095 This command takes effect in the next commit. To undo a rename
2097 before that, see hg revert.
2096 before that, see hg revert.
2098 """
2097 """
2099 wlock = repo.wlock(0)
2098 wlock = repo.wlock(0)
2100 errs, copied = docopy(ui, repo, pats, opts, wlock)
2099 errs, copied = docopy(ui, repo, pats, opts, wlock)
2101 names = []
2100 names = []
2102 for abs, rel, exact in copied:
2101 for abs, rel, exact in copied:
2103 if ui.verbose or not exact:
2102 if ui.verbose or not exact:
2104 ui.status(_('removing %s\n') % rel)
2103 ui.status(_('removing %s\n') % rel)
2105 names.append(abs)
2104 names.append(abs)
2106 if not opts.get('dry_run'):
2105 if not opts.get('dry_run'):
2107 repo.remove(names, True, wlock)
2106 repo.remove(names, True, wlock)
2108 return errs
2107 return errs
2109
2108
2110 def revert(ui, repo, *pats, **opts):
2109 def revert(ui, repo, *pats, **opts):
2111 """revert files or dirs to their states as of some revision
2110 """revert files or dirs to their states as of some revision
2112
2111
2113 With no revision specified, revert the named files or directories
2112 With no revision specified, revert the named files or directories
2114 to the contents they had in the parent of the working directory.
2113 to the contents they had in the parent of the working directory.
2115 This restores the contents of the affected files to an unmodified
2114 This restores the contents of the affected files to an unmodified
2116 state and unschedules adds, removes, copies, and renames. If the
2115 state and unschedules adds, removes, copies, and renames. If the
2117 working directory has two parents, you must explicitly specify the
2116 working directory has two parents, you must explicitly specify the
2118 revision to revert to.
2117 revision to revert to.
2119
2118
2120 Modified files are saved with a .orig suffix before reverting.
2119 Modified files are saved with a .orig suffix before reverting.
2121 To disable these backups, use --no-backup.
2120 To disable these backups, use --no-backup.
2122
2121
2123 Using the -r option, revert the given files or directories to their
2122 Using the -r option, revert the given files or directories to their
2124 contents as of a specific revision. This can be helpful to "roll
2123 contents as of a specific revision. This can be helpful to "roll
2125 back" some or all of a change that should not have been committed.
2124 back" some or all of a change that should not have been committed.
2126
2125
2127 Revert modifies the working directory. It does not commit any
2126 Revert modifies the working directory. It does not commit any
2128 changes, or change the parent of the working directory. If you
2127 changes, or change the parent of the working directory. If you
2129 revert to a revision other than the parent of the working
2128 revert to a revision other than the parent of the working
2130 directory, the reverted files will thus appear modified
2129 directory, the reverted files will thus appear modified
2131 afterwards.
2130 afterwards.
2132
2131
2133 If a file has been deleted, it is recreated. If the executable
2132 If a file has been deleted, it is recreated. If the executable
2134 mode of a file was changed, it is reset.
2133 mode of a file was changed, it is reset.
2135
2134
2136 If names are given, all files matching the names are reverted.
2135 If names are given, all files matching the names are reverted.
2137
2136
2138 If no arguments are given, no files are reverted.
2137 If no arguments are given, no files are reverted.
2139 """
2138 """
2140
2139
2141 if opts["date"]:
2140 if opts["date"]:
2142 if opts["rev"]:
2141 if opts["rev"]:
2143 raise util.Abort(_("you can't specify a revision and a date"))
2142 raise util.Abort(_("you can't specify a revision and a date"))
2144 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
2143 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
2145
2144
2146 if not pats and not opts['all']:
2145 if not pats and not opts['all']:
2147 raise util.Abort(_('no files or directories specified; '
2146 raise util.Abort(_('no files or directories specified; '
2148 'use --all to revert the whole repo'))
2147 'use --all to revert the whole repo'))
2149
2148
2150 parent, p2 = repo.dirstate.parents()
2149 parent, p2 = repo.dirstate.parents()
2151 if not opts['rev'] and p2 != nullid:
2150 if not opts['rev'] and p2 != nullid:
2152 raise util.Abort(_('uncommitted merge - please provide a '
2151 raise util.Abort(_('uncommitted merge - please provide a '
2153 'specific revision'))
2152 'specific revision'))
2154 ctx = repo.changectx(opts['rev'])
2153 ctx = repo.changectx(opts['rev'])
2155 node = ctx.node()
2154 node = ctx.node()
2156 mf = ctx.manifest()
2155 mf = ctx.manifest()
2157 if node == parent:
2156 if node == parent:
2158 pmf = mf
2157 pmf = mf
2159 else:
2158 else:
2160 pmf = None
2159 pmf = None
2161
2160
2162 wlock = repo.wlock()
2161 wlock = repo.wlock()
2163
2162
2164 # need all matching names in dirstate and manifest of target rev,
2163 # need all matching names in dirstate and manifest of target rev,
2165 # so have to walk both. do not print errors if files exist in one
2164 # so have to walk both. do not print errors if files exist in one
2166 # but not other.
2165 # but not other.
2167
2166
2168 names = {}
2167 names = {}
2169 target_only = {}
2168 target_only = {}
2170
2169
2171 # walk dirstate.
2170 # walk dirstate.
2172
2171
2173 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
2172 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
2174 badmatch=mf.has_key):
2173 badmatch=mf.has_key):
2175 names[abs] = (rel, exact)
2174 names[abs] = (rel, exact)
2176 if src == 'b':
2175 if src == 'b':
2177 target_only[abs] = True
2176 target_only[abs] = True
2178
2177
2179 # walk target manifest.
2178 # walk target manifest.
2180
2179
2181 def badmatch(path):
2180 def badmatch(path):
2182 if path in names:
2181 if path in names:
2183 return True
2182 return True
2184 path_ = path + '/'
2183 path_ = path + '/'
2185 for f in names:
2184 for f in names:
2186 if f.startswith(path_):
2185 if f.startswith(path_):
2187 return True
2186 return True
2188 return False
2187 return False
2189
2188
2190 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
2189 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
2191 badmatch=badmatch):
2190 badmatch=badmatch):
2192 if abs in names or src == 'b':
2191 if abs in names or src == 'b':
2193 continue
2192 continue
2194 names[abs] = (rel, exact)
2193 names[abs] = (rel, exact)
2195 target_only[abs] = True
2194 target_only[abs] = True
2196
2195
2197 changes = repo.status(match=names.has_key, wlock=wlock)[:5]
2196 changes = repo.status(match=names.has_key, wlock=wlock)[:5]
2198 modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)
2197 modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)
2199
2198
2200 revert = ([], _('reverting %s\n'))
2199 revert = ([], _('reverting %s\n'))
2201 add = ([], _('adding %s\n'))
2200 add = ([], _('adding %s\n'))
2202 remove = ([], _('removing %s\n'))
2201 remove = ([], _('removing %s\n'))
2203 forget = ([], _('forgetting %s\n'))
2202 forget = ([], _('forgetting %s\n'))
2204 undelete = ([], _('undeleting %s\n'))
2203 undelete = ([], _('undeleting %s\n'))
2205 update = {}
2204 update = {}
2206
2205
2207 disptable = (
2206 disptable = (
2208 # dispatch table:
2207 # dispatch table:
2209 # file state
2208 # file state
2210 # action if in target manifest
2209 # action if in target manifest
2211 # action if not in target manifest
2210 # action if not in target manifest
2212 # make backup if in target manifest
2211 # make backup if in target manifest
2213 # make backup if not in target manifest
2212 # make backup if not in target manifest
2214 (modified, revert, remove, True, True),
2213 (modified, revert, remove, True, True),
2215 (added, revert, forget, True, False),
2214 (added, revert, forget, True, False),
2216 (removed, undelete, None, False, False),
2215 (removed, undelete, None, False, False),
2217 (deleted, revert, remove, False, False),
2216 (deleted, revert, remove, False, False),
2218 (unknown, add, None, True, False),
2217 (unknown, add, None, True, False),
2219 (target_only, add, None, False, False),
2218 (target_only, add, None, False, False),
2220 )
2219 )
2221
2220
2222 entries = names.items()
2221 entries = names.items()
2223 entries.sort()
2222 entries.sort()
2224
2223
2225 for abs, (rel, exact) in entries:
2224 for abs, (rel, exact) in entries:
2226 mfentry = mf.get(abs)
2225 mfentry = mf.get(abs)
2227 def handle(xlist, dobackup):
2226 def handle(xlist, dobackup):
2228 xlist[0].append(abs)
2227 xlist[0].append(abs)
2229 update[abs] = 1
2228 update[abs] = 1
2230 if dobackup and not opts['no_backup'] and os.path.exists(rel):
2229 if dobackup and not opts['no_backup'] and os.path.exists(rel):
2231 bakname = "%s.orig" % rel
2230 bakname = "%s.orig" % rel
2232 ui.note(_('saving current version of %s as %s\n') %
2231 ui.note(_('saving current version of %s as %s\n') %
2233 (rel, bakname))
2232 (rel, bakname))
2234 if not opts.get('dry_run'):
2233 if not opts.get('dry_run'):
2235 util.copyfile(rel, bakname)
2234 util.copyfile(rel, bakname)
2236 if ui.verbose or not exact:
2235 if ui.verbose or not exact:
2237 ui.status(xlist[1] % rel)
2236 ui.status(xlist[1] % rel)
2238 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2237 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2239 if abs not in table: continue
2238 if abs not in table: continue
2240 # file has changed in dirstate
2239 # file has changed in dirstate
2241 if mfentry:
2240 if mfentry:
2242 handle(hitlist, backuphit)
2241 handle(hitlist, backuphit)
2243 elif misslist is not None:
2242 elif misslist is not None:
2244 handle(misslist, backupmiss)
2243 handle(misslist, backupmiss)
2245 else:
2244 else:
2246 if exact: ui.warn(_('file not managed: %s\n') % rel)
2245 if exact: ui.warn(_('file not managed: %s\n') % rel)
2247 break
2246 break
2248 else:
2247 else:
2249 # file has not changed in dirstate
2248 # file has not changed in dirstate
2250 if node == parent:
2249 if node == parent:
2251 if exact: ui.warn(_('no changes needed to %s\n') % rel)
2250 if exact: ui.warn(_('no changes needed to %s\n') % rel)
2252 continue
2251 continue
2253 if pmf is None:
2252 if pmf is None:
2254 # only need parent manifest in this unlikely case,
2253 # only need parent manifest in this unlikely case,
2255 # so do not read by default
2254 # so do not read by default
2256 pmf = repo.changectx(parent).manifest()
2255 pmf = repo.changectx(parent).manifest()
2257 if abs in pmf:
2256 if abs in pmf:
2258 if mfentry:
2257 if mfentry:
2259 # if version of file is same in parent and target
2258 # if version of file is same in parent and target
2260 # manifests, do nothing
2259 # manifests, do nothing
2261 if pmf[abs] != mfentry:
2260 if pmf[abs] != mfentry:
2262 handle(revert, False)
2261 handle(revert, False)
2263 else:
2262 else:
2264 handle(remove, False)
2263 handle(remove, False)
2265
2264
2266 if not opts.get('dry_run'):
2265 if not opts.get('dry_run'):
2267 repo.dirstate.forget(forget[0])
2266 repo.dirstate.forget(forget[0])
2268 r = hg.revert(repo, node, update.has_key, wlock)
2267 r = hg.revert(repo, node, update.has_key, wlock)
2269 repo.dirstate.update(add[0], 'a')
2268 repo.dirstate.update(add[0], 'a')
2270 repo.dirstate.update(undelete[0], 'n')
2269 repo.dirstate.update(undelete[0], 'n')
2271 repo.dirstate.update(remove[0], 'r')
2270 repo.dirstate.update(remove[0], 'r')
2272 return r
2271 return r
2273
2272
def rollback(ui, repo):
    """roll back the last transaction in this repository

    Roll back the last transaction in this repository, restoring the
    project to its state prior to the transaction.

    Transactions are used to encapsulate the effects of all commands
    that create new changesets or propagate existing changesets into a
    repository. For example, the following commands are transactional,
    and their effects can be rolled back:

      commit
      import
      pull
      push (with this repository as destination)
      unbundle

    This command should be used with care. There is only one level of
    rollback, and there is no way to undo a rollback.

    This command is not intended for use on public repositories. Once
    changes are visible for pull by other users, rolling a transaction
    back locally is ineffective (someone else may already have pulled
    the changes). Furthermore, a race is possible with readers of the
    repository; for example an in-progress pull from the repository
    may fail if a rollback is performed.
    """
    # All the work is delegated to the repository object; the command
    # layer only supplies the help text above.
    repo.rollback()
2302
2301
def root(ui, repo):
    """print the root (top) of the current working dir

    Print the root directory of the current repository.
    """
    # repo.root is an absolute path without a trailing newline.
    ui.write(repo.root + "\n")
2309
2308
def serve(ui, repo, **opts):
    """export the repository via HTTP

    Start a local HTTP repository browser and pull server.

    By default, the server logs accesses to stdout and errors to
    stderr. Use the "-A" and "-E" options to log to files.
    """

    # --stdio: speak the ssh wire protocol on stdin/stdout and never
    # return (sshserver.serve_forever exits the process).
    if opts["stdio"]:
        if repo is None:
            raise hg.RepoError(_("There is no Mercurial repository here"
                                 " (.hg not found)"))
        s = sshserver.sshserver(ui, repo)
        s.serve_forever()

    # Mirror the relevant command-line options into the [web] section of
    # the parent ui so hgweb picks them up.
    parentui = ui.parentui or ui
    optlist = ("name templates style address port ipv6"
               " accesslog errorlog webdir_conf")
    for o in optlist.split():
        if opts[o]:
            parentui.setconfig("web", o, str(opts[o]))

    if repo is None and not ui.config("web", "webdir_conf"):
        raise hg.RepoError(_("There is no Mercurial repository here"
                             " (.hg not found)"))

    if opts['daemon'] and not opts['daemon_pipefds']:
        # Parent half of daemonization: respawn ourselves with a pipe,
        # then block until the child signals readiness before exiting.
        rfd, wfd = os.pipe()
        args = sys.argv[:]
        args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
        pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
                         args[0], args)
        os.close(wfd)
        os.read(rfd, 1)
        os._exit(0)

    httpd = hgweb.server.create_server(parentui, repo)

    if ui.verbose:
        if httpd.port != 80:
            ui.status(_('listening at http://%s:%d/\n') %
                      (httpd.addr, httpd.port))
        else:
            ui.status(_('listening at http://%s/\n') % httpd.addr)

    if opts['pid_file']:
        fp = open(opts['pid_file'], 'w')
        fp.write(str(os.getpid()) + '\n')
        fp.close()

    if opts['daemon_pipefds']:
        # Child half of daemonization: tell the parent we are up, then
        # detach all three standard streams onto the null device.
        rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
        os.close(rfd)
        os.write(wfd, 'y')
        os.close(wfd)
        sys.stdout.flush()
        sys.stderr.flush()
        fd = os.open(util.nulldev, os.O_RDWR)
        for std in (0, 1, 2):
            if fd != std:
                os.dup2(fd, std)
        if fd not in (0, 1, 2):
            os.close(fd)

    httpd.serve_forever()
2375
2374
def status(ui, repo, *pats, **opts):
    """show changed files in the working directory

    Show status of files in the repository. If names are given, only
    files that match are shown. Files that are clean or ignored, are
    not listed unless -c (clean), -i (ignored) or -A is given.

    NOTE: status may appear to disagree with diff if permissions have
    changed or a merge has occurred. The standard diff format does not
    report permission changes and diff only reports changes relative
    to one merge parent.

    If one revision is given, it is used as the base revision.
    If two revisions are given, the difference between them is shown.

    The codes used to show the status of files are:
    M = modified
    A = added
    R = removed
    C = clean
    ! = deleted, but still tracked
    ? = not tracked
    I = ignored (not shown by default)
      = the previous added file was copied from here
    """

    show_all = opts['all']
    node1, node2 = cmdutil.revpair(repo, opts.get('rev'))

    files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
    # Paths are printed relative to cwd only when patterns were given.
    cwd = (pats and repo.getcwd()) or ''
    modified, added, removed, deleted, unknown, ignored, clean = [
        n for n in repo.status(node1=node1, node2=node2, files=files,
                               match=matchfn,
                               list_ignored=show_all or opts['ignored'],
                               list_clean=show_all or opts['clean'])]

    # (option name, status letter, file list) triples in display order.
    changetypes = (('modified', 'M', modified),
                   ('added', 'A', added),
                   ('removed', 'R', removed),
                   ('deleted', '!', deleted),
                   ('unknown', '?', unknown),
                   ('ignored', 'I', ignored))

    # clean files are only shown when explicitly requested
    explicit_changetypes = changetypes + (('clean', 'C', clean),)

    end = opts['print0'] and '\0' or '\n'

    for opt, char, filelist in ([ct for ct in explicit_changetypes
                                 if show_all or opts[ct[0]]]
                                or changetypes):
        if opts['no_status']:
            fmt = "%%s%s" % end
        else:
            fmt = "%s %%s%s" % (char, end)

        for f in filelist:
            ui.write(fmt % util.pathto(repo.root, cwd, f))
            # Append copy source on a continuation line when requested.
            if ((show_all or opts.get('copies')) and not opts.get('no_status')):
                copied = repo.dirstate.copied(f)
                if copied:
                    ui.write(' %s%s' % (util.pathto(repo.root, cwd, copied),
                                        end))
2439
2438
def tag(ui, repo, name, rev_=None, **opts):
    """add a tag for the current or given revision

    Name a particular revision using <name>.

    Tags are used to name particular revisions of the repository and are
    very useful to compare different revision, to go back to significant
    earlier versions or to mark branch points as releases, etc.

    If no revision is given, the parent of the working directory is used,
    or tip if no revision is checked out.

    To facilitate version control, distribution, and merging of tags,
    they are stored as a file named ".hgtags" which is managed
    similarly to other project files and can be hand-edited if
    necessary. The file '.hg/localtags' is used for local tags (not
    shared among repositories).
    """
    # These names have built-in meanings and may never be tagged.
    if name in ['tip', '.', 'null']:
        raise util.Abort(_("the name '%s' is reserved") % name)

    # Positional revision argument is deprecated in favour of -r.
    if rev_ is not None:
        ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
                  "please use 'hg tag [-r REV] NAME' instead\n"))
        if opts['rev']:
            raise util.Abort(_("use only one form to specify the revision"))
    if opts['rev'] and opts['remove']:
        raise util.Abort(_("--rev and --remove are incompatible"))
    if opts['rev']:
        rev_ = opts['rev']

    message = opts['message']
    if opts['remove']:
        # Removing a tag is recorded as tagging the null revision.
        rev_ = nullid
        if not message:
            message = _('Removed tag %s') % name
    if not rev_ and repo.dirstate.parents()[1] != nullid:
        raise util.Abort(_('uncommitted merge - please provide a '
                           'specific revision'))
    r = repo.changectx(rev_).node()

    if not message:
        message = _('Added tag %s for changeset %s') % (name, short(r))

    repo.tag(name, r, message, opts['local'], opts['user'], opts['date'])
2483
2482
def tags(ui, repo):
    """list repository tags

    List the repository tags.

    This lists both regular and local tags.
    """

    l = repo.tagslist()
    l.reverse()
    # Short hashes normally; full hashes under --debug.
    hexfunc = ui.debugflag and hex or short
    for t, n in l:
        try:
            # Compute the hash once; the original called hexfunc(n) a
            # second time on the success path and ignored hn.
            hn = hexfunc(n)
            r = "%5d:%s" % (repo.changelog.rev(n), hn)
        except revlog.LookupError:
            # Tag points at a node unknown to the changelog (e.g. a
            # tag from a stripped revision): show '?' for the rev.
            r = "    ?:%s" % hn
        if ui.quiet:
            ui.write("%s\n" % t)
        else:
            # Pad tag names to a 30-column field (locale-aware width).
            spaces = " " * (30 - util.locallen(t))
            ui.write("%s%s %s\n" % (t, spaces, r))
2506
2505
def tip(ui, repo, **opts):
    """show the tip revision

    Show the tip revision.
    """
    # count() - 1 is the tip revision; nullrev is -1, so this is the
    # same as nullrev + count().
    cmdutil.show_changeset(ui, repo, opts).show(nullrev+repo.changelog.count())
2513
2512
def unbundle(ui, repo, fname, **opts):
    """apply a changegroup file

    Apply a compressed changegroup file generated by the bundle
    command.
    """
    # Accept either a local path or a URL as the bundle source.
    # NOTE(review): the stream is not explicitly closed; the changegroup
    # reader consumes it lazily, so it must stay open until
    # addchangegroup finishes.
    if os.path.exists(fname):
        f = open(fname, "rb")
    else:
        f = urllib.urlopen(fname)
    gen = changegroup.readbundle(f, fname)
    modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
    return postincoming(ui, repo, modheads, opts['update'])
2527
2526
def update(ui, repo, node=None, clean=False, date=None):
    """update working directory

    Update the working directory to the specified revision, or the
    tip of the current branch if none is specified.

    If there are no outstanding changes in the working directory and
    there is a linear relationship between the current version and the
    requested version, the result is the requested version.

    To merge the working directory with another revision, use the
    merge command.

    By default, update will refuse to run if doing so would require
    discarding local changes.
    """
    if date:
        # --date and an explicit revision are mutually exclusive.
        if node:
            raise util.Abort(_("you can't specify a revision and a date"))
        node = cmdutil.finddate(ui, repo, date)

    # clean update discards local changes; plain update refuses to.
    if clean:
        return hg.clean(repo, node)
    else:
        return hg.update(repo, node)
2553
2552
def verify(ui, repo):
    """verify the integrity of the repository

    Verify the integrity of the current repository.

    This will perform an extensive check of the repository's
    integrity, validating the hashes and checksums of each entry in
    the changelog, manifest, and tracked files, as well as the
    integrity of their crosslinks and indices.
    """
    # hg.verify returns the number of errors found (nonzero == failure).
    return hg.verify(repo)
2565
2564
def version_(ui):
    """output version and copyright information"""
    # Trailing underscore avoids shadowing the imported version module.
    ui.write(_("Mercurial Distributed SCM (version %s)\n")
             % version.get_version())
    # Copyright notice goes through ui.status so -q suppresses it.
    ui.status(_(
        "\nCopyright (C) 2005, 2006 Matt Mackall <mpm@selenic.com>\n"
        "This is free software; see the source for copying conditions. "
        "There is NO\nwarranty; "
        "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
    ))
2576
2575
2577 # Command options and aliases are listed here, alphabetically
2576 # Command options and aliases are listed here, alphabetically
2578
2577
# Shared option tables.  Each entry is (short flag, long flag, default,
# help text); these lists are concatenated into per-command option lists
# in the command table below.

# Options accepted by every command.
globalopts = [
    ('R', 'repository', '',
     _('repository root directory or symbolic path name')),
    ('', 'cwd', '', _('change working directory')),
    ('y', 'noninteractive', None,
     _('do not prompt, assume \'yes\' for any required answers')),
    ('q', 'quiet', None, _('suppress output')),
    ('v', 'verbose', None, _('enable additional output')),
    ('', 'config', [], _('set/override config option')),
    ('', 'debug', None, _('enable debugging output')),
    ('', 'debugger', None, _('start debugger')),
    ('', 'encoding', util._encoding, _('set the charset encoding')),
    ('', 'encodingmode', util._encodingmode, _('set the charset encoding mode')),
    ('', 'lsprof', None, _('print improved command execution profile')),
    ('', 'traceback', None, _('print traceback on exception')),
    ('', 'time', None, _('time how long the command takes')),
    ('', 'profile', None, _('print command execution profile')),
    ('', 'version', None, _('output version information and exit')),
    ('h', 'help', None, _('display help and exit')),
]

# -n/--dry-run for commands that can preview their effects.
dryrunopts = [('n', 'dry-run', None,
               _('do not perform actions, just print output'))]

# Options for commands that talk to a remote repository.
remoteopts = [
    ('e', 'ssh', '', _('specify ssh command to use')),
    ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
]

# Include/exclude pattern options for commands that walk the tree.
walkopts = [
    ('I', 'include', [], _('include names matching the given patterns')),
    ('X', 'exclude', [], _('exclude names matching the given patterns')),
]

# Commit-message options shared by commit-like commands.
commitopts = [
    ('m', 'message', '', _('use <text> as commit message')),
    ('l', 'logfile', '', _('read commit message from <file>')),
]
2617
2616
2618 table = {
2617 table = {
2619 "^add": (add, walkopts + dryrunopts, _('hg add [OPTION]... [FILE]...')),
2618 "^add": (add, walkopts + dryrunopts, _('hg add [OPTION]... [FILE]...')),
2620 "addremove":
2619 "addremove":
2621 (addremove,
2620 (addremove,
2622 [('s', 'similarity', '',
2621 [('s', 'similarity', '',
2623 _('guess renamed files by similarity (0<=s<=100)')),
2622 _('guess renamed files by similarity (0<=s<=100)')),
2624 ] + walkopts + dryrunopts,
2623 ] + walkopts + dryrunopts,
2625 _('hg addremove [OPTION]... [FILE]...')),
2624 _('hg addremove [OPTION]... [FILE]...')),
2626 "^annotate":
2625 "^annotate":
2627 (annotate,
2626 (annotate,
2628 [('r', 'rev', '', _('annotate the specified revision')),
2627 [('r', 'rev', '', _('annotate the specified revision')),
2629 ('f', 'follow', None, _('follow file copies and renames')),
2628 ('f', 'follow', None, _('follow file copies and renames')),
2630 ('a', 'text', None, _('treat all files as text')),
2629 ('a', 'text', None, _('treat all files as text')),
2631 ('u', 'user', None, _('list the author')),
2630 ('u', 'user', None, _('list the author')),
2632 ('d', 'date', None, _('list the date')),
2631 ('d', 'date', None, _('list the date')),
2633 ('n', 'number', None, _('list the revision number (default)')),
2632 ('n', 'number', None, _('list the revision number (default)')),
2634 ('c', 'changeset', None, _('list the changeset')),
2633 ('c', 'changeset', None, _('list the changeset')),
2635 ] + walkopts,
2634 ] + walkopts,
2636 _('hg annotate [-r REV] [-f] [-a] [-u] [-d] [-n] [-c] FILE...')),
2635 _('hg annotate [-r REV] [-f] [-a] [-u] [-d] [-n] [-c] FILE...')),
2637 "archive":
2636 "archive":
2638 (archive,
2637 (archive,
2639 [('', 'no-decode', None, _('do not pass files through decoders')),
2638 [('', 'no-decode', None, _('do not pass files through decoders')),
2640 ('p', 'prefix', '', _('directory prefix for files in archive')),
2639 ('p', 'prefix', '', _('directory prefix for files in archive')),
2641 ('r', 'rev', '', _('revision to distribute')),
2640 ('r', 'rev', '', _('revision to distribute')),
2642 ('t', 'type', '', _('type of distribution to create')),
2641 ('t', 'type', '', _('type of distribution to create')),
2643 ] + walkopts,
2642 ] + walkopts,
2644 _('hg archive [OPTION]... DEST')),
2643 _('hg archive [OPTION]... DEST')),
2645 "backout":
2644 "backout":
2646 (backout,
2645 (backout,
2647 [('', 'merge', None,
2646 [('', 'merge', None,
2648 _('merge with old dirstate parent after backout')),
2647 _('merge with old dirstate parent after backout')),
2649 ('d', 'date', '', _('record datecode as commit date')),
2648 ('d', 'date', '', _('record datecode as commit date')),
2650 ('', 'parent', '', _('parent to choose when backing out merge')),
2649 ('', 'parent', '', _('parent to choose when backing out merge')),
2651 ('u', 'user', '', _('record user as committer')),
2650 ('u', 'user', '', _('record user as committer')),
2652 ] + walkopts + commitopts,
2651 ] + walkopts + commitopts,
2653 _('hg backout [OPTION]... REV')),
2652 _('hg backout [OPTION]... REV')),
2654 "branch": (branch,
2653 "branch": (branch,
2655 [('f', 'force', None,
2654 [('f', 'force', None,
2656 _('set branch name even if it shadows an existing branch'))],
2655 _('set branch name even if it shadows an existing branch'))],
2657 _('hg branch [NAME]')),
2656 _('hg branch [NAME]')),
2658 "branches": (branches, [], _('hg branches')),
2657 "branches": (branches, [], _('hg branches')),
2659 "bundle":
2658 "bundle":
2660 (bundle,
2659 (bundle,
2661 [('f', 'force', None,
2660 [('f', 'force', None,
2662 _('run even when remote repository is unrelated')),
2661 _('run even when remote repository is unrelated')),
2663 ('r', 'rev', [],
2662 ('r', 'rev', [],
2664 _('a changeset you would like to bundle')),
2663 _('a changeset you would like to bundle')),
2665 ('', 'base', [],
2664 ('', 'base', [],
2666 _('a base changeset to specify instead of a destination')),
2665 _('a base changeset to specify instead of a destination')),
2667 ] + remoteopts,
2666 ] + remoteopts,
2668 _('hg bundle [-f] [-r REV]... [--base REV]... FILE [DEST]')),
2667 _('hg bundle [-f] [-r REV]... [--base REV]... FILE [DEST]')),
2669 "cat":
2668 "cat":
2670 (cat,
2669 (cat,
2671 [('o', 'output', '', _('print output to file with formatted name')),
2670 [('o', 'output', '', _('print output to file with formatted name')),
2672 ('r', 'rev', '', _('print the given revision')),
2671 ('r', 'rev', '', _('print the given revision')),
2673 ] + walkopts,
2672 ] + walkopts,
2674 _('hg cat [OPTION]... FILE...')),
2673 _('hg cat [OPTION]... FILE...')),
2675 "^clone":
2674 "^clone":
2676 (clone,
2675 (clone,
2677 [('U', 'noupdate', None, _('do not update the new working directory')),
2676 [('U', 'noupdate', None, _('do not update the new working directory')),
2678 ('r', 'rev', [],
2677 ('r', 'rev', [],
2679 _('a changeset you would like to have after cloning')),
2678 _('a changeset you would like to have after cloning')),
2680 ('', 'pull', None, _('use pull protocol to copy metadata')),
2679 ('', 'pull', None, _('use pull protocol to copy metadata')),
2681 ('', 'uncompressed', None,
2680 ('', 'uncompressed', None,
2682 _('use uncompressed transfer (fast over LAN)')),
2681 _('use uncompressed transfer (fast over LAN)')),
2683 ] + remoteopts,
2682 ] + remoteopts,
2684 _('hg clone [OPTION]... SOURCE [DEST]')),
2683 _('hg clone [OPTION]... SOURCE [DEST]')),
2685 "^commit|ci":
2684 "^commit|ci":
2686 (commit,
2685 (commit,
2687 [('A', 'addremove', None,
2686 [('A', 'addremove', None,
2688 _('mark new/missing files as added/removed before committing')),
2687 _('mark new/missing files as added/removed before committing')),
2689 ('d', 'date', '', _('record datecode as commit date')),
2688 ('d', 'date', '', _('record datecode as commit date')),
2690 ('u', 'user', '', _('record user as commiter')),
2689 ('u', 'user', '', _('record user as commiter')),
2691 ] + walkopts + commitopts,
2690 ] + walkopts + commitopts,
2692 _('hg commit [OPTION]... [FILE]...')),
2691 _('hg commit [OPTION]... [FILE]...')),
2693 "copy|cp":
2692 "copy|cp":
2694 (copy,
2693 (copy,
2695 [('A', 'after', None, _('record a copy that has already occurred')),
2694 [('A', 'after', None, _('record a copy that has already occurred')),
2696 ('f', 'force', None,
2695 ('f', 'force', None,
2697 _('forcibly copy over an existing managed file')),
2696 _('forcibly copy over an existing managed file')),
2698 ] + walkopts + dryrunopts,
2697 ] + walkopts + dryrunopts,
2699 _('hg copy [OPTION]... [SOURCE]... DEST')),
2698 _('hg copy [OPTION]... [SOURCE]... DEST')),
2700 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2699 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2701 "debugcomplete":
2700 "debugcomplete":
2702 (debugcomplete,
2701 (debugcomplete,
2703 [('o', 'options', None, _('show the command options'))],
2702 [('o', 'options', None, _('show the command options'))],
2704 _('debugcomplete [-o] CMD')),
2703 _('debugcomplete [-o] CMD')),
2705 "debuginstall": (debuginstall, [], _('debuginstall')),
2704 "debuginstall": (debuginstall, [], _('debuginstall')),
2706 "debugrebuildstate":
2705 "debugrebuildstate":
2707 (debugrebuildstate,
2706 (debugrebuildstate,
2708 [('r', 'rev', '', _('revision to rebuild to'))],
2707 [('r', 'rev', '', _('revision to rebuild to'))],
2709 _('debugrebuildstate [-r REV] [REV]')),
2708 _('debugrebuildstate [-r REV] [REV]')),
2710 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
2709 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
2711 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
2710 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
2712 "debugstate": (debugstate, [], _('debugstate')),
2711 "debugstate": (debugstate, [], _('debugstate')),
2713 "debugdate":
2712 "debugdate":
2714 (debugdate,
2713 (debugdate,
2715 [('e', 'extended', None, _('try extended date formats'))],
2714 [('e', 'extended', None, _('try extended date formats'))],
2716 _('debugdate [-e] DATE [RANGE]')),
2715 _('debugdate [-e] DATE [RANGE]')),
2717 "debugdata": (debugdata, [], _('debugdata FILE REV')),
2716 "debugdata": (debugdata, [], _('debugdata FILE REV')),
2718 "debugindex": (debugindex, [], _('debugindex FILE')),
2717 "debugindex": (debugindex, [], _('debugindex FILE')),
2719 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
2718 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
2720 "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
2719 "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
2721 "debugwalk": (debugwalk, walkopts, _('debugwalk [OPTION]... [FILE]...')),
2720 "debugwalk": (debugwalk, walkopts, _('debugwalk [OPTION]... [FILE]...')),
2722 "^diff":
2721 "^diff":
2723 (diff,
2722 (diff,
2724 [('r', 'rev', [], _('revision')),
2723 [('r', 'rev', [], _('revision')),
2725 ('a', 'text', None, _('treat all files as text')),
2724 ('a', 'text', None, _('treat all files as text')),
2726 ('p', 'show-function', None,
2725 ('p', 'show-function', None,
2727 _('show which function each change is in')),
2726 _('show which function each change is in')),
2728 ('g', 'git', None, _('use git extended diff format')),
2727 ('g', 'git', None, _('use git extended diff format')),
2729 ('', 'nodates', None, _("don't include dates in diff headers")),
2728 ('', 'nodates', None, _("don't include dates in diff headers")),
2730 ('w', 'ignore-all-space', None,
2729 ('w', 'ignore-all-space', None,
2731 _('ignore white space when comparing lines')),
2730 _('ignore white space when comparing lines')),
2732 ('b', 'ignore-space-change', None,
2731 ('b', 'ignore-space-change', None,
2733 _('ignore changes in the amount of white space')),
2732 _('ignore changes in the amount of white space')),
2734 ('B', 'ignore-blank-lines', None,
2733 ('B', 'ignore-blank-lines', None,
2735 _('ignore changes whose lines are all blank')),
2734 _('ignore changes whose lines are all blank')),
2736 ] + walkopts,
2735 ] + walkopts,
2737 _('hg diff [OPTION]... [-r REV1 [-r REV2]] [FILE]...')),
2736 _('hg diff [OPTION]... [-r REV1 [-r REV2]] [FILE]...')),
2738 "^export":
2737 "^export":
2739 (export,
2738 (export,
2740 [('o', 'output', '', _('print output to file with formatted name')),
2739 [('o', 'output', '', _('print output to file with formatted name')),
2741 ('a', 'text', None, _('treat all files as text')),
2740 ('a', 'text', None, _('treat all files as text')),
2742 ('g', 'git', None, _('use git extended diff format')),
2741 ('g', 'git', None, _('use git extended diff format')),
2743 ('', 'nodates', None, _("don't include dates in diff headers")),
2742 ('', 'nodates', None, _("don't include dates in diff headers")),
2744 ('', 'switch-parent', None, _('diff against the second parent'))],
2743 ('', 'switch-parent', None, _('diff against the second parent'))],
2745 _('hg export [OPTION]... [-o OUTFILESPEC] REV...')),
2744 _('hg export [OPTION]... [-o OUTFILESPEC] REV...')),
2746 "grep":
2745 "grep":
2747 (grep,
2746 (grep,
2748 [('0', 'print0', None, _('end fields with NUL')),
2747 [('0', 'print0', None, _('end fields with NUL')),
2749 ('', 'all', None, _('print all revisions that match')),
2748 ('', 'all', None, _('print all revisions that match')),
2750 ('f', 'follow', None,
2749 ('f', 'follow', None,
2751 _('follow changeset history, or file history across copies and renames')),
2750 _('follow changeset history, or file history across copies and renames')),
2752 ('i', 'ignore-case', None, _('ignore case when matching')),
2751 ('i', 'ignore-case', None, _('ignore case when matching')),
2753 ('l', 'files-with-matches', None,
2752 ('l', 'files-with-matches', None,
2754 _('print only filenames and revs that match')),
2753 _('print only filenames and revs that match')),
2755 ('n', 'line-number', None, _('print matching line numbers')),
2754 ('n', 'line-number', None, _('print matching line numbers')),
2756 ('r', 'rev', [], _('search in given revision range')),
2755 ('r', 'rev', [], _('search in given revision range')),
2757 ('u', 'user', None, _('print user who committed change')),
2756 ('u', 'user', None, _('print user who committed change')),
2758 ] + walkopts,
2757 ] + walkopts,
2759 _('hg grep [OPTION]... PATTERN [FILE]...')),
2758 _('hg grep [OPTION]... PATTERN [FILE]...')),
2760 "heads":
2759 "heads":
2761 (heads,
2760 (heads,
2762 [('', 'style', '', _('display using template map file')),
2761 [('', 'style', '', _('display using template map file')),
2763 ('r', 'rev', '', _('show only heads which are descendants of rev')),
2762 ('r', 'rev', '', _('show only heads which are descendants of rev')),
2764 ('', 'template', '', _('display with template'))],
2763 ('', 'template', '', _('display with template'))],
2765 _('hg heads [-r REV]')),
2764 _('hg heads [-r REV]')),
2766 "help": (help_, [], _('hg help [COMMAND]')),
2765 "help": (help_, [], _('hg help [COMMAND]')),
2767 "identify|id": (identify, [], _('hg identify')),
2766 "identify|id": (identify, [], _('hg identify')),
2768 "import|patch":
2767 "import|patch":
2769 (import_,
2768 (import_,
2770 [('p', 'strip', 1,
2769 [('p', 'strip', 1,
2771 _('directory strip option for patch. This has the same\n'
2770 _('directory strip option for patch. This has the same\n'
2772 'meaning as the corresponding patch option')),
2771 'meaning as the corresponding patch option')),
2773 ('b', 'base', '', _('base path')),
2772 ('b', 'base', '', _('base path')),
2774 ('f', 'force', None,
2773 ('f', 'force', None,
2775 _('skip check for outstanding uncommitted changes'))] + commitopts,
2774 _('skip check for outstanding uncommitted changes'))] + commitopts,
2776 _('hg import [-p NUM] [-m MESSAGE] [-f] PATCH...')),
2775 _('hg import [-p NUM] [-m MESSAGE] [-f] PATCH...')),
2777 "incoming|in": (incoming,
2776 "incoming|in": (incoming,
2778 [('M', 'no-merges', None, _('do not show merges')),
2777 [('M', 'no-merges', None, _('do not show merges')),
2779 ('f', 'force', None,
2778 ('f', 'force', None,
2780 _('run even when remote repository is unrelated')),
2779 _('run even when remote repository is unrelated')),
2781 ('', 'style', '', _('display using template map file')),
2780 ('', 'style', '', _('display using template map file')),
2782 ('n', 'newest-first', None, _('show newest record first')),
2781 ('n', 'newest-first', None, _('show newest record first')),
2783 ('', 'bundle', '', _('file to store the bundles into')),
2782 ('', 'bundle', '', _('file to store the bundles into')),
2784 ('p', 'patch', None, _('show patch')),
2783 ('p', 'patch', None, _('show patch')),
2785 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
2784 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
2786 ('', 'template', '', _('display with template')),
2785 ('', 'template', '', _('display with template')),
2787 ] + remoteopts,
2786 ] + remoteopts,
2788 _('hg incoming [-p] [-n] [-M] [-f] [-r REV]...'
2787 _('hg incoming [-p] [-n] [-M] [-f] [-r REV]...'
2789 ' [--bundle FILENAME] [SOURCE]')),
2788 ' [--bundle FILENAME] [SOURCE]')),
2790 "^init":
2789 "^init":
2791 (init,
2790 (init,
2792 remoteopts,
2791 remoteopts,
2793 _('hg init [-e CMD] [--remotecmd CMD] [DEST]')),
2792 _('hg init [-e CMD] [--remotecmd CMD] [DEST]')),
2794 "locate":
2793 "locate":
2795 (locate,
2794 (locate,
2796 [('r', 'rev', '', _('search the repository as it stood at rev')),
2795 [('r', 'rev', '', _('search the repository as it stood at rev')),
2797 ('0', 'print0', None,
2796 ('0', 'print0', None,
2798 _('end filenames with NUL, for use with xargs')),
2797 _('end filenames with NUL, for use with xargs')),
2799 ('f', 'fullpath', None,
2798 ('f', 'fullpath', None,
2800 _('print complete paths from the filesystem root')),
2799 _('print complete paths from the filesystem root')),
2801 ] + walkopts,
2800 ] + walkopts,
2802 _('hg locate [OPTION]... [PATTERN]...')),
2801 _('hg locate [OPTION]... [PATTERN]...')),
2803 "^log|history":
2802 "^log|history":
2804 (log,
2803 (log,
2805 [('f', 'follow', None,
2804 [('f', 'follow', None,
2806 _('follow changeset history, or file history across copies and renames')),
2805 _('follow changeset history, or file history across copies and renames')),
2807 ('', 'follow-first', None,
2806 ('', 'follow-first', None,
2808 _('only follow the first parent of merge changesets')),
2807 _('only follow the first parent of merge changesets')),
2809 ('d', 'date', '', _('show revs matching date spec')),
2808 ('d', 'date', '', _('show revs matching date spec')),
2810 ('C', 'copies', None, _('show copied files')),
2809 ('C', 'copies', None, _('show copied files')),
2811 ('k', 'keyword', [], _('search for a keyword')),
2810 ('k', 'keyword', [], _('search for a keyword')),
2812 ('l', 'limit', '', _('limit number of changes displayed')),
2811 ('l', 'limit', '', _('limit number of changes displayed')),
2813 ('r', 'rev', [], _('show the specified revision or range')),
2812 ('r', 'rev', [], _('show the specified revision or range')),
2814 ('', 'removed', None, _('include revs where files were removed')),
2813 ('', 'removed', None, _('include revs where files were removed')),
2815 ('M', 'no-merges', None, _('do not show merges')),
2814 ('M', 'no-merges', None, _('do not show merges')),
2816 ('', 'style', '', _('display using template map file')),
2815 ('', 'style', '', _('display using template map file')),
2817 ('m', 'only-merges', None, _('show only merges')),
2816 ('m', 'only-merges', None, _('show only merges')),
2818 ('p', 'patch', None, _('show patch')),
2817 ('p', 'patch', None, _('show patch')),
2819 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
2818 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
2820 ('', 'template', '', _('display with template')),
2819 ('', 'template', '', _('display with template')),
2821 ] + walkopts,
2820 ] + walkopts,
2822 _('hg log [OPTION]... [FILE]')),
2821 _('hg log [OPTION]... [FILE]')),
2823 "manifest": (manifest, [], _('hg manifest [REV]')),
2822 "manifest": (manifest, [], _('hg manifest [REV]')),
2824 "^merge":
2823 "^merge":
2825 (merge,
2824 (merge,
2826 [('f', 'force', None, _('force a merge with outstanding changes'))],
2825 [('f', 'force', None, _('force a merge with outstanding changes'))],
2827 _('hg merge [-f] [REV]')),
2826 _('hg merge [-f] [REV]')),
2828 "outgoing|out": (outgoing,
2827 "outgoing|out": (outgoing,
2829 [('M', 'no-merges', None, _('do not show merges')),
2828 [('M', 'no-merges', None, _('do not show merges')),
2830 ('f', 'force', None,
2829 ('f', 'force', None,
2831 _('run even when remote repository is unrelated')),
2830 _('run even when remote repository is unrelated')),
2832 ('p', 'patch', None, _('show patch')),
2831 ('p', 'patch', None, _('show patch')),
2833 ('', 'style', '', _('display using template map file')),
2832 ('', 'style', '', _('display using template map file')),
2834 ('r', 'rev', [], _('a specific revision you would like to push')),
2833 ('r', 'rev', [], _('a specific revision you would like to push')),
2835 ('n', 'newest-first', None, _('show newest record first')),
2834 ('n', 'newest-first', None, _('show newest record first')),
2836 ('', 'template', '', _('display with template')),
2835 ('', 'template', '', _('display with template')),
2837 ] + remoteopts,
2836 ] + remoteopts,
2838 _('hg outgoing [-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
2837 _('hg outgoing [-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
2839 "^parents":
2838 "^parents":
2840 (parents,
2839 (parents,
2841 [('r', 'rev', '', _('show parents from the specified rev')),
2840 [('r', 'rev', '', _('show parents from the specified rev')),
2842 ('', 'style', '', _('display using template map file')),
2841 ('', 'style', '', _('display using template map file')),
2843 ('', 'template', '', _('display with template'))],
2842 ('', 'template', '', _('display with template'))],
2844 _('hg parents [-r REV] [FILE]')),
2843 _('hg parents [-r REV] [FILE]')),
2845 "paths": (paths, [], _('hg paths [NAME]')),
2844 "paths": (paths, [], _('hg paths [NAME]')),
2846 "^pull":
2845 "^pull":
2847 (pull,
2846 (pull,
2848 [('u', 'update', None,
2847 [('u', 'update', None,
2849 _('update to new tip if changesets were pulled')),
2848 _('update to new tip if changesets were pulled')),
2850 ('f', 'force', None,
2849 ('f', 'force', None,
2851 _('run even when remote repository is unrelated')),
2850 _('run even when remote repository is unrelated')),
2852 ('r', 'rev', [],
2851 ('r', 'rev', [],
2853 _('a specific revision up to which you would like to pull')),
2852 _('a specific revision up to which you would like to pull')),
2854 ] + remoteopts,
2853 ] + remoteopts,
2855 _('hg pull [-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
2854 _('hg pull [-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
2856 "^push":
2855 "^push":
2857 (push,
2856 (push,
2858 [('f', 'force', None, _('force push')),
2857 [('f', 'force', None, _('force push')),
2859 ('r', 'rev', [], _('a specific revision you would like to push')),
2858 ('r', 'rev', [], _('a specific revision you would like to push')),
2860 ] + remoteopts,
2859 ] + remoteopts,
2861 _('hg push [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
2860 _('hg push [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
2862 "debugrawcommit|rawcommit":
2861 "debugrawcommit|rawcommit":
2863 (rawcommit,
2862 (rawcommit,
2864 [('p', 'parent', [], _('parent')),
2863 [('p', 'parent', [], _('parent')),
2865 ('d', 'date', '', _('date code')),
2864 ('d', 'date', '', _('date code')),
2866 ('u', 'user', '', _('user')),
2865 ('u', 'user', '', _('user')),
2867 ('F', 'files', '', _('file list'))
2866 ('F', 'files', '', _('file list'))
2868 ] + commitopts,
2867 ] + commitopts,
2869 _('hg debugrawcommit [OPTION]... [FILE]...')),
2868 _('hg debugrawcommit [OPTION]... [FILE]...')),
2870 "recover": (recover, [], _('hg recover')),
2869 "recover": (recover, [], _('hg recover')),
2871 "^remove|rm":
2870 "^remove|rm":
2872 (remove,
2871 (remove,
2873 [('A', 'after', None, _('record remove that has already occurred')),
2872 [('A', 'after', None, _('record remove that has already occurred')),
2874 ('f', 'force', None, _('remove file even if modified')),
2873 ('f', 'force', None, _('remove file even if modified')),
2875 ] + walkopts,
2874 ] + walkopts,
2876 _('hg remove [OPTION]... FILE...')),
2875 _('hg remove [OPTION]... FILE...')),
2877 "rename|mv":
2876 "rename|mv":
2878 (rename,
2877 (rename,
2879 [('A', 'after', None, _('record a rename that has already occurred')),
2878 [('A', 'after', None, _('record a rename that has already occurred')),
2880 ('f', 'force', None,
2879 ('f', 'force', None,
2881 _('forcibly copy over an existing managed file')),
2880 _('forcibly copy over an existing managed file')),
2882 ] + walkopts + dryrunopts,
2881 ] + walkopts + dryrunopts,
2883 _('hg rename [OPTION]... SOURCE... DEST')),
2882 _('hg rename [OPTION]... SOURCE... DEST')),
2884 "^revert":
2883 "^revert":
2885 (revert,
2884 (revert,
2886 [('a', 'all', None, _('revert all changes when no arguments given')),
2885 [('a', 'all', None, _('revert all changes when no arguments given')),
2887 ('d', 'date', '', _('tipmost revision matching date')),
2886 ('d', 'date', '', _('tipmost revision matching date')),
2888 ('r', 'rev', '', _('revision to revert to')),
2887 ('r', 'rev', '', _('revision to revert to')),
2889 ('', 'no-backup', None, _('do not save backup copies of files')),
2888 ('', 'no-backup', None, _('do not save backup copies of files')),
2890 ] + walkopts + dryrunopts,
2889 ] + walkopts + dryrunopts,
2891 _('hg revert [OPTION]... [-r REV] [NAME]...')),
2890 _('hg revert [OPTION]... [-r REV] [NAME]...')),
2892 "rollback": (rollback, [], _('hg rollback')),
2891 "rollback": (rollback, [], _('hg rollback')),
2893 "root": (root, [], _('hg root')),
2892 "root": (root, [], _('hg root')),
2894 "showconfig|debugconfig":
2893 "showconfig|debugconfig":
2895 (showconfig,
2894 (showconfig,
2896 [('u', 'untrusted', None, _('show untrusted configuration options'))],
2895 [('u', 'untrusted', None, _('show untrusted configuration options'))],
2897 _('showconfig [-u] [NAME]...')),
2896 _('showconfig [-u] [NAME]...')),
2898 "^serve":
2897 "^serve":
2899 (serve,
2898 (serve,
2900 [('A', 'accesslog', '', _('name of access log file to write to')),
2899 [('A', 'accesslog', '', _('name of access log file to write to')),
2901 ('d', 'daemon', None, _('run server in background')),
2900 ('d', 'daemon', None, _('run server in background')),
2902 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
2901 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
2903 ('E', 'errorlog', '', _('name of error log file to write to')),
2902 ('E', 'errorlog', '', _('name of error log file to write to')),
2904 ('p', 'port', 0, _('port to use (default: 8000)')),
2903 ('p', 'port', 0, _('port to use (default: 8000)')),
2905 ('a', 'address', '', _('address to use')),
2904 ('a', 'address', '', _('address to use')),
2906 ('n', 'name', '',
2905 ('n', 'name', '',
2907 _('name to show in web pages (default: working dir)')),
2906 _('name to show in web pages (default: working dir)')),
2908 ('', 'webdir-conf', '', _('name of the webdir config file'
2907 ('', 'webdir-conf', '', _('name of the webdir config file'
2909 ' (serve more than one repo)')),
2908 ' (serve more than one repo)')),
2910 ('', 'pid-file', '', _('name of file to write process ID to')),
2909 ('', 'pid-file', '', _('name of file to write process ID to')),
2911 ('', 'stdio', None, _('for remote clients')),
2910 ('', 'stdio', None, _('for remote clients')),
2912 ('t', 'templates', '', _('web templates to use')),
2911 ('t', 'templates', '', _('web templates to use')),
2913 ('', 'style', '', _('template style to use')),
2912 ('', 'style', '', _('template style to use')),
2914 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
2913 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
2915 _('hg serve [OPTION]...')),
2914 _('hg serve [OPTION]...')),
2916 "^status|st":
2915 "^status|st":
2917 (status,
2916 (status,
2918 [('A', 'all', None, _('show status of all files')),
2917 [('A', 'all', None, _('show status of all files')),
2919 ('m', 'modified', None, _('show only modified files')),
2918 ('m', 'modified', None, _('show only modified files')),
2920 ('a', 'added', None, _('show only added files')),
2919 ('a', 'added', None, _('show only added files')),
2921 ('r', 'removed', None, _('show only removed files')),
2920 ('r', 'removed', None, _('show only removed files')),
2922 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
2921 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
2923 ('c', 'clean', None, _('show only files without changes')),
2922 ('c', 'clean', None, _('show only files without changes')),
2924 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
2923 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
2925 ('i', 'ignored', None, _('show only ignored files')),
2924 ('i', 'ignored', None, _('show only ignored files')),
2926 ('n', 'no-status', None, _('hide status prefix')),
2925 ('n', 'no-status', None, _('hide status prefix')),
2927 ('C', 'copies', None, _('show source of copied files')),
2926 ('C', 'copies', None, _('show source of copied files')),
2928 ('0', 'print0', None,
2927 ('0', 'print0', None,
2929 _('end filenames with NUL, for use with xargs')),
2928 _('end filenames with NUL, for use with xargs')),
2930 ('', 'rev', [], _('show difference from revision')),
2929 ('', 'rev', [], _('show difference from revision')),
2931 ] + walkopts,
2930 ] + walkopts,
2932 _('hg status [OPTION]... [FILE]...')),
2931 _('hg status [OPTION]... [FILE]...')),
2933 "tag":
2932 "tag":
2934 (tag,
2933 (tag,
2935 [('l', 'local', None, _('make the tag local')),
2934 [('l', 'local', None, _('make the tag local')),
2936 ('m', 'message', '', _('message for tag commit log entry')),
2935 ('m', 'message', '', _('message for tag commit log entry')),
2937 ('d', 'date', '', _('record datecode as commit date')),
2936 ('d', 'date', '', _('record datecode as commit date')),
2938 ('u', 'user', '', _('record user as commiter')),
2937 ('u', 'user', '', _('record user as commiter')),
2939 ('r', 'rev', '', _('revision to tag')),
2938 ('r', 'rev', '', _('revision to tag')),
2940 ('', 'remove', None, _('remove a tag'))],
2939 ('', 'remove', None, _('remove a tag'))],
2941 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
2940 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
2942 "tags": (tags, [], _('hg tags')),
2941 "tags": (tags, [], _('hg tags')),
2943 "tip":
2942 "tip":
2944 (tip,
2943 (tip,
2945 [('', 'style', '', _('display using template map file')),
2944 [('', 'style', '', _('display using template map file')),
2946 ('p', 'patch', None, _('show patch')),
2945 ('p', 'patch', None, _('show patch')),
2947 ('', 'template', '', _('display with template'))],
2946 ('', 'template', '', _('display with template'))],
2948 _('hg tip [-p]')),
2947 _('hg tip [-p]')),
2949 "unbundle":
2948 "unbundle":
2950 (unbundle,
2949 (unbundle,
2951 [('u', 'update', None,
2950 [('u', 'update', None,
2952 _('update to new tip if changesets were unbundled'))],
2951 _('update to new tip if changesets were unbundled'))],
2953 _('hg unbundle [-u] FILE')),
2952 _('hg unbundle [-u] FILE')),
2954 "^update|up|checkout|co":
2953 "^update|up|checkout|co":
2955 (update,
2954 (update,
2956 [('C', 'clean', None, _('overwrite locally modified files')),
2955 [('C', 'clean', None, _('overwrite locally modified files')),
2957 ('d', 'date', '', _('tipmost revision matching date'))],
2956 ('d', 'date', '', _('tipmost revision matching date'))],
2958 _('hg update [-C] [-d DATE] [REV]')),
2957 _('hg update [-C] [-d DATE] [REV]')),
2959 "verify": (verify, [], _('hg verify')),
2958 "verify": (verify, [], _('hg verify')),
2960 "version": (version_, [], _('hg version')),
2959 "version": (version_, [], _('hg version')),
2961 }
2960 }
2962
2961
2963 norepo = ("clone init version help debugancestor debugcomplete debugdata"
2962 norepo = ("clone init version help debugancestor debugcomplete debugdata"
2964 " debugindex debugindexdot debugdate debuginstall")
2963 " debugindex debugindexdot debugdate debuginstall")
2965 optionalrepo = ("paths serve showconfig")
2964 optionalrepo = ("paths serve showconfig")
2966
2965
2967 def findpossible(ui, cmd):
2966 def findpossible(ui, cmd):
2968 """
2967 """
2969 Return cmd -> (aliases, command table entry)
2968 Return cmd -> (aliases, command table entry)
2970 for each matching command.
2969 for each matching command.
2971 Return debug commands (or their aliases) only if no normal command matches.
2970 Return debug commands (or their aliases) only if no normal command matches.
2972 """
2971 """
2973 choice = {}
2972 choice = {}
2974 debugchoice = {}
2973 debugchoice = {}
2975 for e in table.keys():
2974 for e in table.keys():
2976 aliases = e.lstrip("^").split("|")
2975 aliases = e.lstrip("^").split("|")
2977 found = None
2976 found = None
2978 if cmd in aliases:
2977 if cmd in aliases:
2979 found = cmd
2978 found = cmd
2980 elif not ui.config("ui", "strict"):
2979 elif not ui.config("ui", "strict"):
2981 for a in aliases:
2980 for a in aliases:
2982 if a.startswith(cmd):
2981 if a.startswith(cmd):
2983 found = a
2982 found = a
2984 break
2983 break
2985 if found is not None:
2984 if found is not None:
2986 if aliases[0].startswith("debug") or found.startswith("debug"):
2985 if aliases[0].startswith("debug") or found.startswith("debug"):
2987 debugchoice[found] = (aliases, table[e])
2986 debugchoice[found] = (aliases, table[e])
2988 else:
2987 else:
2989 choice[found] = (aliases, table[e])
2988 choice[found] = (aliases, table[e])
2990
2989
2991 if not choice and debugchoice:
2990 if not choice and debugchoice:
2992 choice = debugchoice
2991 choice = debugchoice
2993
2992
2994 return choice
2993 return choice
2995
2994
2996 def findcmd(ui, cmd):
2995 def findcmd(ui, cmd):
2997 """Return (aliases, command table entry) for command string."""
2996 """Return (aliases, command table entry) for command string."""
2998 choice = findpossible(ui, cmd)
2997 choice = findpossible(ui, cmd)
2999
2998
3000 if choice.has_key(cmd):
2999 if choice.has_key(cmd):
3001 return choice[cmd]
3000 return choice[cmd]
3002
3001
3003 if len(choice) > 1:
3002 if len(choice) > 1:
3004 clist = choice.keys()
3003 clist = choice.keys()
3005 clist.sort()
3004 clist.sort()
3006 raise AmbiguousCommand(cmd, clist)
3005 raise AmbiguousCommand(cmd, clist)
3007
3006
3008 if choice:
3007 if choice:
3009 return choice.values()[0]
3008 return choice.values()[0]
3010
3009
3011 raise UnknownCommand(cmd)
3010 raise UnknownCommand(cmd)
3012
3011
3013 def catchterm(*args):
3012 def catchterm(*args):
3014 raise util.SignalInterrupt
3013 raise util.SignalInterrupt
3015
3014
3016 def run():
3015 def run():
3017 sys.exit(dispatch(sys.argv[1:]))
3016 sys.exit(dispatch(sys.argv[1:]))
3018
3017
3019 class ParseError(Exception):
3018 class ParseError(Exception):
3020 """Exception raised on errors in parsing the command line."""
3019 """Exception raised on errors in parsing the command line."""
3021
3020
3022 def parse(ui, args):
3021 def parse(ui, args):
3023 options = {}
3022 options = {}
3024 cmdoptions = {}
3023 cmdoptions = {}
3025
3024
3026 try:
3025 try:
3027 args = fancyopts.fancyopts(args, globalopts, options)
3026 args = fancyopts.fancyopts(args, globalopts, options)
3028 except fancyopts.getopt.GetoptError, inst:
3027 except fancyopts.getopt.GetoptError, inst:
3029 raise ParseError(None, inst)
3028 raise ParseError(None, inst)
3030
3029
3031 if args:
3030 if args:
3032 cmd, args = args[0], args[1:]
3031 cmd, args = args[0], args[1:]
3033 aliases, i = findcmd(ui, cmd)
3032 aliases, i = findcmd(ui, cmd)
3034 cmd = aliases[0]
3033 cmd = aliases[0]
3035 defaults = ui.config("defaults", cmd)
3034 defaults = ui.config("defaults", cmd)
3036 if defaults:
3035 if defaults:
3037 args = shlex.split(defaults) + args
3036 args = shlex.split(defaults) + args
3038 c = list(i[1])
3037 c = list(i[1])
3039 else:
3038 else:
3040 cmd = None
3039 cmd = None
3041 c = []
3040 c = []
3042
3041
3043 # combine global options into local
3042 # combine global options into local
3044 for o in globalopts:
3043 for o in globalopts:
3045 c.append((o[0], o[1], options[o[1]], o[3]))
3044 c.append((o[0], o[1], options[o[1]], o[3]))
3046
3045
3047 try:
3046 try:
3048 args = fancyopts.fancyopts(args, c, cmdoptions)
3047 args = fancyopts.fancyopts(args, c, cmdoptions)
3049 except fancyopts.getopt.GetoptError, inst:
3048 except fancyopts.getopt.GetoptError, inst:
3050 raise ParseError(cmd, inst)
3049 raise ParseError(cmd, inst)
3051
3050
3052 # separate global options back out
3051 # separate global options back out
3053 for o in globalopts:
3052 for o in globalopts:
3054 n = o[1]
3053 n = o[1]
3055 options[n] = cmdoptions[n]
3054 options[n] = cmdoptions[n]
3056 del cmdoptions[n]
3055 del cmdoptions[n]
3057
3056
3058 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
3057 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
3059
3058
3060 external = {}
3059 external = {}
3061
3060
3062 def findext(name):
3061 def findext(name):
3063 '''return module with given extension name'''
3062 '''return module with given extension name'''
3064 try:
3063 try:
3065 return sys.modules[external[name]]
3064 return sys.modules[external[name]]
3066 except KeyError:
3065 except KeyError:
3067 for k, v in external.iteritems():
3066 for k, v in external.iteritems():
3068 if k.endswith('.' + name) or k.endswith('/' + name) or v == name:
3067 if k.endswith('.' + name) or k.endswith('/' + name) or v == name:
3069 return sys.modules[v]
3068 return sys.modules[v]
3070 raise KeyError(name)
3069 raise KeyError(name)
3071
3070
3072 def load_extensions(ui):
3071 def load_extensions(ui):
3073 added = []
3072 added = []
3074 for ext_name, load_from_name in ui.extensions():
3073 for ext_name, load_from_name in ui.extensions():
3075 if ext_name in external:
3074 if ext_name in external:
3076 continue
3075 continue
3077 try:
3076 try:
3078 if load_from_name:
3077 if load_from_name:
3079 # the module will be loaded in sys.modules
3078 # the module will be loaded in sys.modules
3080 # choose an unique name so that it doesn't
3079 # choose an unique name so that it doesn't
3081 # conflicts with other modules
3080 # conflicts with other modules
3082 module_name = "hgext_%s" % ext_name.replace('.', '_')
3081 module_name = "hgext_%s" % ext_name.replace('.', '_')
3083 mod = imp.load_source(module_name, load_from_name)
3082 mod = imp.load_source(module_name, load_from_name)
3084 else:
3083 else:
3085 def importh(name):
3084 def importh(name):
3086 mod = __import__(name)
3085 mod = __import__(name)
3087 components = name.split('.')
3086 components = name.split('.')
3088 for comp in components[1:]:
3087 for comp in components[1:]:
3089 mod = getattr(mod, comp)
3088 mod = getattr(mod, comp)
3090 return mod
3089 return mod
3091 try:
3090 try:
3092 mod = importh("hgext.%s" % ext_name)
3091 mod = importh("hgext.%s" % ext_name)
3093 except ImportError:
3092 except ImportError:
3094 mod = importh(ext_name)
3093 mod = importh(ext_name)
3095 external[ext_name] = mod.__name__
3094 external[ext_name] = mod.__name__
3096 added.append((mod, ext_name))
3095 added.append((mod, ext_name))
3097 except (util.SignalInterrupt, KeyboardInterrupt):
3096 except (util.SignalInterrupt, KeyboardInterrupt):
3098 raise
3097 raise
3099 except Exception, inst:
3098 except Exception, inst:
3100 ui.warn(_("*** failed to import extension %s: %s\n") %
3099 ui.warn(_("*** failed to import extension %s: %s\n") %
3101 (ext_name, inst))
3100 (ext_name, inst))
3102 if ui.print_exc():
3101 if ui.print_exc():
3103 return 1
3102 return 1
3104
3103
3105 for mod, name in added:
3104 for mod, name in added:
3106 uisetup = getattr(mod, 'uisetup', None)
3105 uisetup = getattr(mod, 'uisetup', None)
3107 if uisetup:
3106 if uisetup:
3108 uisetup(ui)
3107 uisetup(ui)
3109 reposetup = getattr(mod, 'reposetup', None)
3108 reposetup = getattr(mod, 'reposetup', None)
3110 if reposetup:
3109 if reposetup:
3111 hg.repo_setup_hooks.append(reposetup)
3110 hg.repo_setup_hooks.append(reposetup)
3112 cmdtable = getattr(mod, 'cmdtable', {})
3111 cmdtable = getattr(mod, 'cmdtable', {})
3113 overrides = [cmd for cmd in cmdtable if cmd in table]
3112 overrides = [cmd for cmd in cmdtable if cmd in table]
3114 if overrides:
3113 if overrides:
3115 ui.warn(_("extension '%s' overrides commands: %s\n")
3114 ui.warn(_("extension '%s' overrides commands: %s\n")
3116 % (name, " ".join(overrides)))
3115 % (name, " ".join(overrides)))
3117 table.update(cmdtable)
3116 table.update(cmdtable)
3118
3117
3119 def parseconfig(config):
3118 def parseconfig(config):
3120 """parse the --config options from the command line"""
3119 """parse the --config options from the command line"""
3121 parsed = []
3120 parsed = []
3122 for cfg in config:
3121 for cfg in config:
3123 try:
3122 try:
3124 name, value = cfg.split('=', 1)
3123 name, value = cfg.split('=', 1)
3125 section, name = name.split('.', 1)
3124 section, name = name.split('.', 1)
3126 if not section or not name:
3125 if not section or not name:
3127 raise IndexError
3126 raise IndexError
3128 parsed.append((section, name, value))
3127 parsed.append((section, name, value))
3129 except (IndexError, ValueError):
3128 except (IndexError, ValueError):
3130 raise util.Abort(_('malformed --config option: %s') % cfg)
3129 raise util.Abort(_('malformed --config option: %s') % cfg)
3131 return parsed
3130 return parsed
3132
3131
3133 def dispatch(args):
3132 def dispatch(args):
3134 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
3133 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
3135 num = getattr(signal, name, None)
3134 num = getattr(signal, name, None)
3136 if num: signal.signal(num, catchterm)
3135 if num: signal.signal(num, catchterm)
3137
3136
3138 try:
3137 try:
3139 u = ui.ui(traceback='--traceback' in sys.argv[1:])
3138 u = ui.ui(traceback='--traceback' in sys.argv[1:])
3140 except util.Abort, inst:
3139 except util.Abort, inst:
3141 sys.stderr.write(_("abort: %s\n") % inst)
3140 sys.stderr.write(_("abort: %s\n") % inst)
3142 return -1
3141 return -1
3143
3142
3144 load_extensions(u)
3143 load_extensions(u)
3145 u.addreadhook(load_extensions)
3144 u.addreadhook(load_extensions)
3146
3145
3147 try:
3146 try:
3148 cmd, func, args, options, cmdoptions = parse(u, args)
3147 cmd, func, args, options, cmdoptions = parse(u, args)
3149 if options["encoding"]:
3148 if options["encoding"]:
3150 util._encoding = options["encoding"]
3149 util._encoding = options["encoding"]
3151 if options["encodingmode"]:
3150 if options["encodingmode"]:
3152 util._encodingmode = options["encodingmode"]
3151 util._encodingmode = options["encodingmode"]
3153 if options["time"]:
3152 if options["time"]:
3154 def get_times():
3153 def get_times():
3155 t = os.times()
3154 t = os.times()
3156 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
3155 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
3157 t = (t[0], t[1], t[2], t[3], time.clock())
3156 t = (t[0], t[1], t[2], t[3], time.clock())
3158 return t
3157 return t
3159 s = get_times()
3158 s = get_times()
3160 def print_time():
3159 def print_time():
3161 t = get_times()
3160 t = get_times()
3162 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
3161 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
3163 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
3162 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
3164 atexit.register(print_time)
3163 atexit.register(print_time)
3165
3164
3166 # enter the debugger before command execution
3165 # enter the debugger before command execution
3167 if options['debugger']:
3166 if options['debugger']:
3168 pdb.set_trace()
3167 pdb.set_trace()
3169
3168
3170 try:
3169 try:
3171 if options['cwd']:
3170 if options['cwd']:
3172 os.chdir(options['cwd'])
3171 os.chdir(options['cwd'])
3173
3172
3174 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3173 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3175 not options["noninteractive"], options["traceback"],
3174 not options["noninteractive"], options["traceback"],
3176 parseconfig(options["config"]))
3175 parseconfig(options["config"]))
3177
3176
3178 path = u.expandpath(options["repository"]) or ""
3177 path = u.expandpath(options["repository"]) or ""
3179 repo = path and hg.repository(u, path=path) or None
3178 repo = path and hg.repository(u, path=path) or None
3180 if repo and not repo.local():
3179 if repo and not repo.local():
3181 raise util.Abort(_("repository '%s' is not local") % path)
3180 raise util.Abort(_("repository '%s' is not local") % path)
3182
3181
3183 if options['help']:
3182 if options['help']:
3184 return help_(u, cmd, options['version'])
3183 return help_(u, cmd, options['version'])
3185 elif options['version']:
3184 elif options['version']:
3186 return version_(u)
3185 return version_(u)
3187 elif not cmd:
3186 elif not cmd:
3188 return help_(u, 'shortlist')
3187 return help_(u, 'shortlist')
3189
3188
3190 if cmd not in norepo.split():
3189 if cmd not in norepo.split():
3191 try:
3190 try:
3192 if not repo:
3191 if not repo:
3193 repo = hg.repository(u, path=path)
3192 repo = hg.repository(u, path=path)
3194 u = repo.ui
3193 u = repo.ui
3195 except hg.RepoError:
3194 except hg.RepoError:
3196 if cmd not in optionalrepo.split():
3195 if cmd not in optionalrepo.split():
3197 raise
3196 raise
3198 d = lambda: func(u, repo, *args, **cmdoptions)
3197 d = lambda: func(u, repo, *args, **cmdoptions)
3199 else:
3198 else:
3200 d = lambda: func(u, *args, **cmdoptions)
3199 d = lambda: func(u, *args, **cmdoptions)
3201
3200
3202 try:
3201 try:
3203 if options['profile']:
3202 if options['profile']:
3204 import hotshot, hotshot.stats
3203 import hotshot, hotshot.stats
3205 prof = hotshot.Profile("hg.prof")
3204 prof = hotshot.Profile("hg.prof")
3206 try:
3205 try:
3207 try:
3206 try:
3208 return prof.runcall(d)
3207 return prof.runcall(d)
3209 except:
3208 except:
3210 try:
3209 try:
3211 u.warn(_('exception raised - generating '
3210 u.warn(_('exception raised - generating '
3212 'profile anyway\n'))
3211 'profile anyway\n'))
3213 except:
3212 except:
3214 pass
3213 pass
3215 raise
3214 raise
3216 finally:
3215 finally:
3217 prof.close()
3216 prof.close()
3218 stats = hotshot.stats.load("hg.prof")
3217 stats = hotshot.stats.load("hg.prof")
3219 stats.strip_dirs()
3218 stats.strip_dirs()
3220 stats.sort_stats('time', 'calls')
3219 stats.sort_stats('time', 'calls')
3221 stats.print_stats(40)
3220 stats.print_stats(40)
3222 elif options['lsprof']:
3221 elif options['lsprof']:
3223 try:
3222 try:
3224 from mercurial import lsprof
3223 from mercurial import lsprof
3225 except ImportError:
3224 except ImportError:
3226 raise util.Abort(_(
3225 raise util.Abort(_(
3227 'lsprof not available - install from '
3226 'lsprof not available - install from '
3228 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
3227 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
3229 p = lsprof.Profiler()
3228 p = lsprof.Profiler()
3230 p.enable(subcalls=True)
3229 p.enable(subcalls=True)
3231 try:
3230 try:
3232 return d()
3231 return d()
3233 finally:
3232 finally:
3234 p.disable()
3233 p.disable()
3235 stats = lsprof.Stats(p.getstats())
3234 stats = lsprof.Stats(p.getstats())
3236 stats.sort()
3235 stats.sort()
3237 stats.pprint(top=10, file=sys.stderr, climit=5)
3236 stats.pprint(top=10, file=sys.stderr, climit=5)
3238 else:
3237 else:
3239 return d()
3238 return d()
3240 finally:
3239 finally:
3241 u.flush()
3240 u.flush()
3242 except:
3241 except:
3243 # enter the debugger when we hit an exception
3242 # enter the debugger when we hit an exception
3244 if options['debugger']:
3243 if options['debugger']:
3245 pdb.post_mortem(sys.exc_info()[2])
3244 pdb.post_mortem(sys.exc_info()[2])
3246 u.print_exc()
3245 u.print_exc()
3247 raise
3246 raise
3248 except ParseError, inst:
3247 except ParseError, inst:
3249 if inst.args[0]:
3248 if inst.args[0]:
3250 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
3249 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
3251 help_(u, inst.args[0])
3250 help_(u, inst.args[0])
3252 else:
3251 else:
3253 u.warn(_("hg: %s\n") % inst.args[1])
3252 u.warn(_("hg: %s\n") % inst.args[1])
3254 help_(u, 'shortlist')
3253 help_(u, 'shortlist')
3255 except AmbiguousCommand, inst:
3254 except AmbiguousCommand, inst:
3256 u.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
3255 u.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
3257 (inst.args[0], " ".join(inst.args[1])))
3256 (inst.args[0], " ".join(inst.args[1])))
3258 except UnknownCommand, inst:
3257 except UnknownCommand, inst:
3259 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
3258 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
3260 help_(u, 'shortlist')
3259 help_(u, 'shortlist')
3261 except hg.RepoError, inst:
3260 except hg.RepoError, inst:
3262 u.warn(_("abort: %s!\n") % inst)
3261 u.warn(_("abort: %s!\n") % inst)
3263 except lock.LockHeld, inst:
3262 except lock.LockHeld, inst:
3264 if inst.errno == errno.ETIMEDOUT:
3263 if inst.errno == errno.ETIMEDOUT:
3265 reason = _('timed out waiting for lock held by %s') % inst.locker
3264 reason = _('timed out waiting for lock held by %s') % inst.locker
3266 else:
3265 else:
3267 reason = _('lock held by %s') % inst.locker
3266 reason = _('lock held by %s') % inst.locker
3268 u.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
3267 u.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
3269 except lock.LockUnavailable, inst:
3268 except lock.LockUnavailable, inst:
3270 u.warn(_("abort: could not lock %s: %s\n") %
3269 u.warn(_("abort: could not lock %s: %s\n") %
3271 (inst.desc or inst.filename, inst.strerror))
3270 (inst.desc or inst.filename, inst.strerror))
3272 except revlog.RevlogError, inst:
3271 except revlog.RevlogError, inst:
3273 u.warn(_("abort: %s!\n") % inst)
3272 u.warn(_("abort: %s!\n") % inst)
3274 except util.SignalInterrupt:
3273 except util.SignalInterrupt:
3275 u.warn(_("killed!\n"))
3274 u.warn(_("killed!\n"))
3276 except KeyboardInterrupt:
3275 except KeyboardInterrupt:
3277 try:
3276 try:
3278 u.warn(_("interrupted!\n"))
3277 u.warn(_("interrupted!\n"))
3279 except IOError, inst:
3278 except IOError, inst:
3280 if inst.errno == errno.EPIPE:
3279 if inst.errno == errno.EPIPE:
3281 if u.debugflag:
3280 if u.debugflag:
3282 u.warn(_("\nbroken pipe\n"))
3281 u.warn(_("\nbroken pipe\n"))
3283 else:
3282 else:
3284 raise
3283 raise
3285 except socket.error, inst:
3284 except socket.error, inst:
3286 u.warn(_("abort: %s\n") % inst[1])
3285 u.warn(_("abort: %s\n") % inst[1])
3287 except IOError, inst:
3286 except IOError, inst:
3288 if hasattr(inst, "code"):
3287 if hasattr(inst, "code"):
3289 u.warn(_("abort: %s\n") % inst)
3288 u.warn(_("abort: %s\n") % inst)
3290 elif hasattr(inst, "reason"):
3289 elif hasattr(inst, "reason"):
3291 try: # usually it is in the form (errno, strerror)
3290 try: # usually it is in the form (errno, strerror)
3292 reason = inst.reason.args[1]
3291 reason = inst.reason.args[1]
3293 except: # it might be anything, for example a string
3292 except: # it might be anything, for example a string
3294 reason = inst.reason
3293 reason = inst.reason
3295 u.warn(_("abort: error: %s\n") % reason)
3294 u.warn(_("abort: error: %s\n") % reason)
3296 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
3295 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
3297 if u.debugflag:
3296 if u.debugflag:
3298 u.warn(_("broken pipe\n"))
3297 u.warn(_("broken pipe\n"))
3299 elif getattr(inst, "strerror", None):
3298 elif getattr(inst, "strerror", None):
3300 if getattr(inst, "filename", None):
3299 if getattr(inst, "filename", None):
3301 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3300 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3302 else:
3301 else:
3303 u.warn(_("abort: %s\n") % inst.strerror)
3302 u.warn(_("abort: %s\n") % inst.strerror)
3304 else:
3303 else:
3305 raise
3304 raise
3306 except OSError, inst:
3305 except OSError, inst:
3307 if getattr(inst, "filename", None):
3306 if getattr(inst, "filename", None):
3308 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3307 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3309 else:
3308 else:
3310 u.warn(_("abort: %s\n") % inst.strerror)
3309 u.warn(_("abort: %s\n") % inst.strerror)
3311 except util.UnexpectedOutput, inst:
3310 except util.UnexpectedOutput, inst:
3312 u.warn(_("abort: %s") % inst[0])
3311 u.warn(_("abort: %s") % inst[0])
3313 if not isinstance(inst[1], basestring):
3312 if not isinstance(inst[1], basestring):
3314 u.warn(" %r\n" % (inst[1],))
3313 u.warn(" %r\n" % (inst[1],))
3315 elif not inst[1]:
3314 elif not inst[1]:
3316 u.warn(_(" empty string\n"))
3315 u.warn(_(" empty string\n"))
3317 else:
3316 else:
3318 u.warn("\n%r\n" % util.ellipsis(inst[1]))
3317 u.warn("\n%r\n" % util.ellipsis(inst[1]))
3319 except util.Abort, inst:
3318 except util.Abort, inst:
3320 u.warn(_("abort: %s\n") % inst)
3319 u.warn(_("abort: %s\n") % inst)
3321 except TypeError, inst:
3320 except TypeError, inst:
3322 # was this an argument error?
3321 # was this an argument error?
3323 tb = traceback.extract_tb(sys.exc_info()[2])
3322 tb = traceback.extract_tb(sys.exc_info()[2])
3324 if len(tb) > 2: # no
3323 if len(tb) > 2: # no
3325 raise
3324 raise
3326 u.debug(inst, "\n")
3325 u.debug(inst, "\n")
3327 u.warn(_("%s: invalid arguments\n") % cmd)
3326 u.warn(_("%s: invalid arguments\n") % cmd)
3328 help_(u, cmd)
3327 help_(u, cmd)
3329 except SystemExit, inst:
3328 except SystemExit, inst:
3330 # Commands shouldn't sys.exit directly, but give a return code.
3329 # Commands shouldn't sys.exit directly, but give a return code.
3331 # Just in case catch this and and pass exit code to caller.
3330 # Just in case catch this and and pass exit code to caller.
3332 return inst.code
3331 return inst.code
3333 except:
3332 except:
3334 u.warn(_("** unknown exception encountered, details follow\n"))
3333 u.warn(_("** unknown exception encountered, details follow\n"))
3335 u.warn(_("** report bug details to "
3334 u.warn(_("** report bug details to "
3336 "http://www.selenic.com/mercurial/bts\n"))
3335 "http://www.selenic.com/mercurial/bts\n"))
3337 u.warn(_("** or mercurial@selenic.com\n"))
3336 u.warn(_("** or mercurial@selenic.com\n"))
3338 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
3337 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
3339 % version.get_version())
3338 % version.get_version())
3340 raise
3339 raise
3341
3340
3342 return -1
3341 return -1
@@ -1,84 +1,83 b''
1 # filelog.py - file history class for mercurial
1 # filelog.py - file history class for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from revlog import *
8 from revlog import *
9 import os
9 import os
10
10
11 class filelog(revlog):
11 class filelog(revlog):
12 def __init__(self, opener, path, defversion=REVLOG_DEFAULT_VERSION):
12 def __init__(self, opener, path):
13 revlog.__init__(self, opener,
13 revlog.__init__(self, opener,
14 "/".join(("data", self.encodedir(path + ".i"))),
14 "/".join(("data", self.encodedir(path + ".i"))))
15 defversion)
16
15
17 # This avoids a collision between a file named foo and a dir named
16 # This avoids a collision between a file named foo and a dir named
18 # foo.i or foo.d
17 # foo.i or foo.d
19 def encodedir(self, path):
18 def encodedir(self, path):
20 return (path
19 return (path
21 .replace(".hg/", ".hg.hg/")
20 .replace(".hg/", ".hg.hg/")
22 .replace(".i/", ".i.hg/")
21 .replace(".i/", ".i.hg/")
23 .replace(".d/", ".d.hg/"))
22 .replace(".d/", ".d.hg/"))
24
23
25 def decodedir(self, path):
24 def decodedir(self, path):
26 return (path
25 return (path
27 .replace(".d.hg/", ".d/")
26 .replace(".d.hg/", ".d/")
28 .replace(".i.hg/", ".i/")
27 .replace(".i.hg/", ".i/")
29 .replace(".hg.hg/", ".hg/"))
28 .replace(".hg.hg/", ".hg/"))
30
29
31 def read(self, node):
30 def read(self, node):
32 t = self.revision(node)
31 t = self.revision(node)
33 if not t.startswith('\1\n'):
32 if not t.startswith('\1\n'):
34 return t
33 return t
35 s = t.index('\1\n', 2)
34 s = t.index('\1\n', 2)
36 return t[s+2:]
35 return t[s+2:]
37
36
38 def _readmeta(self, node):
37 def _readmeta(self, node):
39 t = self.revision(node)
38 t = self.revision(node)
40 if not t.startswith('\1\n'):
39 if not t.startswith('\1\n'):
41 return {}
40 return {}
42 s = t.index('\1\n', 2)
41 s = t.index('\1\n', 2)
43 mt = t[2:s]
42 mt = t[2:s]
44 m = {}
43 m = {}
45 for l in mt.splitlines():
44 for l in mt.splitlines():
46 k, v = l.split(": ", 1)
45 k, v = l.split(": ", 1)
47 m[k] = v
46 m[k] = v
48 return m
47 return m
49
48
50 def add(self, text, meta, transaction, link, p1=None, p2=None):
49 def add(self, text, meta, transaction, link, p1=None, p2=None):
51 if meta or text.startswith('\1\n'):
50 if meta or text.startswith('\1\n'):
52 mt = ""
51 mt = ""
53 if meta:
52 if meta:
54 mt = [ "%s: %s\n" % (k, v) for k,v in meta.items() ]
53 mt = [ "%s: %s\n" % (k, v) for k,v in meta.items() ]
55 text = "\1\n%s\1\n%s" % ("".join(mt), text)
54 text = "\1\n%s\1\n%s" % ("".join(mt), text)
56 return self.addrevision(text, transaction, link, p1, p2)
55 return self.addrevision(text, transaction, link, p1, p2)
57
56
58 def renamed(self, node):
57 def renamed(self, node):
59 if self.parents(node)[0] != nullid:
58 if self.parents(node)[0] != nullid:
60 return False
59 return False
61 m = self._readmeta(node)
60 m = self._readmeta(node)
62 if m and m.has_key("copy"):
61 if m and m.has_key("copy"):
63 return (m["copy"], bin(m["copyrev"]))
62 return (m["copy"], bin(m["copyrev"]))
64 return False
63 return False
65
64
66 def size(self, rev):
65 def size(self, rev):
67 """return the size of a given revision"""
66 """return the size of a given revision"""
68
67
69 # for revisions with renames, we have to go the slow way
68 # for revisions with renames, we have to go the slow way
70 node = self.node(rev)
69 node = self.node(rev)
71 if self.renamed(node):
70 if self.renamed(node):
72 return len(self.read(node))
71 return len(self.read(node))
73
72
74 return revlog.size(self, rev)
73 return revlog.size(self, rev)
75
74
76 def cmp(self, node, text):
75 def cmp(self, node, text):
77 """compare text with a given file revision"""
76 """compare text with a given file revision"""
78
77
79 # for renames, we have to go the slow way
78 # for renames, we have to go the slow way
80 if self.renamed(node):
79 if self.renamed(node):
81 t2 = self.read(node)
80 t2 = self.read(node)
82 return t2 != text
81 return t2 != text
83
82
84 return revlog.cmp(self, node, text)
83 return revlog.cmp(self, node, text)
@@ -1,1173 +1,1173 b''
1 # hgweb/hgweb_mod.py - Web interface for a repository.
1 # hgweb/hgweb_mod.py - Web interface for a repository.
2 #
2 #
3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
4 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
5 #
5 #
6 # This software may be used and distributed according to the terms
6 # This software may be used and distributed according to the terms
7 # of the GNU General Public License, incorporated herein by reference.
7 # of the GNU General Public License, incorporated herein by reference.
8
8
9 import os, mimetypes, re, zlib, mimetools, cStringIO, sys
9 import os, mimetypes, re, zlib, mimetools, cStringIO, sys
10 import tempfile, urllib, bz2
10 import tempfile, urllib, bz2
11 from mercurial.node import *
11 from mercurial.node import *
12 from mercurial.i18n import gettext as _
12 from mercurial.i18n import gettext as _
13 from mercurial import mdiff, ui, hg, util, archival, streamclone, patch
13 from mercurial import mdiff, ui, hg, util, archival, streamclone, patch
14 from mercurial import revlog, templater
14 from mercurial import revlog, templater
15 from common import get_mtime, staticfile, style_map
15 from common import get_mtime, staticfile, style_map
16
16
17 def _up(p):
17 def _up(p):
18 if p[0] != "/":
18 if p[0] != "/":
19 p = "/" + p
19 p = "/" + p
20 if p[-1] == "/":
20 if p[-1] == "/":
21 p = p[:-1]
21 p = p[:-1]
22 up = os.path.dirname(p)
22 up = os.path.dirname(p)
23 if up == "/":
23 if up == "/":
24 return "/"
24 return "/"
25 return up + "/"
25 return up + "/"
26
26
27 def revnavgen(pos, pagelen, limit, nodefunc):
27 def revnavgen(pos, pagelen, limit, nodefunc):
28 def seq(factor, limit=None):
28 def seq(factor, limit=None):
29 if limit:
29 if limit:
30 yield limit
30 yield limit
31 if limit >= 20 and limit <= 40:
31 if limit >= 20 and limit <= 40:
32 yield 50
32 yield 50
33 else:
33 else:
34 yield 1 * factor
34 yield 1 * factor
35 yield 3 * factor
35 yield 3 * factor
36 for f in seq(factor * 10):
36 for f in seq(factor * 10):
37 yield f
37 yield f
38
38
39 def nav(**map):
39 def nav(**map):
40 l = []
40 l = []
41 last = 0
41 last = 0
42 for f in seq(1, pagelen):
42 for f in seq(1, pagelen):
43 if f < pagelen or f <= last:
43 if f < pagelen or f <= last:
44 continue
44 continue
45 if f > limit:
45 if f > limit:
46 break
46 break
47 last = f
47 last = f
48 if pos + f < limit:
48 if pos + f < limit:
49 l.append(("+%d" % f, hex(nodefunc(pos + f).node())))
49 l.append(("+%d" % f, hex(nodefunc(pos + f).node())))
50 if pos - f >= 0:
50 if pos - f >= 0:
51 l.insert(0, ("-%d" % f, hex(nodefunc(pos - f).node())))
51 l.insert(0, ("-%d" % f, hex(nodefunc(pos - f).node())))
52
52
53 try:
53 try:
54 yield {"label": "(0)", "node": hex(nodefunc('0').node())}
54 yield {"label": "(0)", "node": hex(nodefunc('0').node())}
55
55
56 for label, node in l:
56 for label, node in l:
57 yield {"label": label, "node": node}
57 yield {"label": label, "node": node}
58
58
59 yield {"label": "tip", "node": "tip"}
59 yield {"label": "tip", "node": "tip"}
60 except hg.RepoError:
60 except hg.RepoError:
61 pass
61 pass
62
62
63 return nav
63 return nav
64
64
65 class hgweb(object):
65 class hgweb(object):
66 def __init__(self, repo, name=None):
66 def __init__(self, repo, name=None):
67 if type(repo) == type(""):
67 if type(repo) == type(""):
68 self.repo = hg.repository(ui.ui(report_untrusted=False), repo)
68 self.repo = hg.repository(ui.ui(report_untrusted=False), repo)
69 else:
69 else:
70 self.repo = repo
70 self.repo = repo
71
71
72 self.mtime = -1
72 self.mtime = -1
73 self.reponame = name
73 self.reponame = name
74 self.archives = 'zip', 'gz', 'bz2'
74 self.archives = 'zip', 'gz', 'bz2'
75 self.stripecount = 1
75 self.stripecount = 1
76 # a repo owner may set web.templates in .hg/hgrc to get any file
76 # a repo owner may set web.templates in .hg/hgrc to get any file
77 # readable by the user running the CGI script
77 # readable by the user running the CGI script
78 self.templatepath = self.config("web", "templates",
78 self.templatepath = self.config("web", "templates",
79 templater.templatepath(),
79 templater.templatepath(),
80 untrusted=False)
80 untrusted=False)
81
81
82 # The CGI scripts are often run by a user different from the repo owner.
82 # The CGI scripts are often run by a user different from the repo owner.
83 # Trust the settings from the .hg/hgrc files by default.
83 # Trust the settings from the .hg/hgrc files by default.
84 def config(self, section, name, default=None, untrusted=True):
84 def config(self, section, name, default=None, untrusted=True):
85 return self.repo.ui.config(section, name, default,
85 return self.repo.ui.config(section, name, default,
86 untrusted=untrusted)
86 untrusted=untrusted)
87
87
88 def configbool(self, section, name, default=False, untrusted=True):
88 def configbool(self, section, name, default=False, untrusted=True):
89 return self.repo.ui.configbool(section, name, default,
89 return self.repo.ui.configbool(section, name, default,
90 untrusted=untrusted)
90 untrusted=untrusted)
91
91
92 def configlist(self, section, name, default=None, untrusted=True):
92 def configlist(self, section, name, default=None, untrusted=True):
93 return self.repo.ui.configlist(section, name, default,
93 return self.repo.ui.configlist(section, name, default,
94 untrusted=untrusted)
94 untrusted=untrusted)
95
95
96 def refresh(self):
96 def refresh(self):
97 mtime = get_mtime(self.repo.root)
97 mtime = get_mtime(self.repo.root)
98 if mtime != self.mtime:
98 if mtime != self.mtime:
99 self.mtime = mtime
99 self.mtime = mtime
100 self.repo = hg.repository(self.repo.ui, self.repo.root)
100 self.repo = hg.repository(self.repo.ui, self.repo.root)
101 self.maxchanges = int(self.config("web", "maxchanges", 10))
101 self.maxchanges = int(self.config("web", "maxchanges", 10))
102 self.stripecount = int(self.config("web", "stripes", 1))
102 self.stripecount = int(self.config("web", "stripes", 1))
103 self.maxshortchanges = int(self.config("web", "maxshortchanges", 60))
103 self.maxshortchanges = int(self.config("web", "maxshortchanges", 60))
104 self.maxfiles = int(self.config("web", "maxfiles", 10))
104 self.maxfiles = int(self.config("web", "maxfiles", 10))
105 self.allowpull = self.configbool("web", "allowpull", True)
105 self.allowpull = self.configbool("web", "allowpull", True)
106
106
107 def archivelist(self, nodeid):
107 def archivelist(self, nodeid):
108 allowed = self.configlist("web", "allow_archive")
108 allowed = self.configlist("web", "allow_archive")
109 for i, spec in self.archive_specs.iteritems():
109 for i, spec in self.archive_specs.iteritems():
110 if i in allowed or self.configbool("web", "allow" + i):
110 if i in allowed or self.configbool("web", "allow" + i):
111 yield {"type" : i, "extension" : spec[2], "node" : nodeid}
111 yield {"type" : i, "extension" : spec[2], "node" : nodeid}
112
112
113 def listfilediffs(self, files, changeset):
113 def listfilediffs(self, files, changeset):
114 for f in files[:self.maxfiles]:
114 for f in files[:self.maxfiles]:
115 yield self.t("filedifflink", node=hex(changeset), file=f)
115 yield self.t("filedifflink", node=hex(changeset), file=f)
116 if len(files) > self.maxfiles:
116 if len(files) > self.maxfiles:
117 yield self.t("fileellipses")
117 yield self.t("fileellipses")
118
118
119 def siblings(self, siblings=[], hiderev=None, **args):
119 def siblings(self, siblings=[], hiderev=None, **args):
120 siblings = [s for s in siblings if s.node() != nullid]
120 siblings = [s for s in siblings if s.node() != nullid]
121 if len(siblings) == 1 and siblings[0].rev() == hiderev:
121 if len(siblings) == 1 and siblings[0].rev() == hiderev:
122 return
122 return
123 for s in siblings:
123 for s in siblings:
124 d = {'node': hex(s.node()), 'rev': s.rev()}
124 d = {'node': hex(s.node()), 'rev': s.rev()}
125 if hasattr(s, 'path'):
125 if hasattr(s, 'path'):
126 d['file'] = s.path()
126 d['file'] = s.path()
127 d.update(args)
127 d.update(args)
128 yield d
128 yield d
129
129
130 def renamelink(self, fl, node):
130 def renamelink(self, fl, node):
131 r = fl.renamed(node)
131 r = fl.renamed(node)
132 if r:
132 if r:
133 return [dict(file=r[0], node=hex(r[1]))]
133 return [dict(file=r[0], node=hex(r[1]))]
134 return []
134 return []
135
135
136 def showtag(self, t1, node=nullid, **args):
136 def showtag(self, t1, node=nullid, **args):
137 for t in self.repo.nodetags(node):
137 for t in self.repo.nodetags(node):
138 yield self.t(t1, tag=t, **args)
138 yield self.t(t1, tag=t, **args)
139
139
    def diff(self, node1, node2, files):
        """Yield rendered "diffblock" templates for the changes between
        node1 and node2, optionally restricted to *files* (a list of
        exact paths and/or directory prefixes)."""
        def filterfiles(filters, files):
            # keep exact matches, plus everything under a filter that
            # names a directory (a trailing separator is appended first)
            l = [x for x in files if x in filters]

            for t in filters:
                if t and t[-1] != os.sep:
                    t += os.sep
                l += [x for x in files if x.startswith(t)]
            return l

        parity = [0]   # one-element list: a mutable cell diffblock() can flip
        def diffblock(diff, f, fn):
            yield self.t("diffblock",
                         lines=prettyprintlines(diff),
                         parity=parity[0],
                         file=f,
                         filenode=hex(fn or nullid))
            parity[0] = 1 - parity[0]

        def prettyprintlines(diff):
            # classify each diff line so the templates can style it
            for l in diff.splitlines(1):
                if l.startswith('+'):
                    yield self.t("difflineplus", line=l)
                elif l.startswith('-'):
                    yield self.t("difflineminus", line=l)
                elif l.startswith('@'):
                    yield self.t("difflineat", line=l)
                else:
                    yield self.t("diffline", line=l)

        r = self.repo
        c1 = r.changectx(node1)
        c2 = r.changectx(node2)
        date1 = util.datestr(c1.date())
        date2 = util.datestr(c2.date())

        modified, added, removed, deleted, unknown = r.status(node1, node2)[:5]
        if files:
            modified, added, removed = map(lambda x: filterfiles(files, x),
                                           (modified, added, removed))

        diffopts = patch.diffopts(self.repo.ui, untrusted=True)
        # modified: old vs new; added: no old side; removed: no new side
        for f in modified:
            to = c1.filectx(f).data()
            tn = c2.filectx(f).data()
            yield diffblock(mdiff.unidiff(to, date1, tn, date2, f,
                                          opts=diffopts), f, tn)
        for f in added:
            to = None
            tn = c2.filectx(f).data()
            yield diffblock(mdiff.unidiff(to, date1, tn, date2, f,
                                          opts=diffopts), f, tn)
        for f in removed:
            to = c1.filectx(f).data()
            tn = None
            yield diffblock(mdiff.unidiff(to, date1, tn, date2, f,
                                          opts=diffopts), f, tn)
197
197
    def changelog(self, ctx, shortlog=False):
        """Render the changelog page (or the shortlog page when
        *shortlog* is true), centred on revision *ctx*."""
        def changelist(**map):
            parity = (start - end) & 1
            cl = self.repo.changelog
            l = [] # build a list in forward order for efficiency
            for i in xrange(start, end):
                ctx = self.repo.changectx(i)
                n = ctx.node()

                # front-insert so entries come out newest-first
                l.insert(0, {"parity": parity,
                             "author": ctx.user(),
                             "parent": self.siblings(ctx.parents(), i - 1),
                             "child": self.siblings(ctx.children(), i + 1),
                             "changelogtag": self.showtag("changelogtag",n),
                             "desc": ctx.description(),
                             "date": ctx.date(),
                             "files": self.listfilediffs(ctx.files(), n),
                             "rev": i,
                             "node": hex(n)})
                parity = 1 - parity

            for e in l:
                yield e

        # shortlog pages show more (but more compact) entries per page
        maxchanges = shortlog and self.maxshortchanges or self.maxchanges
        cl = self.repo.changelog
        count = cl.count()
        pos = ctx.rev()
        start = max(0, pos - maxchanges + 1)
        end = min(count, start + maxchanges)
        pos = end - 1

        changenav = revnavgen(pos, maxchanges, count, self.repo.changectx)

        yield self.t(shortlog and 'shortlog' or 'changelog',
                     changenav=changenav,
                     node=hex(cl.tip()),
                     rev=pos, changesets=count, entries=changelist,
                     archives=self.archivelist("tip"))
237
237
    def search(self, query):
        """Render the search-results page: changesets whose user,
        description or (first 20) file names contain every word of
        *query*, case-insensitively, newest first, capped at
        self.maxchanges hits."""

        def changelist(**map):
            cl = self.repo.changelog
            count = 0
            qw = query.lower().split()

            def revgen():
                # walk history newest-first in batches of 100
                # NOTE(review): the inner range excludes rev i itself, so
                # the tip revision appears never to be yielded -- confirm
                # against upstream history before relying on this.
                for i in xrange(cl.count() - 1, 0, -100):
                    l = []
                    for j in xrange(max(0, i - 100), i):
                        ctx = self.repo.changectx(j)
                        l.append(ctx)
                    l.reverse()
                    for e in l:
                        yield e

            for ctx in revgen():
                miss = 0
                # every query word must match somewhere in the changeset
                for q in qw:
                    if not (q in ctx.user().lower() or
                            q in ctx.description().lower() or
                            q in " ".join(ctx.files()[:20]).lower()):
                        miss = 1
                        break
                if miss:
                    continue

                count += 1
                n = ctx.node()

                yield self.t('searchentry',
                             parity=self.stripes(count),
                             author=ctx.user(),
                             parent=self.siblings(ctx.parents()),
                             child=self.siblings(ctx.children()),
                             changelogtag=self.showtag("changelogtag",n),
                             desc=ctx.description(),
                             date=ctx.date(),
                             files=self.listfilediffs(ctx.files(), n),
                             rev=ctx.rev(),
                             node=hex(n))

                if count >= self.maxchanges:
                    break

        cl = self.repo.changelog

        yield self.t('search',
                     query=query,
                     node=hex(cl.tip()),
                     entries=changelist)
290
290
    def changeset(self, ctx):
        """Render the changeset page for *ctx*: per-file links plus a
        diff against its first parent."""
        n = ctx.node()
        parents = ctx.parents()
        p1 = parents[0].node()

        files = []
        parity = 0
        for f in ctx.files():
            files.append(self.t("filenodelink",
                                node=hex(n), file=f,
                                parity=parity))
            parity = 1 - parity

        def diff(**map):
            # lazy: the diff is only computed if the template asks for it
            yield self.diff(p1, n, None)

        yield self.t('changeset',
                     diff=diff,
                     rev=ctx.rev(),
                     node=hex(n),
                     parent=self.siblings(parents),
                     child=self.siblings(ctx.children()),
                     changesettag=self.showtag("changesettag",n),
                     author=ctx.user(),
                     desc=ctx.description(),
                     date=ctx.date(),
                     files=files,
                     archives=self.archivelist(hex(n)))
319
319
    def filelog(self, fctx):
        """Render the per-file history page for *fctx*, paginated in
        chunks of self.maxshortchanges revisions."""
        f = fctx.path()
        fl = fctx.filelog()
        count = fl.count()
        pagelen = self.maxshortchanges
        pos = fctx.filerev()
        start = max(0, pos - pagelen + 1)
        end = min(count, start + pagelen)
        pos = end - 1

        def entries(**map):
            l = []
            parity = (count - 1) & 1

            for i in xrange(start, end):
                ctx = fctx.filectx(i)
                n = fl.node(i)

                # front-insert so entries come out newest-first
                l.insert(0, {"parity": parity,
                             "filerev": i,
                             "file": f,
                             "node": hex(ctx.node()),
                             "author": ctx.user(),
                             "date": ctx.date(),
                             "rename": self.renamelink(fl, n),
                             "parent": self.siblings(fctx.parents()),
                             "child": self.siblings(fctx.children()),
                             "desc": ctx.description()})
                parity = 1 - parity

            for e in l:
                yield e

        nodefunc = lambda x: fctx.filectx(fileid=x)
        nav = revnavgen(pos, pagelen, count, nodefunc)
        yield self.t("filelog", file=f, node=hex(fctx.node()), nav=nav,
                     entries=entries)
357
357
    def filerevision(self, fctx):
        """Render a single file revision: a line-by-line text view plus
        the raw data, with binary content shown as a placeholder."""
        f = fctx.path()
        text = fctx.data()
        fl = fctx.filelog()
        n = fctx.filenode()

        mt = mimetypes.guess_type(f)[0]
        rawtext = text   # keep the unmodified data for the "raw" view
        if util.binary(text):
            mt = mt or 'application/octet-stream'
            text = "(binary:%s)" % mt
        mt = mt or 'text/plain'

        def lines():
            # splitlines(1) keeps line endings attached to each line
            for l, t in enumerate(text.splitlines(1)):
                yield {"line": t,
                       "linenumber": "% 6d" % (l + 1),
                       "parity": self.stripes(l)}

        yield self.t("filerevision",
                     file=f,
                     path=_up(f),
                     text=lines(),
                     raw=rawtext,
                     mimetype=mt,
                     rev=fctx.rev(),
                     node=hex(fctx.node()),
                     author=fctx.user(),
                     date=fctx.date(),
                     desc=fctx.description(),
                     parent=self.siblings(fctx.parents()),
                     child=self.siblings(fctx.children()),
                     rename=self.renamelink(fl, n),
                     permissions=fctx.manifest().execf(f))
392
392
    def fileannotate(self, fctx):
        """Render the annotate (blame) page for *fctx*, following the
        file across renames."""
        f = fctx.path()
        n = fctx.filenode()
        fl = fctx.filelog()

        def annotate(**map):
            parity = 0
            last = None
            for f, l in fctx.annotate(follow=True):
                fnode = f.filenode()
                name = self.repo.ui.shortuser(f.user())

                # flip the shading only when the originating revision
                # changes, so runs from one revision share a colour
                if last != fnode:
                    parity = 1 - parity
                    last = fnode

                yield {"parity": parity,
                       "node": hex(f.node()),
                       "rev": f.rev(),
                       "author": name,
                       "file": f.path(),
                       "line": l}

        yield self.t("fileannotate",
                     file=f,
                     annotate=annotate,
                     path=_up(f),
                     rev=fctx.rev(),
                     node=hex(fctx.node()),
                     author=fctx.user(),
                     date=fctx.date(),
                     desc=fctx.description(),
                     rename=self.renamelink(fl, n),
                     parent=self.siblings(fctx.parents()),
                     child=self.siblings(fctx.children()),
                     permissions=fctx.manifest().execf(f))
429
429
    def manifest(self, ctx, path):
        """Render a directory listing of the manifest of *ctx* at
        *path*: immediate files and subdirectories only."""
        mf = ctx.manifest()
        node = ctx.node()

        # short name -> (full path, filenode or None for a subdirectory)
        files = {}

        if path and path[-1] != "/":
            path += "/"
        l = len(path)
        abspath = "/" + path

        for f, n in mf.items():
            if f[:l] != path:
                continue
            remain = f[l:]
            if "/" in remain:
                short = remain[:remain.index("/") + 1] # bleah
                files[short] = (f, None)   # collapse into one dir entry
            else:
                short = os.path.basename(remain)
                files[short] = (f, n)

        def filelist(**map):
            parity = 0
            fl = files.keys()
            fl.sort()
            for f in fl:
                full, fnode = files[f]
                if not fnode:
                    continue   # directories are handled by dirlist()

                yield {"file": full,
                       "parity": self.stripes(parity),
                       "basename": f,
                       "size": ctx.filectx(full).size(),
                       "permissions": mf.execf(full)}
                parity += 1

        def dirlist(**map):
            parity = 0
            fl = files.keys()
            fl.sort()
            for f in fl:
                full, fnode = files[f]
                if fnode:
                    continue   # plain files are handled by filelist()

                yield {"parity": self.stripes(parity),
                       "path": os.path.join(abspath, f),
                       "basename": f[:-1]}   # drop the trailing "/"
                parity += 1

        yield self.t("manifest",
                     rev=ctx.rev(),
                     node=hex(node),
                     path=abspath,
                     up=_up(abspath),
                     fentries=filelist,
                     dentries=dirlist,
                     archives=self.archivelist(hex(node)))
490
490
    def tags(self):
        """Render the tags page: all repository tags, newest first."""
        i = self.repo.tagslist()
        i.reverse()

        def entries(notip=False, **map):
            parity = 0
            for k, n in i:
                if notip and k == "tip":
                    continue
                yield {"parity": self.stripes(parity),
                       "tag": k,
                       "date": self.repo.changectx(n).date(),
                       "node": hex(n)}
                parity += 1

        # two views over the same list: with and without the "tip" tag
        yield self.t("tags",
                     node=hex(self.repo.changelog.tip()),
                     entries=lambda **x: entries(False, **x),
                     entriesnotip=lambda **x: entries(True, **x))
510
510
    def summary(self):
        """Render the repository summary page: up to 10 recent tags, up
        to 10 heads, and the last self.maxchanges changesets."""
        i = self.repo.tagslist()
        i.reverse()

        def tagentries(**map):
            parity = 0
            count = 0
            for k, n in i:
                if k == "tip": # skip tip
                    continue;

                count += 1
                if count > 10: # limit to 10 tags
                    break;

                yield self.t("tagentry",
                             parity=self.stripes(parity),
                             tag=k,
                             node=hex(n),
                             date=self.repo.changectx(n).date())
                parity += 1

        def heads(**map):
            # up to 10 repository heads, with their branch names
            parity = 0
            count = 0

            for node in self.repo.heads():
                count += 1
                if count > 10:
                    break;

                ctx = self.repo.changectx(node)

                yield {'parity': self.stripes(parity),
                       'branch': ctx.branch(),
                       'node': hex(node),
                       'date': ctx.date()}
                parity += 1

        def changelist(**map):
            parity = 0
            l = [] # build a list in forward order for efficiency
            for i in xrange(start, end):
                ctx = self.repo.changectx(i)
                hn = hex(ctx.node())

                # front-insert so entries come out newest-first
                l.insert(0, self.t(
                    'shortlogentry',
                    parity=parity,
                    author=ctx.user(),
                    desc=ctx.description(),
                    date=ctx.date(),
                    rev=i,
                    node=hn))
                parity = 1 - parity

            yield l

        cl = self.repo.changelog
        count = cl.count()
        start = max(0, count - self.maxchanges)
        end = min(count, start + self.maxchanges)

        yield self.t("summary",
                     desc=self.config("web", "description", "unknown"),
                     owner=(self.config("ui", "username") or # preferred
                            self.config("web", "contact") or # deprecated
                            self.config("web", "author", "unknown")), # also
                     lastchange=cl.read(cl.tip())[2],
                     tags=tagentries,
                     heads=heads,
                     shortlog=changelist,
                     node=hex(cl.tip()),
                     archives=self.archivelist("tip"))
585
585
586 def filediff(self, fctx):
586 def filediff(self, fctx):
587 n = fctx.node()
587 n = fctx.node()
588 path = fctx.path()
588 path = fctx.path()
589 parents = fctx.parents()
589 parents = fctx.parents()
590 p1 = parents and parents[0].node() or nullid
590 p1 = parents and parents[0].node() or nullid
591
591
592 def diff(**map):
592 def diff(**map):
593 yield self.diff(p1, n, [path])
593 yield self.diff(p1, n, [path])
594
594
595 yield self.t("filediff",
595 yield self.t("filediff",
596 file=path,
596 file=path,
597 node=hex(n),
597 node=hex(n),
598 rev=fctx.rev(),
598 rev=fctx.rev(),
599 parent=self.siblings(parents),
599 parent=self.siblings(parents),
600 child=self.siblings(fctx.children()),
600 child=self.siblings(fctx.children()),
601 diff=diff)
601 diff=diff)
602
602
    # archive type key -> (MIME type, archival kind, file extension,
    #                      Content-encoding header value or None)
    archive_specs = {
        'bz2': ('application/x-tar', 'tbz2', '.tar.bz2', None),
        'gz': ('application/x-tar', 'tgz', '.tar.gz', None),
        'zip': ('application/zip', 'zip', '.zip', None),
        }
608
608
    def archive(self, req, id, type_):
        """Stream an archive (per archive_specs[type_]) of revision *id*
        to the client with download headers set."""
        # sanitize the repo name so it is safe in a filename
        reponame = re.sub(r"\W+", "-", os.path.basename(self.reponame))
        cnode = self.repo.lookup(id)
        arch_version = id
        # if *id* was already a full binary node, use its short hex form
        if cnode == id:
            arch_version = short(cnode)
        name = "%s-%s" % (reponame, arch_version)
        mimetype, artype, extension, encoding = self.archive_specs[type_]
        headers = [('Content-type', mimetype),
                   ('Content-disposition', 'attachment; filename=%s%s' %
                    (name, extension))]
        if encoding:
            headers.append(('Content-encoding', encoding))
        req.header(headers)
        archival.archive(self.repo, req.out, cnode, artype, prefix=name)
624
624
    # TODO: attach tag information to more of the views:
    #  - tags -> list of changesets corresponding to tags
    #  - find by tag, changeset, or file
628
628
629 def cleanpath(self, path):
629 def cleanpath(self, path):
630 path = path.lstrip('/')
630 path = path.lstrip('/')
631 return util.canonpath(self.repo.root, '', path)
631 return util.canonpath(self.repo.root, '', path)
632
632
    def run(self):
        """CGI entry point: wrap this application in WSGI and hand it to
        the CGI-to-WSGI launcher.  Refuses to run outside CGI."""
        if not os.environ.get('GATEWAY_INTERFACE', '').startswith("CGI/1."):
            raise RuntimeError("This function is only intended to be called while running as a CGI script.")
        # imported lazily so non-CGI users never pay for these modules
        import mercurial.hgweb.wsgicgi as wsgicgi
        from request import wsgiapplication
        def make_web_app():
            return self
        wsgicgi.launch(wsgiapplication(make_web_app))
641
641
642 def run_wsgi(self, req):
642 def run_wsgi(self, req):
643 def header(**map):
643 def header(**map):
644 header_file = cStringIO.StringIO(
644 header_file = cStringIO.StringIO(
645 ''.join(self.t("header", encoding=util._encoding, **map)))
645 ''.join(self.t("header", encoding=util._encoding, **map)))
646 msg = mimetools.Message(header_file, 0)
646 msg = mimetools.Message(header_file, 0)
647 req.header(msg.items())
647 req.header(msg.items())
648 yield header_file.read()
648 yield header_file.read()
649
649
650 def rawfileheader(**map):
650 def rawfileheader(**map):
651 req.header([('Content-type', map['mimetype']),
651 req.header([('Content-type', map['mimetype']),
652 ('Content-disposition', 'filename=%s' % map['file']),
652 ('Content-disposition', 'filename=%s' % map['file']),
653 ('Content-length', str(len(map['raw'])))])
653 ('Content-length', str(len(map['raw'])))])
654 yield ''
654 yield ''
655
655
656 def footer(**map):
656 def footer(**map):
657 yield self.t("footer", **map)
657 yield self.t("footer", **map)
658
658
659 def motd(**map):
659 def motd(**map):
660 yield self.config("web", "motd", "")
660 yield self.config("web", "motd", "")
661
661
662 def expand_form(form):
662 def expand_form(form):
663 shortcuts = {
663 shortcuts = {
664 'cl': [('cmd', ['changelog']), ('rev', None)],
664 'cl': [('cmd', ['changelog']), ('rev', None)],
665 'sl': [('cmd', ['shortlog']), ('rev', None)],
665 'sl': [('cmd', ['shortlog']), ('rev', None)],
666 'cs': [('cmd', ['changeset']), ('node', None)],
666 'cs': [('cmd', ['changeset']), ('node', None)],
667 'f': [('cmd', ['file']), ('filenode', None)],
667 'f': [('cmd', ['file']), ('filenode', None)],
668 'fl': [('cmd', ['filelog']), ('filenode', None)],
668 'fl': [('cmd', ['filelog']), ('filenode', None)],
669 'fd': [('cmd', ['filediff']), ('node', None)],
669 'fd': [('cmd', ['filediff']), ('node', None)],
670 'fa': [('cmd', ['annotate']), ('filenode', None)],
670 'fa': [('cmd', ['annotate']), ('filenode', None)],
671 'mf': [('cmd', ['manifest']), ('manifest', None)],
671 'mf': [('cmd', ['manifest']), ('manifest', None)],
672 'ca': [('cmd', ['archive']), ('node', None)],
672 'ca': [('cmd', ['archive']), ('node', None)],
673 'tags': [('cmd', ['tags'])],
673 'tags': [('cmd', ['tags'])],
674 'tip': [('cmd', ['changeset']), ('node', ['tip'])],
674 'tip': [('cmd', ['changeset']), ('node', ['tip'])],
675 'static': [('cmd', ['static']), ('file', None)]
675 'static': [('cmd', ['static']), ('file', None)]
676 }
676 }
677
677
678 for k in shortcuts.iterkeys():
678 for k in shortcuts.iterkeys():
679 if form.has_key(k):
679 if form.has_key(k):
680 for name, value in shortcuts[k]:
680 for name, value in shortcuts[k]:
681 if value is None:
681 if value is None:
682 value = form[k]
682 value = form[k]
683 form[name] = value
683 form[name] = value
684 del form[k]
684 del form[k]
685
685
686 def rewrite_request(req):
686 def rewrite_request(req):
687 '''translate new web interface to traditional format'''
687 '''translate new web interface to traditional format'''
688
688
689 def spliturl(req):
689 def spliturl(req):
690 def firstitem(query):
690 def firstitem(query):
691 return query.split('&', 1)[0].split(';', 1)[0]
691 return query.split('&', 1)[0].split(';', 1)[0]
692
692
693 def normurl(url):
693 def normurl(url):
694 inner = '/'.join([x for x in url.split('/') if x])
694 inner = '/'.join([x for x in url.split('/') if x])
695 tl = len(url) > 1 and url.endswith('/') and '/' or ''
695 tl = len(url) > 1 and url.endswith('/') and '/' or ''
696
696
697 return '%s%s%s' % (url.startswith('/') and '/' or '',
697 return '%s%s%s' % (url.startswith('/') and '/' or '',
698 inner, tl)
698 inner, tl)
699
699
700 root = normurl(urllib.unquote(req.env.get('REQUEST_URI', '').split('?', 1)[0]))
700 root = normurl(urllib.unquote(req.env.get('REQUEST_URI', '').split('?', 1)[0]))
701 pi = normurl(req.env.get('PATH_INFO', ''))
701 pi = normurl(req.env.get('PATH_INFO', ''))
702 if pi:
702 if pi:
703 # strip leading /
703 # strip leading /
704 pi = pi[1:]
704 pi = pi[1:]
705 if pi:
705 if pi:
706 root = root[:-len(pi)]
706 root = root[:-len(pi)]
707 if req.env.has_key('REPO_NAME'):
707 if req.env.has_key('REPO_NAME'):
708 rn = req.env['REPO_NAME'] + '/'
708 rn = req.env['REPO_NAME'] + '/'
709 root += rn
709 root += rn
710 query = pi[len(rn):]
710 query = pi[len(rn):]
711 else:
711 else:
712 query = pi
712 query = pi
713 else:
713 else:
714 root += '?'
714 root += '?'
715 query = firstitem(req.env['QUERY_STRING'])
715 query = firstitem(req.env['QUERY_STRING'])
716
716
717 return (root, query)
717 return (root, query)
718
718
719 req.url, query = spliturl(req)
719 req.url, query = spliturl(req)
720
720
721 if req.form.has_key('cmd'):
721 if req.form.has_key('cmd'):
722 # old style
722 # old style
723 return
723 return
724
724
725 args = query.split('/', 2)
725 args = query.split('/', 2)
726 if not args or not args[0]:
726 if not args or not args[0]:
727 return
727 return
728
728
729 cmd = args.pop(0)
729 cmd = args.pop(0)
730 style = cmd.rfind('-')
730 style = cmd.rfind('-')
731 if style != -1:
731 if style != -1:
732 req.form['style'] = [cmd[:style]]
732 req.form['style'] = [cmd[:style]]
733 cmd = cmd[style+1:]
733 cmd = cmd[style+1:]
734 # avoid accepting e.g. style parameter as command
734 # avoid accepting e.g. style parameter as command
735 if hasattr(self, 'do_' + cmd):
735 if hasattr(self, 'do_' + cmd):
736 req.form['cmd'] = [cmd]
736 req.form['cmd'] = [cmd]
737
737
738 if args and args[0]:
738 if args and args[0]:
739 node = args.pop(0)
739 node = args.pop(0)
740 req.form['node'] = [node]
740 req.form['node'] = [node]
741 if args:
741 if args:
742 req.form['file'] = args
742 req.form['file'] = args
743
743
744 if cmd == 'static':
744 if cmd == 'static':
745 req.form['file'] = req.form['node']
745 req.form['file'] = req.form['node']
746 elif cmd == 'archive':
746 elif cmd == 'archive':
747 fn = req.form['node'][0]
747 fn = req.form['node'][0]
748 for type_, spec in self.archive_specs.iteritems():
748 for type_, spec in self.archive_specs.iteritems():
749 ext = spec[2]
749 ext = spec[2]
750 if fn.endswith(ext):
750 if fn.endswith(ext):
751 req.form['node'] = [fn[:-len(ext)]]
751 req.form['node'] = [fn[:-len(ext)]]
752 req.form['type'] = [type_]
752 req.form['type'] = [type_]
753
753
754 def sessionvars(**map):
754 def sessionvars(**map):
755 fields = []
755 fields = []
756 if req.form.has_key('style'):
756 if req.form.has_key('style'):
757 style = req.form['style'][0]
757 style = req.form['style'][0]
758 if style != self.config('web', 'style', ''):
758 if style != self.config('web', 'style', ''):
759 fields.append(('style', style))
759 fields.append(('style', style))
760
760
761 separator = req.url[-1] == '?' and ';' or '?'
761 separator = req.url[-1] == '?' and ';' or '?'
762 for name, value in fields:
762 for name, value in fields:
763 yield dict(name=name, value=value, separator=separator)
763 yield dict(name=name, value=value, separator=separator)
764 separator = ';'
764 separator = ';'
765
765
766 self.refresh()
766 self.refresh()
767
767
768 expand_form(req.form)
768 expand_form(req.form)
769 rewrite_request(req)
769 rewrite_request(req)
770
770
771 style = self.config("web", "style", "")
771 style = self.config("web", "style", "")
772 if req.form.has_key('style'):
772 if req.form.has_key('style'):
773 style = req.form['style'][0]
773 style = req.form['style'][0]
774 mapfile = style_map(self.templatepath, style)
774 mapfile = style_map(self.templatepath, style)
775
775
776 port = req.env["SERVER_PORT"]
776 port = req.env["SERVER_PORT"]
777 port = port != "80" and (":" + port) or ""
777 port = port != "80" and (":" + port) or ""
778 urlbase = 'http://%s%s' % (req.env['SERVER_NAME'], port)
778 urlbase = 'http://%s%s' % (req.env['SERVER_NAME'], port)
779 staticurl = self.config("web", "staticurl") or req.url + 'static/'
779 staticurl = self.config("web", "staticurl") or req.url + 'static/'
780 if not staticurl.endswith('/'):
780 if not staticurl.endswith('/'):
781 staticurl += '/'
781 staticurl += '/'
782
782
783 if not self.reponame:
783 if not self.reponame:
784 self.reponame = (self.config("web", "name")
784 self.reponame = (self.config("web", "name")
785 or req.env.get('REPO_NAME')
785 or req.env.get('REPO_NAME')
786 or req.url.strip('/') or self.repo.root)
786 or req.url.strip('/') or self.repo.root)
787
787
788 self.t = templater.templater(mapfile, templater.common_filters,
788 self.t = templater.templater(mapfile, templater.common_filters,
789 defaults={"url": req.url,
789 defaults={"url": req.url,
790 "staticurl": staticurl,
790 "staticurl": staticurl,
791 "urlbase": urlbase,
791 "urlbase": urlbase,
792 "repo": self.reponame,
792 "repo": self.reponame,
793 "header": header,
793 "header": header,
794 "footer": footer,
794 "footer": footer,
795 "motd": motd,
795 "motd": motd,
796 "rawfileheader": rawfileheader,
796 "rawfileheader": rawfileheader,
797 "sessionvars": sessionvars
797 "sessionvars": sessionvars
798 })
798 })
799
799
800 try:
800 try:
801 if not req.form.has_key('cmd'):
801 if not req.form.has_key('cmd'):
802 req.form['cmd'] = [self.t.cache['default']]
802 req.form['cmd'] = [self.t.cache['default']]
803
803
804 cmd = req.form['cmd'][0]
804 cmd = req.form['cmd'][0]
805
805
806 method = getattr(self, 'do_' + cmd, None)
806 method = getattr(self, 'do_' + cmd, None)
807 if method:
807 if method:
808 try:
808 try:
809 method(req)
809 method(req)
810 except (hg.RepoError, revlog.RevlogError), inst:
810 except (hg.RepoError, revlog.RevlogError), inst:
811 req.write(self.t("error", error=str(inst)))
811 req.write(self.t("error", error=str(inst)))
812 else:
812 else:
813 req.write(self.t("error", error='No such method: ' + cmd))
813 req.write(self.t("error", error='No such method: ' + cmd))
814 finally:
814 finally:
815 self.t = None
815 self.t = None
816
816
817 def changectx(self, req):
817 def changectx(self, req):
818 if req.form.has_key('node'):
818 if req.form.has_key('node'):
819 changeid = req.form['node'][0]
819 changeid = req.form['node'][0]
820 elif req.form.has_key('manifest'):
820 elif req.form.has_key('manifest'):
821 changeid = req.form['manifest'][0]
821 changeid = req.form['manifest'][0]
822 else:
822 else:
823 changeid = self.repo.changelog.count() - 1
823 changeid = self.repo.changelog.count() - 1
824
824
825 try:
825 try:
826 ctx = self.repo.changectx(changeid)
826 ctx = self.repo.changectx(changeid)
827 except hg.RepoError:
827 except hg.RepoError:
828 man = self.repo.manifest
828 man = self.repo.manifest
829 mn = man.lookup(changeid)
829 mn = man.lookup(changeid)
830 ctx = self.repo.changectx(man.linkrev(mn))
830 ctx = self.repo.changectx(man.linkrev(mn))
831
831
832 return ctx
832 return ctx
833
833
834 def filectx(self, req):
834 def filectx(self, req):
835 path = self.cleanpath(req.form['file'][0])
835 path = self.cleanpath(req.form['file'][0])
836 if req.form.has_key('node'):
836 if req.form.has_key('node'):
837 changeid = req.form['node'][0]
837 changeid = req.form['node'][0]
838 else:
838 else:
839 changeid = req.form['filenode'][0]
839 changeid = req.form['filenode'][0]
840 try:
840 try:
841 ctx = self.repo.changectx(changeid)
841 ctx = self.repo.changectx(changeid)
842 fctx = ctx.filectx(path)
842 fctx = ctx.filectx(path)
843 except hg.RepoError:
843 except hg.RepoError:
844 fctx = self.repo.filectx(path, fileid=changeid)
844 fctx = self.repo.filectx(path, fileid=changeid)
845
845
846 return fctx
846 return fctx
847
847
848 def stripes(self, parity):
848 def stripes(self, parity):
849 "make horizontal stripes for easier reading"
849 "make horizontal stripes for easier reading"
850 if self.stripecount:
850 if self.stripecount:
851 return (1 + parity / self.stripecount) & 1
851 return (1 + parity / self.stripecount) & 1
852 else:
852 else:
853 return 0
853 return 0
854
854
855 def do_log(self, req):
855 def do_log(self, req):
856 if req.form.has_key('file') and req.form['file'][0]:
856 if req.form.has_key('file') and req.form['file'][0]:
857 self.do_filelog(req)
857 self.do_filelog(req)
858 else:
858 else:
859 self.do_changelog(req)
859 self.do_changelog(req)
860
860
861 def do_rev(self, req):
861 def do_rev(self, req):
862 self.do_changeset(req)
862 self.do_changeset(req)
863
863
864 def do_file(self, req):
864 def do_file(self, req):
865 path = self.cleanpath(req.form.get('file', [''])[0])
865 path = self.cleanpath(req.form.get('file', [''])[0])
866 if path:
866 if path:
867 try:
867 try:
868 req.write(self.filerevision(self.filectx(req)))
868 req.write(self.filerevision(self.filectx(req)))
869 return
869 return
870 except revlog.LookupError:
870 except revlog.LookupError:
871 pass
871 pass
872
872
873 req.write(self.manifest(self.changectx(req), path))
873 req.write(self.manifest(self.changectx(req), path))
874
874
875 def do_diff(self, req):
875 def do_diff(self, req):
876 self.do_filediff(req)
876 self.do_filediff(req)
877
877
878 def do_changelog(self, req, shortlog = False):
878 def do_changelog(self, req, shortlog = False):
879 if req.form.has_key('node'):
879 if req.form.has_key('node'):
880 ctx = self.changectx(req)
880 ctx = self.changectx(req)
881 else:
881 else:
882 if req.form.has_key('rev'):
882 if req.form.has_key('rev'):
883 hi = req.form['rev'][0]
883 hi = req.form['rev'][0]
884 else:
884 else:
885 hi = self.repo.changelog.count() - 1
885 hi = self.repo.changelog.count() - 1
886 try:
886 try:
887 ctx = self.repo.changectx(hi)
887 ctx = self.repo.changectx(hi)
888 except hg.RepoError:
888 except hg.RepoError:
889 req.write(self.search(hi)) # XXX redirect to 404 page?
889 req.write(self.search(hi)) # XXX redirect to 404 page?
890 return
890 return
891
891
892 req.write(self.changelog(ctx, shortlog = shortlog))
892 req.write(self.changelog(ctx, shortlog = shortlog))
893
893
894 def do_shortlog(self, req):
894 def do_shortlog(self, req):
895 self.do_changelog(req, shortlog = True)
895 self.do_changelog(req, shortlog = True)
896
896
897 def do_changeset(self, req):
897 def do_changeset(self, req):
898 req.write(self.changeset(self.changectx(req)))
898 req.write(self.changeset(self.changectx(req)))
899
899
900 def do_manifest(self, req):
900 def do_manifest(self, req):
901 req.write(self.manifest(self.changectx(req),
901 req.write(self.manifest(self.changectx(req),
902 self.cleanpath(req.form['path'][0])))
902 self.cleanpath(req.form['path'][0])))
903
903
904 def do_tags(self, req):
904 def do_tags(self, req):
905 req.write(self.tags())
905 req.write(self.tags())
906
906
907 def do_summary(self, req):
907 def do_summary(self, req):
908 req.write(self.summary())
908 req.write(self.summary())
909
909
910 def do_filediff(self, req):
910 def do_filediff(self, req):
911 req.write(self.filediff(self.filectx(req)))
911 req.write(self.filediff(self.filectx(req)))
912
912
913 def do_annotate(self, req):
913 def do_annotate(self, req):
914 req.write(self.fileannotate(self.filectx(req)))
914 req.write(self.fileannotate(self.filectx(req)))
915
915
916 def do_filelog(self, req):
916 def do_filelog(self, req):
917 req.write(self.filelog(self.filectx(req)))
917 req.write(self.filelog(self.filectx(req)))
918
918
919 def do_lookup(self, req):
919 def do_lookup(self, req):
920 try:
920 try:
921 r = hex(self.repo.lookup(req.form['key'][0]))
921 r = hex(self.repo.lookup(req.form['key'][0]))
922 success = 1
922 success = 1
923 except Exception,inst:
923 except Exception,inst:
924 r = str(inst)
924 r = str(inst)
925 success = 0
925 success = 0
926 resp = "%s %s\n" % (success, r)
926 resp = "%s %s\n" % (success, r)
927 req.httphdr("application/mercurial-0.1", length=len(resp))
927 req.httphdr("application/mercurial-0.1", length=len(resp))
928 req.write(resp)
928 req.write(resp)
929
929
930 def do_heads(self, req):
930 def do_heads(self, req):
931 resp = " ".join(map(hex, self.repo.heads())) + "\n"
931 resp = " ".join(map(hex, self.repo.heads())) + "\n"
932 req.httphdr("application/mercurial-0.1", length=len(resp))
932 req.httphdr("application/mercurial-0.1", length=len(resp))
933 req.write(resp)
933 req.write(resp)
934
934
935 def do_branches(self, req):
935 def do_branches(self, req):
936 nodes = []
936 nodes = []
937 if req.form.has_key('nodes'):
937 if req.form.has_key('nodes'):
938 nodes = map(bin, req.form['nodes'][0].split(" "))
938 nodes = map(bin, req.form['nodes'][0].split(" "))
939 resp = cStringIO.StringIO()
939 resp = cStringIO.StringIO()
940 for b in self.repo.branches(nodes):
940 for b in self.repo.branches(nodes):
941 resp.write(" ".join(map(hex, b)) + "\n")
941 resp.write(" ".join(map(hex, b)) + "\n")
942 resp = resp.getvalue()
942 resp = resp.getvalue()
943 req.httphdr("application/mercurial-0.1", length=len(resp))
943 req.httphdr("application/mercurial-0.1", length=len(resp))
944 req.write(resp)
944 req.write(resp)
945
945
946 def do_between(self, req):
946 def do_between(self, req):
947 if req.form.has_key('pairs'):
947 if req.form.has_key('pairs'):
948 pairs = [map(bin, p.split("-"))
948 pairs = [map(bin, p.split("-"))
949 for p in req.form['pairs'][0].split(" ")]
949 for p in req.form['pairs'][0].split(" ")]
950 resp = cStringIO.StringIO()
950 resp = cStringIO.StringIO()
951 for b in self.repo.between(pairs):
951 for b in self.repo.between(pairs):
952 resp.write(" ".join(map(hex, b)) + "\n")
952 resp.write(" ".join(map(hex, b)) + "\n")
953 resp = resp.getvalue()
953 resp = resp.getvalue()
954 req.httphdr("application/mercurial-0.1", length=len(resp))
954 req.httphdr("application/mercurial-0.1", length=len(resp))
955 req.write(resp)
955 req.write(resp)
956
956
957 def do_changegroup(self, req):
957 def do_changegroup(self, req):
958 req.httphdr("application/mercurial-0.1")
958 req.httphdr("application/mercurial-0.1")
959 nodes = []
959 nodes = []
960 if not self.allowpull:
960 if not self.allowpull:
961 return
961 return
962
962
963 if req.form.has_key('roots'):
963 if req.form.has_key('roots'):
964 nodes = map(bin, req.form['roots'][0].split(" "))
964 nodes = map(bin, req.form['roots'][0].split(" "))
965
965
966 z = zlib.compressobj()
966 z = zlib.compressobj()
967 f = self.repo.changegroup(nodes, 'serve')
967 f = self.repo.changegroup(nodes, 'serve')
968 while 1:
968 while 1:
969 chunk = f.read(4096)
969 chunk = f.read(4096)
970 if not chunk:
970 if not chunk:
971 break
971 break
972 req.write(z.compress(chunk))
972 req.write(z.compress(chunk))
973
973
974 req.write(z.flush())
974 req.write(z.flush())
975
975
976 def do_changegroupsubset(self, req):
976 def do_changegroupsubset(self, req):
977 req.httphdr("application/mercurial-0.1")
977 req.httphdr("application/mercurial-0.1")
978 bases = []
978 bases = []
979 heads = []
979 heads = []
980 if not self.allowpull:
980 if not self.allowpull:
981 return
981 return
982
982
983 if req.form.has_key('bases'):
983 if req.form.has_key('bases'):
984 bases = [bin(x) for x in req.form['bases'][0].split(' ')]
984 bases = [bin(x) for x in req.form['bases'][0].split(' ')]
985 if req.form.has_key('heads'):
985 if req.form.has_key('heads'):
986 heads = [bin(x) for x in req.form['heads'][0].split(' ')]
986 heads = [bin(x) for x in req.form['heads'][0].split(' ')]
987
987
988 z = zlib.compressobj()
988 z = zlib.compressobj()
989 f = self.repo.changegroupsubset(bases, heads, 'serve')
989 f = self.repo.changegroupsubset(bases, heads, 'serve')
990 while 1:
990 while 1:
991 chunk = f.read(4096)
991 chunk = f.read(4096)
992 if not chunk:
992 if not chunk:
993 break
993 break
994 req.write(z.compress(chunk))
994 req.write(z.compress(chunk))
995
995
996 req.write(z.flush())
996 req.write(z.flush())
997
997
998 def do_archive(self, req):
998 def do_archive(self, req):
999 type_ = req.form['type'][0]
999 type_ = req.form['type'][0]
1000 allowed = self.configlist("web", "allow_archive")
1000 allowed = self.configlist("web", "allow_archive")
1001 if (type_ in self.archives and (type_ in allowed or
1001 if (type_ in self.archives and (type_ in allowed or
1002 self.configbool("web", "allow" + type_, False))):
1002 self.configbool("web", "allow" + type_, False))):
1003 self.archive(req, req.form['node'][0], type_)
1003 self.archive(req, req.form['node'][0], type_)
1004 return
1004 return
1005
1005
1006 req.write(self.t("error"))
1006 req.write(self.t("error"))
1007
1007
1008 def do_static(self, req):
1008 def do_static(self, req):
1009 fname = req.form['file'][0]
1009 fname = req.form['file'][0]
1010 # a repo owner may set web.static in .hg/hgrc to get any file
1010 # a repo owner may set web.static in .hg/hgrc to get any file
1011 # readable by the user running the CGI script
1011 # readable by the user running the CGI script
1012 static = self.config("web", "static",
1012 static = self.config("web", "static",
1013 os.path.join(self.templatepath, "static"),
1013 os.path.join(self.templatepath, "static"),
1014 untrusted=False)
1014 untrusted=False)
1015 req.write(staticfile(static, fname, req)
1015 req.write(staticfile(static, fname, req)
1016 or self.t("error", error="%r not found" % fname))
1016 or self.t("error", error="%r not found" % fname))
1017
1017
1018 def do_capabilities(self, req):
1018 def do_capabilities(self, req):
1019 caps = ['lookup', 'changegroupsubset']
1019 caps = ['lookup', 'changegroupsubset']
1020 if self.configbool('server', 'uncompressed'):
1020 if self.configbool('server', 'uncompressed'):
1021 caps.append('stream=%d' % self.repo.revlogversion)
1021 caps.append('stream=%d' % self.repo.changelog.version)
1022 # XXX: make configurable and/or share code with do_unbundle:
1022 # XXX: make configurable and/or share code with do_unbundle:
1023 unbundleversions = ['HG10GZ', 'HG10BZ', 'HG10UN']
1023 unbundleversions = ['HG10GZ', 'HG10BZ', 'HG10UN']
1024 if unbundleversions:
1024 if unbundleversions:
1025 caps.append('unbundle=%s' % ','.join(unbundleversions))
1025 caps.append('unbundle=%s' % ','.join(unbundleversions))
1026 resp = ' '.join(caps)
1026 resp = ' '.join(caps)
1027 req.httphdr("application/mercurial-0.1", length=len(resp))
1027 req.httphdr("application/mercurial-0.1", length=len(resp))
1028 req.write(resp)
1028 req.write(resp)
1029
1029
1030 def check_perm(self, req, op, default):
1030 def check_perm(self, req, op, default):
1031 '''check permission for operation based on user auth.
1031 '''check permission for operation based on user auth.
1032 return true if op allowed, else false.
1032 return true if op allowed, else false.
1033 default is policy to use if no config given.'''
1033 default is policy to use if no config given.'''
1034
1034
1035 user = req.env.get('REMOTE_USER')
1035 user = req.env.get('REMOTE_USER')
1036
1036
1037 deny = self.configlist('web', 'deny_' + op)
1037 deny = self.configlist('web', 'deny_' + op)
1038 if deny and (not user or deny == ['*'] or user in deny):
1038 if deny and (not user or deny == ['*'] or user in deny):
1039 return False
1039 return False
1040
1040
1041 allow = self.configlist('web', 'allow_' + op)
1041 allow = self.configlist('web', 'allow_' + op)
1042 return (allow and (allow == ['*'] or user in allow)) or default
1042 return (allow and (allow == ['*'] or user in allow)) or default
1043
1043
1044 def do_unbundle(self, req):
1044 def do_unbundle(self, req):
1045 def bail(response, headers={}):
1045 def bail(response, headers={}):
1046 length = int(req.env['CONTENT_LENGTH'])
1046 length = int(req.env['CONTENT_LENGTH'])
1047 for s in util.filechunkiter(req, limit=length):
1047 for s in util.filechunkiter(req, limit=length):
1048 # drain incoming bundle, else client will not see
1048 # drain incoming bundle, else client will not see
1049 # response when run outside cgi script
1049 # response when run outside cgi script
1050 pass
1050 pass
1051 req.httphdr("application/mercurial-0.1", headers=headers)
1051 req.httphdr("application/mercurial-0.1", headers=headers)
1052 req.write('0\n')
1052 req.write('0\n')
1053 req.write(response)
1053 req.write(response)
1054
1054
1055 # require ssl by default, auth info cannot be sniffed and
1055 # require ssl by default, auth info cannot be sniffed and
1056 # replayed
1056 # replayed
1057 ssl_req = self.configbool('web', 'push_ssl', True)
1057 ssl_req = self.configbool('web', 'push_ssl', True)
1058 if ssl_req:
1058 if ssl_req:
1059 if not req.env.get('HTTPS'):
1059 if not req.env.get('HTTPS'):
1060 bail(_('ssl required\n'))
1060 bail(_('ssl required\n'))
1061 return
1061 return
1062 proto = 'https'
1062 proto = 'https'
1063 else:
1063 else:
1064 proto = 'http'
1064 proto = 'http'
1065
1065
1066 # do not allow push unless explicitly allowed
1066 # do not allow push unless explicitly allowed
1067 if not self.check_perm(req, 'push', False):
1067 if not self.check_perm(req, 'push', False):
1068 bail(_('push not authorized\n'),
1068 bail(_('push not authorized\n'),
1069 headers={'status': '401 Unauthorized'})
1069 headers={'status': '401 Unauthorized'})
1070 return
1070 return
1071
1071
1072 their_heads = req.form['heads'][0].split(' ')
1072 their_heads = req.form['heads'][0].split(' ')
1073
1073
1074 def check_heads():
1074 def check_heads():
1075 heads = map(hex, self.repo.heads())
1075 heads = map(hex, self.repo.heads())
1076 return their_heads == [hex('force')] or their_heads == heads
1076 return their_heads == [hex('force')] or their_heads == heads
1077
1077
1078 # fail early if possible
1078 # fail early if possible
1079 if not check_heads():
1079 if not check_heads():
1080 bail(_('unsynced changes\n'))
1080 bail(_('unsynced changes\n'))
1081 return
1081 return
1082
1082
1083 req.httphdr("application/mercurial-0.1")
1083 req.httphdr("application/mercurial-0.1")
1084
1084
1085 # do not lock repo until all changegroup data is
1085 # do not lock repo until all changegroup data is
1086 # streamed. save to temporary file.
1086 # streamed. save to temporary file.
1087
1087
1088 fd, tempname = tempfile.mkstemp(prefix='hg-unbundle-')
1088 fd, tempname = tempfile.mkstemp(prefix='hg-unbundle-')
1089 fp = os.fdopen(fd, 'wb+')
1089 fp = os.fdopen(fd, 'wb+')
1090 try:
1090 try:
1091 length = int(req.env['CONTENT_LENGTH'])
1091 length = int(req.env['CONTENT_LENGTH'])
1092 for s in util.filechunkiter(req, limit=length):
1092 for s in util.filechunkiter(req, limit=length):
1093 fp.write(s)
1093 fp.write(s)
1094
1094
1095 try:
1095 try:
1096 lock = self.repo.lock()
1096 lock = self.repo.lock()
1097 try:
1097 try:
1098 if not check_heads():
1098 if not check_heads():
1099 req.write('0\n')
1099 req.write('0\n')
1100 req.write(_('unsynced changes\n'))
1100 req.write(_('unsynced changes\n'))
1101 return
1101 return
1102
1102
1103 fp.seek(0)
1103 fp.seek(0)
1104 header = fp.read(6)
1104 header = fp.read(6)
1105 if not header.startswith("HG"):
1105 if not header.startswith("HG"):
1106 # old client with uncompressed bundle
1106 # old client with uncompressed bundle
1107 def generator(f):
1107 def generator(f):
1108 yield header
1108 yield header
1109 for chunk in f:
1109 for chunk in f:
1110 yield chunk
1110 yield chunk
1111 elif not header.startswith("HG10"):
1111 elif not header.startswith("HG10"):
1112 req.write("0\n")
1112 req.write("0\n")
1113 req.write(_("unknown bundle version\n"))
1113 req.write(_("unknown bundle version\n"))
1114 return
1114 return
1115 elif header == "HG10GZ":
1115 elif header == "HG10GZ":
1116 def generator(f):
1116 def generator(f):
1117 zd = zlib.decompressobj()
1117 zd = zlib.decompressobj()
1118 for chunk in f:
1118 for chunk in f:
1119 yield zd.decompress(chunk)
1119 yield zd.decompress(chunk)
1120 elif header == "HG10BZ":
1120 elif header == "HG10BZ":
1121 def generator(f):
1121 def generator(f):
1122 zd = bz2.BZ2Decompressor()
1122 zd = bz2.BZ2Decompressor()
1123 zd.decompress("BZ")
1123 zd.decompress("BZ")
1124 for chunk in f:
1124 for chunk in f:
1125 yield zd.decompress(chunk)
1125 yield zd.decompress(chunk)
1126 elif header == "HG10UN":
1126 elif header == "HG10UN":
1127 def generator(f):
1127 def generator(f):
1128 for chunk in f:
1128 for chunk in f:
1129 yield chunk
1129 yield chunk
1130 else:
1130 else:
1131 req.write("0\n")
1131 req.write("0\n")
1132 req.write(_("unknown bundle compression type\n"))
1132 req.write(_("unknown bundle compression type\n"))
1133 return
1133 return
1134 gen = generator(util.filechunkiter(fp, 4096))
1134 gen = generator(util.filechunkiter(fp, 4096))
1135
1135
1136 # send addchangegroup output to client
1136 # send addchangegroup output to client
1137
1137
1138 old_stdout = sys.stdout
1138 old_stdout = sys.stdout
1139 sys.stdout = cStringIO.StringIO()
1139 sys.stdout = cStringIO.StringIO()
1140
1140
1141 try:
1141 try:
1142 url = 'remote:%s:%s' % (proto,
1142 url = 'remote:%s:%s' % (proto,
1143 req.env.get('REMOTE_HOST', ''))
1143 req.env.get('REMOTE_HOST', ''))
1144 try:
1144 try:
1145 ret = self.repo.addchangegroup(
1145 ret = self.repo.addchangegroup(
1146 util.chunkbuffer(gen), 'serve', url)
1146 util.chunkbuffer(gen), 'serve', url)
1147 except util.Abort, inst:
1147 except util.Abort, inst:
1148 sys.stdout.write("abort: %s\n" % inst)
1148 sys.stdout.write("abort: %s\n" % inst)
1149 ret = 0
1149 ret = 0
1150 finally:
1150 finally:
1151 val = sys.stdout.getvalue()
1151 val = sys.stdout.getvalue()
1152 sys.stdout = old_stdout
1152 sys.stdout = old_stdout
1153 req.write('%d\n' % ret)
1153 req.write('%d\n' % ret)
1154 req.write(val)
1154 req.write(val)
1155 finally:
1155 finally:
1156 lock.release()
1156 lock.release()
1157 except (OSError, IOError), inst:
1157 except (OSError, IOError), inst:
1158 req.write('0\n')
1158 req.write('0\n')
1159 filename = getattr(inst, 'filename', '')
1159 filename = getattr(inst, 'filename', '')
1160 # Don't send our filesystem layout to the client
1160 # Don't send our filesystem layout to the client
1161 if filename.startswith(self.repo.root):
1161 if filename.startswith(self.repo.root):
1162 filename = filename[len(self.repo.root)+1:]
1162 filename = filename[len(self.repo.root)+1:]
1163 else:
1163 else:
1164 filename = ''
1164 filename = ''
1165 error = getattr(inst, 'strerror', 'Unknown error')
1165 error = getattr(inst, 'strerror', 'Unknown error')
1166 req.write('%s: %s\n' % (error, filename))
1166 req.write('%s: %s\n' % (error, filename))
1167 finally:
1167 finally:
1168 fp.close()
1168 fp.close()
1169 os.unlink(tempname)
1169 os.unlink(tempname)
1170
1170
1171 def do_stream_out(self, req):
1171 def do_stream_out(self, req):
1172 req.httphdr("application/mercurial-0.1")
1172 req.httphdr("application/mercurial-0.1")
1173 streamclone.stream_out(self.repo, req)
1173 streamclone.stream_out(self.repo, req)
@@ -1,1964 +1,1941 b''
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import *
8 from node import *
9 from i18n import _
9 from i18n import _
10 import repo, appendfile, changegroup
10 import repo, appendfile, changegroup
11 import changelog, dirstate, filelog, manifest, context
11 import changelog, dirstate, filelog, manifest, context
12 import re, lock, transaction, tempfile, stat, mdiff, errno, ui
12 import re, lock, transaction, tempfile, stat, mdiff, errno, ui
13 import os, revlog, time, util
13 import os, revlog, time, util
14
14
15 class localrepository(repo.repository):
15 class localrepository(repo.repository):
16 capabilities = ('lookup', 'changegroupsubset')
16 capabilities = ('lookup', 'changegroupsubset')
17 supported = ('revlogv1', 'store')
17 supported = ('revlogv1', 'store')
18
18
19 def __del__(self):
19 def __del__(self):
20 self.transhandle = None
20 self.transhandle = None
21 def __init__(self, parentui, path=None, create=0):
21 def __init__(self, parentui, path=None, create=0):
22 repo.repository.__init__(self)
22 repo.repository.__init__(self)
23 if not path:
23 if not path:
24 p = os.getcwd()
24 p = os.getcwd()
25 while not os.path.isdir(os.path.join(p, ".hg")):
25 while not os.path.isdir(os.path.join(p, ".hg")):
26 oldp = p
26 oldp = p
27 p = os.path.dirname(p)
27 p = os.path.dirname(p)
28 if p == oldp:
28 if p == oldp:
29 raise repo.RepoError(_("There is no Mercurial repository"
29 raise repo.RepoError(_("There is no Mercurial repository"
30 " here (.hg not found)"))
30 " here (.hg not found)"))
31 path = p
31 path = p
32
32
33 self.root = os.path.realpath(path)
33 self.root = os.path.realpath(path)
34 self.path = os.path.join(self.root, ".hg")
34 self.path = os.path.join(self.root, ".hg")
35 self.origroot = path
35 self.origroot = path
36 self.opener = util.opener(self.path)
36 self.opener = util.opener(self.path)
37 self.wopener = util.opener(self.root)
37 self.wopener = util.opener(self.root)
38
38
39 if not os.path.isdir(self.path):
39 if not os.path.isdir(self.path):
40 if create:
40 if create:
41 if not os.path.exists(path):
41 if not os.path.exists(path):
42 os.mkdir(path)
42 os.mkdir(path)
43 os.mkdir(self.path)
43 os.mkdir(self.path)
44 requirements = ["revlogv1"]
44 requirements = ["revlogv1"]
45 if parentui.configbool('format', 'usestore', True):
45 if parentui.configbool('format', 'usestore', True):
46 os.mkdir(os.path.join(self.path, "store"))
46 os.mkdir(os.path.join(self.path, "store"))
47 requirements.append("store")
47 requirements.append("store")
48 # create an invalid changelog
48 # create an invalid changelog
49 self.opener("00changelog.i", "a").write(
49 self.opener("00changelog.i", "a").write(
50 '\0\0\0\2' # represents revlogv2
50 '\0\0\0\2' # represents revlogv2
51 ' dummy changelog to prevent using the old repo layout'
51 ' dummy changelog to prevent using the old repo layout'
52 )
52 )
53 reqfile = self.opener("requires", "w")
53 reqfile = self.opener("requires", "w")
54 for r in requirements:
54 for r in requirements:
55 reqfile.write("%s\n" % r)
55 reqfile.write("%s\n" % r)
56 reqfile.close()
56 reqfile.close()
57 else:
57 else:
58 raise repo.RepoError(_("repository %s not found") % path)
58 raise repo.RepoError(_("repository %s not found") % path)
59 elif create:
59 elif create:
60 raise repo.RepoError(_("repository %s already exists") % path)
60 raise repo.RepoError(_("repository %s already exists") % path)
61 else:
61 else:
62 # find requirements
62 # find requirements
63 try:
63 try:
64 requirements = self.opener("requires").read().splitlines()
64 requirements = self.opener("requires").read().splitlines()
65 except IOError, inst:
65 except IOError, inst:
66 if inst.errno != errno.ENOENT:
66 if inst.errno != errno.ENOENT:
67 raise
67 raise
68 requirements = []
68 requirements = []
69 # check them
69 # check them
70 for r in requirements:
70 for r in requirements:
71 if r not in self.supported:
71 if r not in self.supported:
72 raise repo.RepoError(_("requirement '%s' not supported") % r)
72 raise repo.RepoError(_("requirement '%s' not supported") % r)
73
73
74 # setup store
74 # setup store
75 if "store" in requirements:
75 if "store" in requirements:
76 self.encodefn = util.encodefilename
76 self.encodefn = util.encodefilename
77 self.decodefn = util.decodefilename
77 self.decodefn = util.decodefilename
78 self.spath = os.path.join(self.path, "store")
78 self.spath = os.path.join(self.path, "store")
79 else:
79 else:
80 self.encodefn = lambda x: x
80 self.encodefn = lambda x: x
81 self.decodefn = lambda x: x
81 self.decodefn = lambda x: x
82 self.spath = self.path
82 self.spath = self.path
83 self.sopener = util.encodedopener(util.opener(self.spath), self.encodefn)
83 self.sopener = util.encodedopener(util.opener(self.spath), self.encodefn)
84
84
85 self.ui = ui.ui(parentui=parentui)
85 self.ui = ui.ui(parentui=parentui)
86 try:
86 try:
87 self.ui.readconfig(self.join("hgrc"), self.root)
87 self.ui.readconfig(self.join("hgrc"), self.root)
88 except IOError:
88 except IOError:
89 pass
89 pass
90
90
91 v = self.ui.configrevlog()
91 self.changelog = changelog.changelog(self.sopener)
92 self.revlogversion = int(v.get('format', revlog.REVLOG_DEFAULT_FORMAT))
92 self.sopener.defversion = self.changelog.version
93 self.revlogv1 = self.revlogversion != revlog.REVLOGV0
93 self.manifest = manifest.manifest(self.sopener)
94 fl = v.get('flags', None)
95 flags = 0
96 if fl != None:
97 for x in fl.split():
98 flags |= revlog.flagstr(x)
99 elif self.revlogv1:
100 flags = revlog.REVLOG_DEFAULT_FLAGS
101
102 v = self.revlogversion | flags
103 self.manifest = manifest.manifest(self.sopener, v)
104 self.changelog = changelog.changelog(self.sopener, v)
105
94
106 fallback = self.ui.config('ui', 'fallbackencoding')
95 fallback = self.ui.config('ui', 'fallbackencoding')
107 if fallback:
96 if fallback:
108 util._fallbackencoding = fallback
97 util._fallbackencoding = fallback
109
98
110 # the changelog might not have the inline index flag
111 # on. If the format of the changelog is the same as found in
112 # .hgrc, apply any flags found in the .hgrc as well.
113 # Otherwise, just version from the changelog
114 v = self.changelog.version
115 if v == self.revlogversion:
116 v |= flags
117 self.revlogversion = v
118
119 self.tagscache = None
99 self.tagscache = None
120 self.branchcache = None
100 self.branchcache = None
121 self.nodetagscache = None
101 self.nodetagscache = None
122 self.filterpats = {}
102 self.filterpats = {}
123 self.transhandle = None
103 self.transhandle = None
124
104
125 self._link = lambda x: False
105 self._link = lambda x: False
126 if util.checklink(self.root):
106 if util.checklink(self.root):
127 r = self.root # avoid circular reference in lambda
107 r = self.root # avoid circular reference in lambda
128 self._link = lambda x: util.is_link(os.path.join(r, x))
108 self._link = lambda x: util.is_link(os.path.join(r, x))
129
109
130 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
110 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
131
111
132 def url(self):
112 def url(self):
133 return 'file:' + self.root
113 return 'file:' + self.root
134
114
135 def hook(self, name, throw=False, **args):
115 def hook(self, name, throw=False, **args):
136 def callhook(hname, funcname):
116 def callhook(hname, funcname):
137 '''call python hook. hook is callable object, looked up as
117 '''call python hook. hook is callable object, looked up as
138 name in python module. if callable returns "true", hook
118 name in python module. if callable returns "true", hook
139 fails, else passes. if hook raises exception, treated as
119 fails, else passes. if hook raises exception, treated as
140 hook failure. exception propagates if throw is "true".
120 hook failure. exception propagates if throw is "true".
141
121
142 reason for "true" meaning "hook failed" is so that
122 reason for "true" meaning "hook failed" is so that
143 unmodified commands (e.g. mercurial.commands.update) can
123 unmodified commands (e.g. mercurial.commands.update) can
144 be run as hooks without wrappers to convert return values.'''
124 be run as hooks without wrappers to convert return values.'''
145
125
146 self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
126 self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
147 obj = funcname
127 obj = funcname
148 if not callable(obj):
128 if not callable(obj):
149 d = funcname.rfind('.')
129 d = funcname.rfind('.')
150 if d == -1:
130 if d == -1:
151 raise util.Abort(_('%s hook is invalid ("%s" not in '
131 raise util.Abort(_('%s hook is invalid ("%s" not in '
152 'a module)') % (hname, funcname))
132 'a module)') % (hname, funcname))
153 modname = funcname[:d]
133 modname = funcname[:d]
154 try:
134 try:
155 obj = __import__(modname)
135 obj = __import__(modname)
156 except ImportError:
136 except ImportError:
157 try:
137 try:
158 # extensions are loaded with hgext_ prefix
138 # extensions are loaded with hgext_ prefix
159 obj = __import__("hgext_%s" % modname)
139 obj = __import__("hgext_%s" % modname)
160 except ImportError:
140 except ImportError:
161 raise util.Abort(_('%s hook is invalid '
141 raise util.Abort(_('%s hook is invalid '
162 '(import of "%s" failed)') %
142 '(import of "%s" failed)') %
163 (hname, modname))
143 (hname, modname))
164 try:
144 try:
165 for p in funcname.split('.')[1:]:
145 for p in funcname.split('.')[1:]:
166 obj = getattr(obj, p)
146 obj = getattr(obj, p)
167 except AttributeError, err:
147 except AttributeError, err:
168 raise util.Abort(_('%s hook is invalid '
148 raise util.Abort(_('%s hook is invalid '
169 '("%s" is not defined)') %
149 '("%s" is not defined)') %
170 (hname, funcname))
150 (hname, funcname))
171 if not callable(obj):
151 if not callable(obj):
172 raise util.Abort(_('%s hook is invalid '
152 raise util.Abort(_('%s hook is invalid '
173 '("%s" is not callable)') %
153 '("%s" is not callable)') %
174 (hname, funcname))
154 (hname, funcname))
175 try:
155 try:
176 r = obj(ui=self.ui, repo=self, hooktype=name, **args)
156 r = obj(ui=self.ui, repo=self, hooktype=name, **args)
177 except (KeyboardInterrupt, util.SignalInterrupt):
157 except (KeyboardInterrupt, util.SignalInterrupt):
178 raise
158 raise
179 except Exception, exc:
159 except Exception, exc:
180 if isinstance(exc, util.Abort):
160 if isinstance(exc, util.Abort):
181 self.ui.warn(_('error: %s hook failed: %s\n') %
161 self.ui.warn(_('error: %s hook failed: %s\n') %
182 (hname, exc.args[0]))
162 (hname, exc.args[0]))
183 else:
163 else:
184 self.ui.warn(_('error: %s hook raised an exception: '
164 self.ui.warn(_('error: %s hook raised an exception: '
185 '%s\n') % (hname, exc))
165 '%s\n') % (hname, exc))
186 if throw:
166 if throw:
187 raise
167 raise
188 self.ui.print_exc()
168 self.ui.print_exc()
189 return True
169 return True
190 if r:
170 if r:
191 if throw:
171 if throw:
192 raise util.Abort(_('%s hook failed') % hname)
172 raise util.Abort(_('%s hook failed') % hname)
193 self.ui.warn(_('warning: %s hook failed\n') % hname)
173 self.ui.warn(_('warning: %s hook failed\n') % hname)
194 return r
174 return r
195
175
196 def runhook(name, cmd):
176 def runhook(name, cmd):
197 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
177 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
198 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()])
178 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()])
199 r = util.system(cmd, environ=env, cwd=self.root)
179 r = util.system(cmd, environ=env, cwd=self.root)
200 if r:
180 if r:
201 desc, r = util.explain_exit(r)
181 desc, r = util.explain_exit(r)
202 if throw:
182 if throw:
203 raise util.Abort(_('%s hook %s') % (name, desc))
183 raise util.Abort(_('%s hook %s') % (name, desc))
204 self.ui.warn(_('warning: %s hook %s\n') % (name, desc))
184 self.ui.warn(_('warning: %s hook %s\n') % (name, desc))
205 return r
185 return r
206
186
207 r = False
187 r = False
208 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
188 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
209 if hname.split(".", 1)[0] == name and cmd]
189 if hname.split(".", 1)[0] == name and cmd]
210 hooks.sort()
190 hooks.sort()
211 for hname, cmd in hooks:
191 for hname, cmd in hooks:
212 if callable(cmd):
192 if callable(cmd):
213 r = callhook(hname, cmd) or r
193 r = callhook(hname, cmd) or r
214 elif cmd.startswith('python:'):
194 elif cmd.startswith('python:'):
215 r = callhook(hname, cmd[7:].strip()) or r
195 r = callhook(hname, cmd[7:].strip()) or r
216 else:
196 else:
217 r = runhook(hname, cmd) or r
197 r = runhook(hname, cmd) or r
218 return r
198 return r
219
199
220 tag_disallowed = ':\r\n'
200 tag_disallowed = ':\r\n'
221
201
222 def _tag(self, name, node, message, local, user, date, parent=None):
202 def _tag(self, name, node, message, local, user, date, parent=None):
223 use_dirstate = parent is None
203 use_dirstate = parent is None
224
204
225 for c in self.tag_disallowed:
205 for c in self.tag_disallowed:
226 if c in name:
206 if c in name:
227 raise util.Abort(_('%r cannot be used in a tag name') % c)
207 raise util.Abort(_('%r cannot be used in a tag name') % c)
228
208
229 self.hook('pretag', throw=True, node=hex(node), tag=name, local=local)
209 self.hook('pretag', throw=True, node=hex(node), tag=name, local=local)
230
210
231 if local:
211 if local:
232 # local tags are stored in the current charset
212 # local tags are stored in the current charset
233 self.opener('localtags', 'a').write('%s %s\n' % (hex(node), name))
213 self.opener('localtags', 'a').write('%s %s\n' % (hex(node), name))
234 self.hook('tag', node=hex(node), tag=name, local=local)
214 self.hook('tag', node=hex(node), tag=name, local=local)
235 return
215 return
236
216
237 # committed tags are stored in UTF-8
217 # committed tags are stored in UTF-8
238 line = '%s %s\n' % (hex(node), util.fromlocal(name))
218 line = '%s %s\n' % (hex(node), util.fromlocal(name))
239 if use_dirstate:
219 if use_dirstate:
240 self.wfile('.hgtags', 'ab').write(line)
220 self.wfile('.hgtags', 'ab').write(line)
241 else:
221 else:
242 ntags = self.filectx('.hgtags', parent).data()
222 ntags = self.filectx('.hgtags', parent).data()
243 self.wfile('.hgtags', 'ab').write(ntags + line)
223 self.wfile('.hgtags', 'ab').write(ntags + line)
244 if use_dirstate and self.dirstate.state('.hgtags') == '?':
224 if use_dirstate and self.dirstate.state('.hgtags') == '?':
245 self.add(['.hgtags'])
225 self.add(['.hgtags'])
246
226
247 tagnode = self.commit(['.hgtags'], message, user, date, p1=parent)
227 tagnode = self.commit(['.hgtags'], message, user, date, p1=parent)
248
228
249 self.hook('tag', node=hex(node), tag=name, local=local)
229 self.hook('tag', node=hex(node), tag=name, local=local)
250
230
251 return tagnode
231 return tagnode
252
232
253 def tag(self, name, node, message, local, user, date):
233 def tag(self, name, node, message, local, user, date):
254 '''tag a revision with a symbolic name.
234 '''tag a revision with a symbolic name.
255
235
256 if local is True, the tag is stored in a per-repository file.
236 if local is True, the tag is stored in a per-repository file.
257 otherwise, it is stored in the .hgtags file, and a new
237 otherwise, it is stored in the .hgtags file, and a new
258 changeset is committed with the change.
238 changeset is committed with the change.
259
239
260 keyword arguments:
240 keyword arguments:
261
241
262 local: whether to store tag in non-version-controlled file
242 local: whether to store tag in non-version-controlled file
263 (default False)
243 (default False)
264
244
265 message: commit message to use if committing
245 message: commit message to use if committing
266
246
267 user: name of user to use if committing
247 user: name of user to use if committing
268
248
269 date: date tuple to use if committing'''
249 date: date tuple to use if committing'''
270
250
271 for x in self.status()[:5]:
251 for x in self.status()[:5]:
272 if '.hgtags' in x:
252 if '.hgtags' in x:
273 raise util.Abort(_('working copy of .hgtags is changed '
253 raise util.Abort(_('working copy of .hgtags is changed '
274 '(please commit .hgtags manually)'))
254 '(please commit .hgtags manually)'))
275
255
276
256
277 self._tag(name, node, message, local, user, date)
257 self._tag(name, node, message, local, user, date)
278
258
279 def tags(self):
259 def tags(self):
280 '''return a mapping of tag to node'''
260 '''return a mapping of tag to node'''
281 if self.tagscache:
261 if self.tagscache:
282 return self.tagscache
262 return self.tagscache
283
263
284 globaltags = {}
264 globaltags = {}
285
265
286 def readtags(lines, fn):
266 def readtags(lines, fn):
287 filetags = {}
267 filetags = {}
288 count = 0
268 count = 0
289
269
290 def warn(msg):
270 def warn(msg):
291 self.ui.warn(_("%s, line %s: %s\n") % (fn, count, msg))
271 self.ui.warn(_("%s, line %s: %s\n") % (fn, count, msg))
292
272
293 for l in lines:
273 for l in lines:
294 count += 1
274 count += 1
295 if not l:
275 if not l:
296 continue
276 continue
297 s = l.split(" ", 1)
277 s = l.split(" ", 1)
298 if len(s) != 2:
278 if len(s) != 2:
299 warn(_("cannot parse entry"))
279 warn(_("cannot parse entry"))
300 continue
280 continue
301 node, key = s
281 node, key = s
302 key = util.tolocal(key.strip()) # stored in UTF-8
282 key = util.tolocal(key.strip()) # stored in UTF-8
303 try:
283 try:
304 bin_n = bin(node)
284 bin_n = bin(node)
305 except TypeError:
285 except TypeError:
306 warn(_("node '%s' is not well formed") % node)
286 warn(_("node '%s' is not well formed") % node)
307 continue
287 continue
308 if bin_n not in self.changelog.nodemap:
288 if bin_n not in self.changelog.nodemap:
309 warn(_("tag '%s' refers to unknown node") % key)
289 warn(_("tag '%s' refers to unknown node") % key)
310 continue
290 continue
311
291
312 h = {}
292 h = {}
313 if key in filetags:
293 if key in filetags:
314 n, h = filetags[key]
294 n, h = filetags[key]
315 h[n] = True
295 h[n] = True
316 filetags[key] = (bin_n, h)
296 filetags[key] = (bin_n, h)
317
297
318 for k,nh in filetags.items():
298 for k,nh in filetags.items():
319 if k not in globaltags:
299 if k not in globaltags:
320 globaltags[k] = nh
300 globaltags[k] = nh
321 continue
301 continue
322 # we prefer the global tag if:
302 # we prefer the global tag if:
323 # it supercedes us OR
303 # it supercedes us OR
324 # mutual supercedes and it has a higher rank
304 # mutual supercedes and it has a higher rank
325 # otherwise we win because we're tip-most
305 # otherwise we win because we're tip-most
326 an, ah = nh
306 an, ah = nh
327 bn, bh = globaltags[k]
307 bn, bh = globaltags[k]
328 if bn != an and an in bh and \
308 if bn != an and an in bh and \
329 (bn not in ah or len(bh) > len(ah)):
309 (bn not in ah or len(bh) > len(ah)):
330 an = bn
310 an = bn
331 ah.update(bh)
311 ah.update(bh)
332 globaltags[k] = an, ah
312 globaltags[k] = an, ah
333
313
334 # read the tags file from each head, ending with the tip
314 # read the tags file from each head, ending with the tip
335 f = None
315 f = None
336 for rev, node, fnode in self._hgtagsnodes():
316 for rev, node, fnode in self._hgtagsnodes():
337 f = (f and f.filectx(fnode) or
317 f = (f and f.filectx(fnode) or
338 self.filectx('.hgtags', fileid=fnode))
318 self.filectx('.hgtags', fileid=fnode))
339 readtags(f.data().splitlines(), f)
319 readtags(f.data().splitlines(), f)
340
320
341 try:
321 try:
342 data = util.fromlocal(self.opener("localtags").read())
322 data = util.fromlocal(self.opener("localtags").read())
343 # localtags are stored in the local character set
323 # localtags are stored in the local character set
344 # while the internal tag table is stored in UTF-8
324 # while the internal tag table is stored in UTF-8
345 readtags(data.splitlines(), "localtags")
325 readtags(data.splitlines(), "localtags")
346 except IOError:
326 except IOError:
347 pass
327 pass
348
328
349 self.tagscache = {}
329 self.tagscache = {}
350 for k,nh in globaltags.items():
330 for k,nh in globaltags.items():
351 n = nh[0]
331 n = nh[0]
352 if n != nullid:
332 if n != nullid:
353 self.tagscache[k] = n
333 self.tagscache[k] = n
354 self.tagscache['tip'] = self.changelog.tip()
334 self.tagscache['tip'] = self.changelog.tip()
355
335
356 return self.tagscache
336 return self.tagscache
357
337
358 def _hgtagsnodes(self):
338 def _hgtagsnodes(self):
359 heads = self.heads()
339 heads = self.heads()
360 heads.reverse()
340 heads.reverse()
361 last = {}
341 last = {}
362 ret = []
342 ret = []
363 for node in heads:
343 for node in heads:
364 c = self.changectx(node)
344 c = self.changectx(node)
365 rev = c.rev()
345 rev = c.rev()
366 try:
346 try:
367 fnode = c.filenode('.hgtags')
347 fnode = c.filenode('.hgtags')
368 except revlog.LookupError:
348 except revlog.LookupError:
369 continue
349 continue
370 ret.append((rev, node, fnode))
350 ret.append((rev, node, fnode))
371 if fnode in last:
351 if fnode in last:
372 ret[last[fnode]] = None
352 ret[last[fnode]] = None
373 last[fnode] = len(ret) - 1
353 last[fnode] = len(ret) - 1
374 return [item for item in ret if item]
354 return [item for item in ret if item]
375
355
376 def tagslist(self):
356 def tagslist(self):
377 '''return a list of tags ordered by revision'''
357 '''return a list of tags ordered by revision'''
378 l = []
358 l = []
379 for t, n in self.tags().items():
359 for t, n in self.tags().items():
380 try:
360 try:
381 r = self.changelog.rev(n)
361 r = self.changelog.rev(n)
382 except:
362 except:
383 r = -2 # sort to the beginning of the list if unknown
363 r = -2 # sort to the beginning of the list if unknown
384 l.append((r, t, n))
364 l.append((r, t, n))
385 l.sort()
365 l.sort()
386 return [(t, n) for r, t, n in l]
366 return [(t, n) for r, t, n in l]
387
367
388 def nodetags(self, node):
368 def nodetags(self, node):
389 '''return the tags associated with a node'''
369 '''return the tags associated with a node'''
390 if not self.nodetagscache:
370 if not self.nodetagscache:
391 self.nodetagscache = {}
371 self.nodetagscache = {}
392 for t, n in self.tags().items():
372 for t, n in self.tags().items():
393 self.nodetagscache.setdefault(n, []).append(t)
373 self.nodetagscache.setdefault(n, []).append(t)
394 return self.nodetagscache.get(node, [])
374 return self.nodetagscache.get(node, [])
395
375
396 def _branchtags(self):
376 def _branchtags(self):
397 partial, last, lrev = self._readbranchcache()
377 partial, last, lrev = self._readbranchcache()
398
378
399 tiprev = self.changelog.count() - 1
379 tiprev = self.changelog.count() - 1
400 if lrev != tiprev:
380 if lrev != tiprev:
401 self._updatebranchcache(partial, lrev+1, tiprev+1)
381 self._updatebranchcache(partial, lrev+1, tiprev+1)
402 self._writebranchcache(partial, self.changelog.tip(), tiprev)
382 self._writebranchcache(partial, self.changelog.tip(), tiprev)
403
383
404 return partial
384 return partial
405
385
406 def branchtags(self):
386 def branchtags(self):
407 if self.branchcache is not None:
387 if self.branchcache is not None:
408 return self.branchcache
388 return self.branchcache
409
389
410 self.branchcache = {} # avoid recursion in changectx
390 self.branchcache = {} # avoid recursion in changectx
411 partial = self._branchtags()
391 partial = self._branchtags()
412
392
413 # the branch cache is stored on disk as UTF-8, but in the local
393 # the branch cache is stored on disk as UTF-8, but in the local
414 # charset internally
394 # charset internally
415 for k, v in partial.items():
395 for k, v in partial.items():
416 self.branchcache[util.tolocal(k)] = v
396 self.branchcache[util.tolocal(k)] = v
417 return self.branchcache
397 return self.branchcache
418
398
419 def _readbranchcache(self):
399 def _readbranchcache(self):
420 partial = {}
400 partial = {}
421 try:
401 try:
422 f = self.opener("branch.cache")
402 f = self.opener("branch.cache")
423 lines = f.read().split('\n')
403 lines = f.read().split('\n')
424 f.close()
404 f.close()
425 last, lrev = lines.pop(0).split(" ", 1)
405 last, lrev = lines.pop(0).split(" ", 1)
426 last, lrev = bin(last), int(lrev)
406 last, lrev = bin(last), int(lrev)
427 if not (lrev < self.changelog.count() and
407 if not (lrev < self.changelog.count() and
428 self.changelog.node(lrev) == last): # sanity check
408 self.changelog.node(lrev) == last): # sanity check
429 # invalidate the cache
409 # invalidate the cache
430 raise ValueError('Invalid branch cache: unknown tip')
410 raise ValueError('Invalid branch cache: unknown tip')
431 for l in lines:
411 for l in lines:
432 if not l: continue
412 if not l: continue
433 node, label = l.split(" ", 1)
413 node, label = l.split(" ", 1)
434 partial[label.strip()] = bin(node)
414 partial[label.strip()] = bin(node)
435 except (KeyboardInterrupt, util.SignalInterrupt):
415 except (KeyboardInterrupt, util.SignalInterrupt):
436 raise
416 raise
437 except Exception, inst:
417 except Exception, inst:
438 if self.ui.debugflag:
418 if self.ui.debugflag:
439 self.ui.warn(str(inst), '\n')
419 self.ui.warn(str(inst), '\n')
440 partial, last, lrev = {}, nullid, nullrev
420 partial, last, lrev = {}, nullid, nullrev
441 return partial, last, lrev
421 return partial, last, lrev
442
422
443 def _writebranchcache(self, branches, tip, tiprev):
423 def _writebranchcache(self, branches, tip, tiprev):
444 try:
424 try:
445 f = self.opener("branch.cache", "w")
425 f = self.opener("branch.cache", "w")
446 f.write("%s %s\n" % (hex(tip), tiprev))
426 f.write("%s %s\n" % (hex(tip), tiprev))
447 for label, node in branches.iteritems():
427 for label, node in branches.iteritems():
448 f.write("%s %s\n" % (hex(node), label))
428 f.write("%s %s\n" % (hex(node), label))
449 except IOError:
429 except IOError:
450 pass
430 pass
451
431
452 def _updatebranchcache(self, partial, start, end):
432 def _updatebranchcache(self, partial, start, end):
453 for r in xrange(start, end):
433 for r in xrange(start, end):
454 c = self.changectx(r)
434 c = self.changectx(r)
455 b = c.branch()
435 b = c.branch()
456 partial[b] = c.node()
436 partial[b] = c.node()
457
437
458 def lookup(self, key):
438 def lookup(self, key):
459 if key == '.':
439 if key == '.':
460 key = self.dirstate.parents()[0]
440 key = self.dirstate.parents()[0]
461 if key == nullid:
441 if key == nullid:
462 raise repo.RepoError(_("no revision checked out"))
442 raise repo.RepoError(_("no revision checked out"))
463 elif key == 'null':
443 elif key == 'null':
464 return nullid
444 return nullid
465 n = self.changelog._match(key)
445 n = self.changelog._match(key)
466 if n:
446 if n:
467 return n
447 return n
468 if key in self.tags():
448 if key in self.tags():
469 return self.tags()[key]
449 return self.tags()[key]
470 if key in self.branchtags():
450 if key in self.branchtags():
471 return self.branchtags()[key]
451 return self.branchtags()[key]
472 n = self.changelog._partialmatch(key)
452 n = self.changelog._partialmatch(key)
473 if n:
453 if n:
474 return n
454 return n
475 raise repo.RepoError(_("unknown revision '%s'") % key)
455 raise repo.RepoError(_("unknown revision '%s'") % key)
476
456
477 def dev(self):
457 def dev(self):
478 return os.lstat(self.path).st_dev
458 return os.lstat(self.path).st_dev
479
459
480 def local(self):
460 def local(self):
481 return True
461 return True
482
462
483 def join(self, f):
463 def join(self, f):
484 return os.path.join(self.path, f)
464 return os.path.join(self.path, f)
485
465
486 def sjoin(self, f):
466 def sjoin(self, f):
487 f = self.encodefn(f)
467 f = self.encodefn(f)
488 return os.path.join(self.spath, f)
468 return os.path.join(self.spath, f)
489
469
490 def wjoin(self, f):
470 def wjoin(self, f):
491 return os.path.join(self.root, f)
471 return os.path.join(self.root, f)
492
472
493 def file(self, f):
473 def file(self, f):
494 if f[0] == '/':
474 if f[0] == '/':
495 f = f[1:]
475 f = f[1:]
496 return filelog.filelog(self.sopener, f, self.revlogversion)
476 return filelog.filelog(self.sopener, f)
497
477
498 def changectx(self, changeid=None):
478 def changectx(self, changeid=None):
499 return context.changectx(self, changeid)
479 return context.changectx(self, changeid)
500
480
501 def workingctx(self):
481 def workingctx(self):
502 return context.workingctx(self)
482 return context.workingctx(self)
503
483
504 def parents(self, changeid=None):
484 def parents(self, changeid=None):
505 '''
485 '''
506 get list of changectxs for parents of changeid or working directory
486 get list of changectxs for parents of changeid or working directory
507 '''
487 '''
508 if changeid is None:
488 if changeid is None:
509 pl = self.dirstate.parents()
489 pl = self.dirstate.parents()
510 else:
490 else:
511 n = self.changelog.lookup(changeid)
491 n = self.changelog.lookup(changeid)
512 pl = self.changelog.parents(n)
492 pl = self.changelog.parents(n)
513 if pl[1] == nullid:
493 if pl[1] == nullid:
514 return [self.changectx(pl[0])]
494 return [self.changectx(pl[0])]
515 return [self.changectx(pl[0]), self.changectx(pl[1])]
495 return [self.changectx(pl[0]), self.changectx(pl[1])]
516
496
517 def filectx(self, path, changeid=None, fileid=None):
497 def filectx(self, path, changeid=None, fileid=None):
518 """changeid can be a changeset revision, node, or tag.
498 """changeid can be a changeset revision, node, or tag.
519 fileid can be a file revision or node."""
499 fileid can be a file revision or node."""
520 return context.filectx(self, path, changeid, fileid)
500 return context.filectx(self, path, changeid, fileid)
521
501
522 def getcwd(self):
502 def getcwd(self):
523 return self.dirstate.getcwd()
503 return self.dirstate.getcwd()
524
504
525 def wfile(self, f, mode='r'):
505 def wfile(self, f, mode='r'):
526 return self.wopener(f, mode)
506 return self.wopener(f, mode)
527
507
528 def _filter(self, filter, filename, data):
508 def _filter(self, filter, filename, data):
529 if filter not in self.filterpats:
509 if filter not in self.filterpats:
530 l = []
510 l = []
531 for pat, cmd in self.ui.configitems(filter):
511 for pat, cmd in self.ui.configitems(filter):
532 mf = util.matcher(self.root, "", [pat], [], [])[1]
512 mf = util.matcher(self.root, "", [pat], [], [])[1]
533 l.append((mf, cmd))
513 l.append((mf, cmd))
534 self.filterpats[filter] = l
514 self.filterpats[filter] = l
535
515
536 for mf, cmd in self.filterpats[filter]:
516 for mf, cmd in self.filterpats[filter]:
537 if mf(filename):
517 if mf(filename):
538 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
518 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
539 data = util.filter(data, cmd)
519 data = util.filter(data, cmd)
540 break
520 break
541
521
542 return data
522 return data
543
523
544 def wread(self, filename):
524 def wread(self, filename):
545 if self._link(filename):
525 if self._link(filename):
546 data = os.readlink(self.wjoin(filename))
526 data = os.readlink(self.wjoin(filename))
547 else:
527 else:
548 data = self.wopener(filename, 'r').read()
528 data = self.wopener(filename, 'r').read()
549 return self._filter("encode", filename, data)
529 return self._filter("encode", filename, data)
550
530
551 def wwrite(self, filename, data, flags):
531 def wwrite(self, filename, data, flags):
552 data = self._filter("decode", filename, data)
532 data = self._filter("decode", filename, data)
553 if "l" in flags:
533 if "l" in flags:
554 f = self.wjoin(filename)
534 f = self.wjoin(filename)
555 try:
535 try:
556 os.unlink(f)
536 os.unlink(f)
557 except OSError:
537 except OSError:
558 pass
538 pass
559 d = os.path.dirname(f)
539 d = os.path.dirname(f)
560 if not os.path.exists(d):
540 if not os.path.exists(d):
561 os.makedirs(d)
541 os.makedirs(d)
562 os.symlink(data, f)
542 os.symlink(data, f)
563 else:
543 else:
564 try:
544 try:
565 if self._link(filename):
545 if self._link(filename):
566 os.unlink(self.wjoin(filename))
546 os.unlink(self.wjoin(filename))
567 except OSError:
547 except OSError:
568 pass
548 pass
569 self.wopener(filename, 'w').write(data)
549 self.wopener(filename, 'w').write(data)
570 util.set_exec(self.wjoin(filename), "x" in flags)
550 util.set_exec(self.wjoin(filename), "x" in flags)
571
551
572 def wwritedata(self, filename, data):
552 def wwritedata(self, filename, data):
573 return self._filter("decode", filename, data)
553 return self._filter("decode", filename, data)
574
554
575 def transaction(self):
555 def transaction(self):
576 tr = self.transhandle
556 tr = self.transhandle
577 if tr != None and tr.running():
557 if tr != None and tr.running():
578 return tr.nest()
558 return tr.nest()
579
559
580 # save dirstate for rollback
560 # save dirstate for rollback
581 try:
561 try:
582 ds = self.opener("dirstate").read()
562 ds = self.opener("dirstate").read()
583 except IOError:
563 except IOError:
584 ds = ""
564 ds = ""
585 self.opener("journal.dirstate", "w").write(ds)
565 self.opener("journal.dirstate", "w").write(ds)
586
566
587 renames = [(self.sjoin("journal"), self.sjoin("undo")),
567 renames = [(self.sjoin("journal"), self.sjoin("undo")),
588 (self.join("journal.dirstate"), self.join("undo.dirstate"))]
568 (self.join("journal.dirstate"), self.join("undo.dirstate"))]
589 tr = transaction.transaction(self.ui.warn, self.sopener,
569 tr = transaction.transaction(self.ui.warn, self.sopener,
590 self.sjoin("journal"),
570 self.sjoin("journal"),
591 aftertrans(renames))
571 aftertrans(renames))
592 self.transhandle = tr
572 self.transhandle = tr
593 return tr
573 return tr
594
574
595 def recover(self):
575 def recover(self):
596 l = self.lock()
576 l = self.lock()
597 if os.path.exists(self.sjoin("journal")):
577 if os.path.exists(self.sjoin("journal")):
598 self.ui.status(_("rolling back interrupted transaction\n"))
578 self.ui.status(_("rolling back interrupted transaction\n"))
599 transaction.rollback(self.sopener, self.sjoin("journal"))
579 transaction.rollback(self.sopener, self.sjoin("journal"))
600 self.reload()
580 self.reload()
601 return True
581 return True
602 else:
582 else:
603 self.ui.warn(_("no interrupted transaction available\n"))
583 self.ui.warn(_("no interrupted transaction available\n"))
604 return False
584 return False
605
585
606 def rollback(self, wlock=None):
586 def rollback(self, wlock=None):
607 if not wlock:
587 if not wlock:
608 wlock = self.wlock()
588 wlock = self.wlock()
609 l = self.lock()
589 l = self.lock()
610 if os.path.exists(self.sjoin("undo")):
590 if os.path.exists(self.sjoin("undo")):
611 self.ui.status(_("rolling back last transaction\n"))
591 self.ui.status(_("rolling back last transaction\n"))
612 transaction.rollback(self.sopener, self.sjoin("undo"))
592 transaction.rollback(self.sopener, self.sjoin("undo"))
613 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
593 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
614 self.reload()
594 self.reload()
615 self.wreload()
595 self.wreload()
616 else:
596 else:
617 self.ui.warn(_("no rollback information available\n"))
597 self.ui.warn(_("no rollback information available\n"))
618
598
619 def wreload(self):
599 def wreload(self):
620 self.dirstate.read()
600 self.dirstate.read()
621
601
622 def reload(self):
602 def reload(self):
623 self.changelog.load()
603 self.changelog.load()
624 self.manifest.load()
604 self.manifest.load()
625 self.tagscache = None
605 self.tagscache = None
626 self.nodetagscache = None
606 self.nodetagscache = None
627
607
628 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
608 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
629 desc=None):
609 desc=None):
630 try:
610 try:
631 l = lock.lock(lockname, 0, releasefn, desc=desc)
611 l = lock.lock(lockname, 0, releasefn, desc=desc)
632 except lock.LockHeld, inst:
612 except lock.LockHeld, inst:
633 if not wait:
613 if not wait:
634 raise
614 raise
635 self.ui.warn(_("waiting for lock on %s held by %r\n") %
615 self.ui.warn(_("waiting for lock on %s held by %r\n") %
636 (desc, inst.locker))
616 (desc, inst.locker))
637 # default to 600 seconds timeout
617 # default to 600 seconds timeout
638 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
618 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
639 releasefn, desc=desc)
619 releasefn, desc=desc)
640 if acquirefn:
620 if acquirefn:
641 acquirefn()
621 acquirefn()
642 return l
622 return l
643
623
644 def lock(self, wait=1):
624 def lock(self, wait=1):
645 return self.do_lock(self.sjoin("lock"), wait, acquirefn=self.reload,
625 return self.do_lock(self.sjoin("lock"), wait, acquirefn=self.reload,
646 desc=_('repository %s') % self.origroot)
626 desc=_('repository %s') % self.origroot)
647
627
648 def wlock(self, wait=1):
628 def wlock(self, wait=1):
649 return self.do_lock(self.join("wlock"), wait, self.dirstate.write,
629 return self.do_lock(self.join("wlock"), wait, self.dirstate.write,
650 self.wreload,
630 self.wreload,
651 desc=_('working directory of %s') % self.origroot)
631 desc=_('working directory of %s') % self.origroot)
652
632
653 def filecommit(self, fn, manifest1, manifest2, linkrev, transaction, changelist):
633 def filecommit(self, fn, manifest1, manifest2, linkrev, transaction, changelist):
654 """
634 """
655 commit an individual file as part of a larger transaction
635 commit an individual file as part of a larger transaction
656 """
636 """
657
637
658 t = self.wread(fn)
638 t = self.wread(fn)
659 fl = self.file(fn)
639 fl = self.file(fn)
660 fp1 = manifest1.get(fn, nullid)
640 fp1 = manifest1.get(fn, nullid)
661 fp2 = manifest2.get(fn, nullid)
641 fp2 = manifest2.get(fn, nullid)
662
642
663 meta = {}
643 meta = {}
664 cp = self.dirstate.copied(fn)
644 cp = self.dirstate.copied(fn)
665 if cp:
645 if cp:
666 # Mark the new revision of this file as a copy of another
646 # Mark the new revision of this file as a copy of another
667 # file. This copy data will effectively act as a parent
647 # file. This copy data will effectively act as a parent
668 # of this new revision. If this is a merge, the first
648 # of this new revision. If this is a merge, the first
669 # parent will be the nullid (meaning "look up the copy data")
649 # parent will be the nullid (meaning "look up the copy data")
670 # and the second one will be the other parent. For example:
650 # and the second one will be the other parent. For example:
671 #
651 #
672 # 0 --- 1 --- 3 rev1 changes file foo
652 # 0 --- 1 --- 3 rev1 changes file foo
673 # \ / rev2 renames foo to bar and changes it
653 # \ / rev2 renames foo to bar and changes it
674 # \- 2 -/ rev3 should have bar with all changes and
654 # \- 2 -/ rev3 should have bar with all changes and
675 # should record that bar descends from
655 # should record that bar descends from
676 # bar in rev2 and foo in rev1
656 # bar in rev2 and foo in rev1
677 #
657 #
678 # this allows this merge to succeed:
658 # this allows this merge to succeed:
679 #
659 #
680 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
660 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
681 # \ / merging rev3 and rev4 should use bar@rev2
661 # \ / merging rev3 and rev4 should use bar@rev2
682 # \- 2 --- 4 as the merge base
662 # \- 2 --- 4 as the merge base
683 #
663 #
684 meta["copy"] = cp
664 meta["copy"] = cp
685 if not manifest2: # not a branch merge
665 if not manifest2: # not a branch merge
686 meta["copyrev"] = hex(manifest1.get(cp, nullid))
666 meta["copyrev"] = hex(manifest1.get(cp, nullid))
687 fp2 = nullid
667 fp2 = nullid
688 elif fp2 != nullid: # copied on remote side
668 elif fp2 != nullid: # copied on remote side
689 meta["copyrev"] = hex(manifest1.get(cp, nullid))
669 meta["copyrev"] = hex(manifest1.get(cp, nullid))
690 elif fp1 != nullid: # copied on local side, reversed
670 elif fp1 != nullid: # copied on local side, reversed
691 meta["copyrev"] = hex(manifest2.get(cp))
671 meta["copyrev"] = hex(manifest2.get(cp))
692 fp2 = fp1
672 fp2 = fp1
693 else: # directory rename
673 else: # directory rename
694 meta["copyrev"] = hex(manifest1.get(cp, nullid))
674 meta["copyrev"] = hex(manifest1.get(cp, nullid))
695 self.ui.debug(_(" %s: copy %s:%s\n") %
675 self.ui.debug(_(" %s: copy %s:%s\n") %
696 (fn, cp, meta["copyrev"]))
676 (fn, cp, meta["copyrev"]))
697 fp1 = nullid
677 fp1 = nullid
698 elif fp2 != nullid:
678 elif fp2 != nullid:
699 # is one parent an ancestor of the other?
679 # is one parent an ancestor of the other?
700 fpa = fl.ancestor(fp1, fp2)
680 fpa = fl.ancestor(fp1, fp2)
701 if fpa == fp1:
681 if fpa == fp1:
702 fp1, fp2 = fp2, nullid
682 fp1, fp2 = fp2, nullid
703 elif fpa == fp2:
683 elif fpa == fp2:
704 fp2 = nullid
684 fp2 = nullid
705
685
706 # is the file unmodified from the parent? report existing entry
686 # is the file unmodified from the parent? report existing entry
707 if fp2 == nullid and not fl.cmp(fp1, t):
687 if fp2 == nullid and not fl.cmp(fp1, t):
708 return fp1
688 return fp1
709
689
710 changelist.append(fn)
690 changelist.append(fn)
711 return fl.add(t, meta, transaction, linkrev, fp1, fp2)
691 return fl.add(t, meta, transaction, linkrev, fp1, fp2)
712
692
713 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None, extra={}):
693 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None, extra={}):
714 if p1 is None:
694 if p1 is None:
715 p1, p2 = self.dirstate.parents()
695 p1, p2 = self.dirstate.parents()
716 return self.commit(files=files, text=text, user=user, date=date,
696 return self.commit(files=files, text=text, user=user, date=date,
717 p1=p1, p2=p2, wlock=wlock, extra=extra)
697 p1=p1, p2=p2, wlock=wlock, extra=extra)
718
698
719 def commit(self, files=None, text="", user=None, date=None,
699 def commit(self, files=None, text="", user=None, date=None,
720 match=util.always, force=False, lock=None, wlock=None,
700 match=util.always, force=False, lock=None, wlock=None,
721 force_editor=False, p1=None, p2=None, extra={}):
701 force_editor=False, p1=None, p2=None, extra={}):
722
702
723 commit = []
703 commit = []
724 remove = []
704 remove = []
725 changed = []
705 changed = []
726 use_dirstate = (p1 is None) # not rawcommit
706 use_dirstate = (p1 is None) # not rawcommit
727 extra = extra.copy()
707 extra = extra.copy()
728
708
729 if use_dirstate:
709 if use_dirstate:
730 if files:
710 if files:
731 for f in files:
711 for f in files:
732 s = self.dirstate.state(f)
712 s = self.dirstate.state(f)
733 if s in 'nmai':
713 if s in 'nmai':
734 commit.append(f)
714 commit.append(f)
735 elif s == 'r':
715 elif s == 'r':
736 remove.append(f)
716 remove.append(f)
737 else:
717 else:
738 self.ui.warn(_("%s not tracked!\n") % f)
718 self.ui.warn(_("%s not tracked!\n") % f)
739 else:
719 else:
740 changes = self.status(match=match)[:5]
720 changes = self.status(match=match)[:5]
741 modified, added, removed, deleted, unknown = changes
721 modified, added, removed, deleted, unknown = changes
742 commit = modified + added
722 commit = modified + added
743 remove = removed
723 remove = removed
744 else:
724 else:
745 commit = files
725 commit = files
746
726
747 if use_dirstate:
727 if use_dirstate:
748 p1, p2 = self.dirstate.parents()
728 p1, p2 = self.dirstate.parents()
749 update_dirstate = True
729 update_dirstate = True
750 else:
730 else:
751 p1, p2 = p1, p2 or nullid
731 p1, p2 = p1, p2 or nullid
752 update_dirstate = (self.dirstate.parents()[0] == p1)
732 update_dirstate = (self.dirstate.parents()[0] == p1)
753
733
754 c1 = self.changelog.read(p1)
734 c1 = self.changelog.read(p1)
755 c2 = self.changelog.read(p2)
735 c2 = self.changelog.read(p2)
756 m1 = self.manifest.read(c1[0]).copy()
736 m1 = self.manifest.read(c1[0]).copy()
757 m2 = self.manifest.read(c2[0])
737 m2 = self.manifest.read(c2[0])
758
738
759 if use_dirstate:
739 if use_dirstate:
760 branchname = self.workingctx().branch()
740 branchname = self.workingctx().branch()
761 try:
741 try:
762 branchname = branchname.decode('UTF-8').encode('UTF-8')
742 branchname = branchname.decode('UTF-8').encode('UTF-8')
763 except UnicodeDecodeError:
743 except UnicodeDecodeError:
764 raise util.Abort(_('branch name not in UTF-8!'))
744 raise util.Abort(_('branch name not in UTF-8!'))
765 else:
745 else:
766 branchname = ""
746 branchname = ""
767
747
768 if use_dirstate:
748 if use_dirstate:
769 oldname = c1[5].get("branch") # stored in UTF-8
749 oldname = c1[5].get("branch") # stored in UTF-8
770 if not commit and not remove and not force and p2 == nullid and \
750 if not commit and not remove and not force and p2 == nullid and \
771 branchname == oldname:
751 branchname == oldname:
772 self.ui.status(_("nothing changed\n"))
752 self.ui.status(_("nothing changed\n"))
773 return None
753 return None
774
754
775 xp1 = hex(p1)
755 xp1 = hex(p1)
776 if p2 == nullid: xp2 = ''
756 if p2 == nullid: xp2 = ''
777 else: xp2 = hex(p2)
757 else: xp2 = hex(p2)
778
758
779 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
759 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
780
760
781 if not wlock:
761 if not wlock:
782 wlock = self.wlock()
762 wlock = self.wlock()
783 if not lock:
763 if not lock:
784 lock = self.lock()
764 lock = self.lock()
785 tr = self.transaction()
765 tr = self.transaction()
786
766
787 # check in files
767 # check in files
788 new = {}
768 new = {}
789 linkrev = self.changelog.count()
769 linkrev = self.changelog.count()
790 commit.sort()
770 commit.sort()
791 is_exec = util.execfunc(self.root, m1.execf)
771 is_exec = util.execfunc(self.root, m1.execf)
792 is_link = util.linkfunc(self.root, m1.linkf)
772 is_link = util.linkfunc(self.root, m1.linkf)
793 for f in commit:
773 for f in commit:
794 self.ui.note(f + "\n")
774 self.ui.note(f + "\n")
795 try:
775 try:
796 new[f] = self.filecommit(f, m1, m2, linkrev, tr, changed)
776 new[f] = self.filecommit(f, m1, m2, linkrev, tr, changed)
797 m1.set(f, is_exec(f), is_link(f))
777 m1.set(f, is_exec(f), is_link(f))
798 except (OSError, IOError):
778 except (OSError, IOError):
799 if use_dirstate:
779 if use_dirstate:
800 self.ui.warn(_("trouble committing %s!\n") % f)
780 self.ui.warn(_("trouble committing %s!\n") % f)
801 raise
781 raise
802 else:
782 else:
803 remove.append(f)
783 remove.append(f)
804
784
805 # update manifest
785 # update manifest
806 m1.update(new)
786 m1.update(new)
807 remove.sort()
787 remove.sort()
808 removed = []
788 removed = []
809
789
810 for f in remove:
790 for f in remove:
811 if f in m1:
791 if f in m1:
812 del m1[f]
792 del m1[f]
813 removed.append(f)
793 removed.append(f)
814 mn = self.manifest.add(m1, tr, linkrev, c1[0], c2[0], (new, removed))
794 mn = self.manifest.add(m1, tr, linkrev, c1[0], c2[0], (new, removed))
815
795
816 # add changeset
796 # add changeset
817 new = new.keys()
797 new = new.keys()
818 new.sort()
798 new.sort()
819
799
820 user = user or self.ui.username()
800 user = user or self.ui.username()
821 if not text or force_editor:
801 if not text or force_editor:
822 edittext = []
802 edittext = []
823 if text:
803 if text:
824 edittext.append(text)
804 edittext.append(text)
825 edittext.append("")
805 edittext.append("")
826 edittext.append("HG: user: %s" % user)
806 edittext.append("HG: user: %s" % user)
827 if p2 != nullid:
807 if p2 != nullid:
828 edittext.append("HG: branch merge")
808 edittext.append("HG: branch merge")
829 if branchname:
809 if branchname:
830 edittext.append("HG: branch %s" % util.tolocal(branchname))
810 edittext.append("HG: branch %s" % util.tolocal(branchname))
831 edittext.extend(["HG: changed %s" % f for f in changed])
811 edittext.extend(["HG: changed %s" % f for f in changed])
832 edittext.extend(["HG: removed %s" % f for f in removed])
812 edittext.extend(["HG: removed %s" % f for f in removed])
833 if not changed and not remove:
813 if not changed and not remove:
834 edittext.append("HG: no files changed")
814 edittext.append("HG: no files changed")
835 edittext.append("")
815 edittext.append("")
836 # run editor in the repository root
816 # run editor in the repository root
837 olddir = os.getcwd()
817 olddir = os.getcwd()
838 os.chdir(self.root)
818 os.chdir(self.root)
839 text = self.ui.edit("\n".join(edittext), user)
819 text = self.ui.edit("\n".join(edittext), user)
840 os.chdir(olddir)
820 os.chdir(olddir)
841
821
842 lines = [line.rstrip() for line in text.rstrip().splitlines()]
822 lines = [line.rstrip() for line in text.rstrip().splitlines()]
843 while lines and not lines[0]:
823 while lines and not lines[0]:
844 del lines[0]
824 del lines[0]
845 if not lines:
825 if not lines:
846 return None
826 return None
847 text = '\n'.join(lines)
827 text = '\n'.join(lines)
848 if branchname:
828 if branchname:
849 extra["branch"] = branchname
829 extra["branch"] = branchname
850 n = self.changelog.add(mn, changed + removed, text, tr, p1, p2,
830 n = self.changelog.add(mn, changed + removed, text, tr, p1, p2,
851 user, date, extra)
831 user, date, extra)
852 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
832 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
853 parent2=xp2)
833 parent2=xp2)
854 tr.close()
834 tr.close()
855
835
856 if self.branchcache and "branch" in extra:
836 if self.branchcache and "branch" in extra:
857 self.branchcache[util.tolocal(extra["branch"])] = n
837 self.branchcache[util.tolocal(extra["branch"])] = n
858
838
859 if use_dirstate or update_dirstate:
839 if use_dirstate or update_dirstate:
860 self.dirstate.setparents(n)
840 self.dirstate.setparents(n)
861 if use_dirstate:
841 if use_dirstate:
862 self.dirstate.update(new, "n")
842 self.dirstate.update(new, "n")
863 self.dirstate.forget(removed)
843 self.dirstate.forget(removed)
864
844
865 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
845 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
866 return n
846 return n
867
847
868 def walk(self, node=None, files=[], match=util.always, badmatch=None):
848 def walk(self, node=None, files=[], match=util.always, badmatch=None):
869 '''
849 '''
870 walk recursively through the directory tree or a given
850 walk recursively through the directory tree or a given
871 changeset, finding all files matched by the match
851 changeset, finding all files matched by the match
872 function
852 function
873
853
874 results are yielded in a tuple (src, filename), where src
854 results are yielded in a tuple (src, filename), where src
875 is one of:
855 is one of:
876 'f' the file was found in the directory tree
856 'f' the file was found in the directory tree
877 'm' the file was only in the dirstate and not in the tree
857 'm' the file was only in the dirstate and not in the tree
878 'b' file was not found and matched badmatch
858 'b' file was not found and matched badmatch
879 '''
859 '''
880
860
881 if node:
861 if node:
882 fdict = dict.fromkeys(files)
862 fdict = dict.fromkeys(files)
883 # for dirstate.walk, files=['.'] means "walk the whole tree".
863 # for dirstate.walk, files=['.'] means "walk the whole tree".
884 # follow that here, too
864 # follow that here, too
885 fdict.pop('.', None)
865 fdict.pop('.', None)
886 mdict = self.manifest.read(self.changelog.read(node)[0])
866 mdict = self.manifest.read(self.changelog.read(node)[0])
887 mfiles = mdict.keys()
867 mfiles = mdict.keys()
888 mfiles.sort()
868 mfiles.sort()
889 for fn in mfiles:
869 for fn in mfiles:
890 for ffn in fdict:
870 for ffn in fdict:
891 # match if the file is the exact name or a directory
871 # match if the file is the exact name or a directory
892 if ffn == fn or fn.startswith("%s/" % ffn):
872 if ffn == fn or fn.startswith("%s/" % ffn):
893 del fdict[ffn]
873 del fdict[ffn]
894 break
874 break
895 if match(fn):
875 if match(fn):
896 yield 'm', fn
876 yield 'm', fn
897 ffiles = fdict.keys()
877 ffiles = fdict.keys()
898 ffiles.sort()
878 ffiles.sort()
899 for fn in ffiles:
879 for fn in ffiles:
900 if badmatch and badmatch(fn):
880 if badmatch and badmatch(fn):
901 if match(fn):
881 if match(fn):
902 yield 'b', fn
882 yield 'b', fn
903 else:
883 else:
904 self.ui.warn(_('%s: No such file in rev %s\n') % (
884 self.ui.warn(_('%s: No such file in rev %s\n') % (
905 util.pathto(self.root, self.getcwd(), fn), short(node)))
885 util.pathto(self.root, self.getcwd(), fn), short(node)))
906 else:
886 else:
907 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
887 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
908 yield src, fn
888 yield src, fn
909
889
910 def status(self, node1=None, node2=None, files=[], match=util.always,
890 def status(self, node1=None, node2=None, files=[], match=util.always,
911 wlock=None, list_ignored=False, list_clean=False):
891 wlock=None, list_ignored=False, list_clean=False):
912 """return status of files between two nodes or node and working directory
892 """return status of files between two nodes or node and working directory
913
893
914 If node1 is None, use the first dirstate parent instead.
894 If node1 is None, use the first dirstate parent instead.
915 If node2 is None, compare node1 with working directory.
895 If node2 is None, compare node1 with working directory.
916 """
896 """
917
897
918 def fcmp(fn, getnode):
898 def fcmp(fn, getnode):
919 t1 = self.wread(fn)
899 t1 = self.wread(fn)
920 return self.file(fn).cmp(getnode(fn), t1)
900 return self.file(fn).cmp(getnode(fn), t1)
921
901
922 def mfmatches(node):
902 def mfmatches(node):
923 change = self.changelog.read(node)
903 change = self.changelog.read(node)
924 mf = self.manifest.read(change[0]).copy()
904 mf = self.manifest.read(change[0]).copy()
925 for fn in mf.keys():
905 for fn in mf.keys():
926 if not match(fn):
906 if not match(fn):
927 del mf[fn]
907 del mf[fn]
928 return mf
908 return mf
929
909
930 modified, added, removed, deleted, unknown = [], [], [], [], []
910 modified, added, removed, deleted, unknown = [], [], [], [], []
931 ignored, clean = [], []
911 ignored, clean = [], []
932
912
933 compareworking = False
913 compareworking = False
934 if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
914 if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
935 compareworking = True
915 compareworking = True
936
916
937 if not compareworking:
917 if not compareworking:
938 # read the manifest from node1 before the manifest from node2,
918 # read the manifest from node1 before the manifest from node2,
939 # so that we'll hit the manifest cache if we're going through
919 # so that we'll hit the manifest cache if we're going through
940 # all the revisions in parent->child order.
920 # all the revisions in parent->child order.
941 mf1 = mfmatches(node1)
921 mf1 = mfmatches(node1)
942
922
943 # are we comparing the working directory?
923 # are we comparing the working directory?
944 if not node2:
924 if not node2:
945 if not wlock:
925 if not wlock:
946 try:
926 try:
947 wlock = self.wlock(wait=0)
927 wlock = self.wlock(wait=0)
948 except lock.LockException:
928 except lock.LockException:
949 wlock = None
929 wlock = None
950 (lookup, modified, added, removed, deleted, unknown,
930 (lookup, modified, added, removed, deleted, unknown,
951 ignored, clean) = self.dirstate.status(files, match,
931 ignored, clean) = self.dirstate.status(files, match,
952 list_ignored, list_clean)
932 list_ignored, list_clean)
953
933
954 # are we comparing working dir against its parent?
934 # are we comparing working dir against its parent?
955 if compareworking:
935 if compareworking:
956 if lookup:
936 if lookup:
957 # do a full compare of any files that might have changed
937 # do a full compare of any files that might have changed
958 mnode = self.changelog.read(self.dirstate.parents()[0])[0]
938 mnode = self.changelog.read(self.dirstate.parents()[0])[0]
959 getnode = lambda fn: (self.manifest.find(mnode, fn)[0] or
939 getnode = lambda fn: (self.manifest.find(mnode, fn)[0] or
960 nullid)
940 nullid)
961 for f in lookup:
941 for f in lookup:
962 if fcmp(f, getnode):
942 if fcmp(f, getnode):
963 modified.append(f)
943 modified.append(f)
964 else:
944 else:
965 clean.append(f)
945 clean.append(f)
966 if wlock is not None:
946 if wlock is not None:
967 self.dirstate.update([f], "n")
947 self.dirstate.update([f], "n")
968 else:
948 else:
969 # we are comparing working dir against non-parent
949 # we are comparing working dir against non-parent
970 # generate a pseudo-manifest for the working dir
950 # generate a pseudo-manifest for the working dir
971 # XXX: create it in dirstate.py ?
951 # XXX: create it in dirstate.py ?
972 mf2 = mfmatches(self.dirstate.parents()[0])
952 mf2 = mfmatches(self.dirstate.parents()[0])
973 is_exec = util.execfunc(self.root, mf2.execf)
953 is_exec = util.execfunc(self.root, mf2.execf)
974 is_link = util.linkfunc(self.root, mf2.linkf)
954 is_link = util.linkfunc(self.root, mf2.linkf)
975 for f in lookup + modified + added:
955 for f in lookup + modified + added:
976 mf2[f] = ""
956 mf2[f] = ""
977 mf2.set(f, is_exec(f), is_link(f))
957 mf2.set(f, is_exec(f), is_link(f))
978 for f in removed:
958 for f in removed:
979 if f in mf2:
959 if f in mf2:
980 del mf2[f]
960 del mf2[f]
981 else:
961 else:
982 # we are comparing two revisions
962 # we are comparing two revisions
983 mf2 = mfmatches(node2)
963 mf2 = mfmatches(node2)
984
964
985 if not compareworking:
965 if not compareworking:
986 # flush lists from dirstate before comparing manifests
966 # flush lists from dirstate before comparing manifests
987 modified, added, clean = [], [], []
967 modified, added, clean = [], [], []
988
968
989 # make sure to sort the files so we talk to the disk in a
969 # make sure to sort the files so we talk to the disk in a
990 # reasonable order
970 # reasonable order
991 mf2keys = mf2.keys()
971 mf2keys = mf2.keys()
992 mf2keys.sort()
972 mf2keys.sort()
993 getnode = lambda fn: mf1.get(fn, nullid)
973 getnode = lambda fn: mf1.get(fn, nullid)
994 for fn in mf2keys:
974 for fn in mf2keys:
995 if mf1.has_key(fn):
975 if mf1.has_key(fn):
996 if mf1.flags(fn) != mf2.flags(fn) or \
976 if mf1.flags(fn) != mf2.flags(fn) or \
997 (mf1[fn] != mf2[fn] and (mf2[fn] != "" or
977 (mf1[fn] != mf2[fn] and (mf2[fn] != "" or
998 fcmp(fn, getnode))):
978 fcmp(fn, getnode))):
999 modified.append(fn)
979 modified.append(fn)
1000 elif list_clean:
980 elif list_clean:
1001 clean.append(fn)
981 clean.append(fn)
1002 del mf1[fn]
982 del mf1[fn]
1003 else:
983 else:
1004 added.append(fn)
984 added.append(fn)
1005
985
1006 removed = mf1.keys()
986 removed = mf1.keys()
1007
987
1008 # sort and return results:
988 # sort and return results:
1009 for l in modified, added, removed, deleted, unknown, ignored, clean:
989 for l in modified, added, removed, deleted, unknown, ignored, clean:
1010 l.sort()
990 l.sort()
1011 return (modified, added, removed, deleted, unknown, ignored, clean)
991 return (modified, added, removed, deleted, unknown, ignored, clean)
1012
992
1013 def add(self, list, wlock=None):
993 def add(self, list, wlock=None):
1014 if not wlock:
994 if not wlock:
1015 wlock = self.wlock()
995 wlock = self.wlock()
1016 for f in list:
996 for f in list:
1017 p = self.wjoin(f)
997 p = self.wjoin(f)
1018 islink = os.path.islink(p)
998 islink = os.path.islink(p)
1019 if not islink and not os.path.exists(p):
999 if not islink and not os.path.exists(p):
1020 self.ui.warn(_("%s does not exist!\n") % f)
1000 self.ui.warn(_("%s does not exist!\n") % f)
1021 elif not islink and not os.path.isfile(p):
1001 elif not islink and not os.path.isfile(p):
1022 self.ui.warn(_("%s not added: only files and symlinks "
1002 self.ui.warn(_("%s not added: only files and symlinks "
1023 "supported currently\n") % f)
1003 "supported currently\n") % f)
1024 elif self.dirstate.state(f) in 'an':
1004 elif self.dirstate.state(f) in 'an':
1025 self.ui.warn(_("%s already tracked!\n") % f)
1005 self.ui.warn(_("%s already tracked!\n") % f)
1026 else:
1006 else:
1027 self.dirstate.update([f], "a")
1007 self.dirstate.update([f], "a")
1028
1008
1029 def forget(self, list, wlock=None):
1009 def forget(self, list, wlock=None):
1030 if not wlock:
1010 if not wlock:
1031 wlock = self.wlock()
1011 wlock = self.wlock()
1032 for f in list:
1012 for f in list:
1033 if self.dirstate.state(f) not in 'ai':
1013 if self.dirstate.state(f) not in 'ai':
1034 self.ui.warn(_("%s not added!\n") % f)
1014 self.ui.warn(_("%s not added!\n") % f)
1035 else:
1015 else:
1036 self.dirstate.forget([f])
1016 self.dirstate.forget([f])
1037
1017
1038 def remove(self, list, unlink=False, wlock=None):
1018 def remove(self, list, unlink=False, wlock=None):
1039 if unlink:
1019 if unlink:
1040 for f in list:
1020 for f in list:
1041 try:
1021 try:
1042 util.unlink(self.wjoin(f))
1022 util.unlink(self.wjoin(f))
1043 except OSError, inst:
1023 except OSError, inst:
1044 if inst.errno != errno.ENOENT:
1024 if inst.errno != errno.ENOENT:
1045 raise
1025 raise
1046 if not wlock:
1026 if not wlock:
1047 wlock = self.wlock()
1027 wlock = self.wlock()
1048 for f in list:
1028 for f in list:
1049 p = self.wjoin(f)
1029 p = self.wjoin(f)
1050 if os.path.exists(p):
1030 if os.path.exists(p):
1051 self.ui.warn(_("%s still exists!\n") % f)
1031 self.ui.warn(_("%s still exists!\n") % f)
1052 elif self.dirstate.state(f) == 'a':
1032 elif self.dirstate.state(f) == 'a':
1053 self.dirstate.forget([f])
1033 self.dirstate.forget([f])
1054 elif f not in self.dirstate:
1034 elif f not in self.dirstate:
1055 self.ui.warn(_("%s not tracked!\n") % f)
1035 self.ui.warn(_("%s not tracked!\n") % f)
1056 else:
1036 else:
1057 self.dirstate.update([f], "r")
1037 self.dirstate.update([f], "r")
1058
1038
1059 def undelete(self, list, wlock=None):
1039 def undelete(self, list, wlock=None):
1060 p = self.dirstate.parents()[0]
1040 p = self.dirstate.parents()[0]
1061 mn = self.changelog.read(p)[0]
1041 mn = self.changelog.read(p)[0]
1062 m = self.manifest.read(mn)
1042 m = self.manifest.read(mn)
1063 if not wlock:
1043 if not wlock:
1064 wlock = self.wlock()
1044 wlock = self.wlock()
1065 for f in list:
1045 for f in list:
1066 if self.dirstate.state(f) not in "r":
1046 if self.dirstate.state(f) not in "r":
1067 self.ui.warn("%s not removed!\n" % f)
1047 self.ui.warn("%s not removed!\n" % f)
1068 else:
1048 else:
1069 t = self.file(f).read(m[f])
1049 t = self.file(f).read(m[f])
1070 self.wwrite(f, t, m.flags(f))
1050 self.wwrite(f, t, m.flags(f))
1071 self.dirstate.update([f], "n")
1051 self.dirstate.update([f], "n")
1072
1052
1073 def copy(self, source, dest, wlock=None):
1053 def copy(self, source, dest, wlock=None):
1074 p = self.wjoin(dest)
1054 p = self.wjoin(dest)
1075 if not os.path.exists(p):
1055 if not os.path.exists(p):
1076 self.ui.warn(_("%s does not exist!\n") % dest)
1056 self.ui.warn(_("%s does not exist!\n") % dest)
1077 elif not os.path.isfile(p):
1057 elif not os.path.isfile(p):
1078 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
1058 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
1079 else:
1059 else:
1080 if not wlock:
1060 if not wlock:
1081 wlock = self.wlock()
1061 wlock = self.wlock()
1082 if self.dirstate.state(dest) == '?':
1062 if self.dirstate.state(dest) == '?':
1083 self.dirstate.update([dest], "a")
1063 self.dirstate.update([dest], "a")
1084 self.dirstate.copy(source, dest)
1064 self.dirstate.copy(source, dest)
1085
1065
def heads(self, start=None):
    """Return changelog heads (optionally limited to descendants of
    *start*), ordered by descending revision number."""
    # decorate-sort-undecorate; revs are unique, so sorting ascending
    # and reversing gives the same order as sorting on negated revs.
    decorated = [(self.changelog.rev(h), h) for h in self.changelog.heads(start)]
    decorated.sort()
    decorated.reverse()
    return [h for (rev, h) in decorated]
1092
1072
def branches(self, nodes):
    """For each node, walk first parents back to a branch point and
    return (head, root, parent1, parent2) 4-tuples.

    A 'branch' here is a linear run of single-parent changesets; the
    walk stops at a merge (second parent set) or at the root (first
    parent nullid).  With no nodes given, start from the tip.
    """
    if not nodes:
        nodes = [self.changelog.tip()]
    result = []
    for head in nodes:
        node = head
        while True:
            p1, p2 = self.changelog.parents(node)
            if p2 != nullid or p1 == nullid:
                result.append((head, node, p1, p2))
                break
            node = p1
    return result
1106
1086
def between(self, pairs):
    """For each (top, bottom) pair, return the nodes on the first-parent
    walk from top down to bottom sampled at exponentially growing
    distances (1, 2, 4, ...) — the skeleton used by the binary search
    in the discovery protocol."""
    result = []
    for top, bottom in pairs:
        node = top
        sampled = []
        step = 0
        # next step index at which to record a node
        mark = 1
        while node != bottom:
            parent = self.changelog.parents(node)[0]
            if step == mark:
                sampled.append(node)
                mark *= 2
            node = parent
            step += 1
        result.append(sampled)
    return result
1125
1105
def findincoming(self, remote, base=None, heads=None, force=False):
    """Return list of roots of the subsets of missing nodes from remote

    If base dict is specified, assume that these nodes and their parents
    exist on the remote side and that no child of a node of base exists
    in both remote and self.
    Furthermore base will be updated to include the nodes that exists
    in self and remote but no children exists in self and remote.
    If a list of heads is specified, return only nodes which are heads
    or ancestors of these heads.

    All the ancestors of base are in self and in remote.
    All the descendants of the list returned are missing in self.
    (and so we know that the rest of the nodes are missing in remote, see
    outgoing)
    """
    m = self.changelog.nodemap
    search = []
    fetch = {}       # earliest unknown nodes (used as a set)
    seen = {}
    seenbranch = {}
    if base is None:  # FIX: identity check, not equality, for None
        base = {}

    if not heads:
        heads = remote.heads()

    if self.changelog.tip() == nullid:
        # local repo is empty: everything remote has is missing
        base[nullid] = 1
        if heads != [nullid]:
            return [nullid]
        return []

    # assume we're closer to the tip than the root
    # and start by examining the heads
    self.ui.status(_("searching for changes\n"))

    unknown = []
    for h in heads:
        if h not in m:
            unknown.append(h)
        else:
            base[h] = 1

    if not unknown:
        return []

    req = dict.fromkeys(unknown)
    reqcnt = 0

    # search through remote branches
    # a 'branch' here is a linear segment of history, with four parts:
    # head, root, first parent, second parent
    # (a branch always has two parents (or none) by definition)
    unknown = remote.branches(unknown)
    while unknown:
        r = []
        while unknown:
            n = unknown.pop(0)
            if n[0] in seen:
                continue

            self.ui.debug(_("examining %s:%s\n")
                          % (short(n[0]), short(n[1])))
            if n[0] == nullid: # found the end of the branch
                pass
            elif n in seenbranch:
                self.ui.debug(_("branch already found\n"))
                continue
            elif n[1] and n[1] in m: # do we know the base?
                self.ui.debug(_("found incomplete branch %s:%s\n")
                              % (short(n[0]), short(n[1])))
                search.append(n) # schedule branch range for scanning
                seenbranch[n] = 1
            else:
                if n[1] not in seen and n[1] not in fetch:
                    if n[2] in m and n[3] in m:
                        self.ui.debug(_("found new changeset %s\n") %
                                      short(n[1]))
                        fetch[n[1]] = 1 # earliest unknown
                    for p in n[2:4]:
                        if p in m:
                            base[p] = 1 # latest known

                # queue unknown parents for the next round of requests
                for p in n[2:4]:
                    if p not in req and p not in m:
                        r.append(p)
                        req[p] = 1
            seen[n[0]] = 1

        if r:
            reqcnt += 1
            self.ui.debug(_("request %d: %s\n") %
                          (reqcnt, " ".join(map(short, r))))
            # batch branch requests ten nodes at a time
            for p in xrange(0, len(r), 10):
                for b in remote.branches(r[p:p+10]):
                    self.ui.debug(_("received %s:%s\n") %
                                  (short(b[0]), short(b[1])))
                    unknown.append(b)

    # do binary search on the branches we found
    while search:
        n = search.pop(0)
        reqcnt += 1
        l = remote.between([(n[0], n[1])])[0]
        l.append(n[1])
        p = n[0]
        f = 1
        for i in l:
            self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
            if i in m:
                if f <= 2:
                    self.ui.debug(_("found new branch changeset %s\n") %
                                  short(p))
                    fetch[p] = 1
                    base[i] = 1
                else:
                    self.ui.debug(_("narrowed branch search to %s:%s\n")
                                  % (short(p), short(i)))
                    search.append((p, i))
                break
            p, f = i, f * 2

    # sanity check our fetch list
    for f in fetch.keys():
        if f in m:
            # FIX: pass the full binary node to short(); slicing to the
            # first 4 bytes (f[:4]) produced a garbled hash in the error.
            raise repo.RepoError(_("already have changeset ") + short(f))

    if base.keys() == [nullid]:
        if force:
            self.ui.warn(_("warning: repository is unrelated\n"))
        else:
            raise util.Abort(_("repository is unrelated"))

    self.ui.debug(_("found new changesets starting at ") +
                  " ".join([short(f) for f in fetch]) + "\n")

    self.ui.debug(_("%d total queries\n") % reqcnt)

    return fetch.keys()
1266
1246
def findoutgoing(self, remote, base=None, heads=None, force=False):
    """Return list of nodes that are roots of subsets not in remote

    If base dict is specified, assume that these nodes and their parents
    exist on the remote side.
    If a list of heads is specified, return only nodes which are heads
    or ancestors of these heads, and return a second element which
    contains all remote heads which get new children.
    """
    if base is None:  # FIX: identity check, not equality, for None
        # caller did not run discovery yet; do it now to populate base
        base = {}
        self.findincoming(remote, base, heads, force=force)

    self.ui.debug(_("common changesets up to ")
                  + " ".join(map(short, base.keys())) + "\n")

    remain = dict.fromkeys(self.changelog.nodemap)

    # prune everything remote has from the tree
    del remain[nullid]
    remove = base.keys()
    while remove:
        n = remove.pop(0)
        if n in remain:
            del remain[n]
            for p in self.changelog.parents(n):
                remove.append(p)

    # find every node whose parents have been pruned
    subset = []
    # find every remote head that will get new children
    updated_heads = {}
    for n in remain:
        p1, p2 = self.changelog.parents(n)
        if p1 not in remain and p2 not in remain:
            subset.append(n)
        if heads:
            if p1 in heads:
                updated_heads[p1] = True
            if p2 in heads:
                updated_heads[p2] = True

    # this is the set of all roots we have to push
    if heads:
        return subset, updated_heads.keys()
    else:
        return subset
1314
1294
def pull(self, remote, heads=None, force=False, lock=None):
    """Pull missing changesets from *remote* and add them locally.

    Returns 0 when there is nothing to pull, otherwise the result of
    addchangegroup.  Takes the repo lock itself unless the caller
    supplies one; only a self-acquired lock is released here.
    """
    ownlock = False
    if not lock:
        lock = self.lock()
        ownlock = True

    try:
        fetch = self.findincoming(remote, force=force)
        if fetch == [nullid]:
            self.ui.status(_("requesting all changes\n"))

        if not fetch:
            self.ui.status(_("no changes found\n"))
            return 0

        if heads is None:
            cg = remote.changegroup(fetch, 'pull')
        else:
            # partial pull needs protocol support on the other side
            if 'changegroupsubset' not in remote.capabilities:
                raise util.Abort(_("Partial pull cannot be done because other repository doesn't support changegroupsubset."))
            cg = remote.changegroupsubset(fetch, heads, 'pull')
        return self.addchangegroup(cg, 'pull', remote.url())
    finally:
        if ownlock:
            lock.release()
1340
1320
def push(self, remote, force=False, revs=None):
    """Push local changes to *remote*, dispatching on its capabilities.

    There are two ways to push to remote repo:

    - addchangegroup assumes local user can lock remote
      repo (local filesystem, old ssh servers).
    - unbundle assumes local user cannot lock remote repo (new ssh
      servers, http servers).
    """
    if remote.capable('unbundle'):
        return self.push_unbundle(remote, force, revs)
    return self.push_addchangegroup(remote, force, revs)
1353
1333
def prepush(self, remote, force, revs):
    """Compute the changegroup to push to *remote*.

    Returns (changegroup, remote_heads) on success, or (None, status)
    when there is nothing to push or the push would create new remote
    heads without --force.
    """
    base = {}
    remote_heads = remote.heads()
    inc = self.findincoming(remote, base, remote_heads, force=force)

    update, updated_heads = self.findoutgoing(remote, base, remote_heads)
    if revs is not None:
        msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
    else:
        bases, heads = update, self.changelog.heads()

    if not bases:
        self.ui.status(_("no changes found\n"))
        return None, 1

    if not force:
        # check if we're creating new remote heads
        # to be a remote head after push, node must be either
        # - unknown locally
        # - a local outgoing head descended from update
        # - a remote head that's known locally and not
        #   ancestral to an outgoing head
        newbranch = False

        if remote_heads == [nullid]:
            # empty remote: cannot create extra heads
            newbranch = False
        elif not revs and len(heads) > len(remote_heads):
            newbranch = True
        else:
            candidates = list(heads)
            for rh in remote_heads:
                if rh in self.changelog.nodemap:
                    descendants = self.changelog.heads(rh, heads)
                    if not [h for h in heads if h in descendants]:
                        # known remote head not superseded by any
                        # outgoing head: it survives the push
                        candidates.append(rh)
                else:
                    candidates.append(rh)
            if len(candidates) > len(remote_heads):
                newbranch = True

        if newbranch:
            self.ui.warn(_("abort: push creates new remote branches!\n"))
            self.ui.status(_("(did you forget to merge?"
                             " use push -f to force)\n"))
            return None, 1
        elif inc:
            self.ui.warn(_("note: unsynced remote changes!\n"))

    if revs is None:
        cg = self.changegroup(update, 'push')
    else:
        cg = self.changegroupsubset(update, revs, 'push')
    return cg, remote_heads
1409
1389
def push_addchangegroup(self, remote, force, revs):
    """Push by locking the remote and feeding it a changegroup directly."""
    lock = remote.lock()

    cg, remote_heads = self.prepush(remote, force, revs)
    if cg is None:
        # nothing to push / aborted; second element is the status code
        return remote_heads
    return remote.addchangegroup(cg, 'push', self.url())
1418
1398
def push_unbundle(self, remote, force, revs):
    """Push via the unbundle protocol (remote not lockable by us).

    local repo finds heads on server, finds out what revs it
    must push. once revs transferred, if server finds it has
    different heads (someone else won commit/push race), server
    aborts.
    """
    cg, remote_heads = self.prepush(remote, force, revs)
    if cg is None:
        # nothing to push / aborted; second element is the status code
        return remote_heads
    if force:
        # skip the server-side race check
        remote_heads = ['force']
    return remote.unbundle(cg, remote_heads, 'push')
1431
1411
def changegroupinfo(self, nodes):
    """Report how many changesets are in a changegroup; list their
    hashes when debugging output is enabled."""
    self.ui.note(_("%d changesets found\n") % len(nodes))
    if not self.ui.debugflag:
        return
    self.ui.debug(_("List of changesets:\n"))
    for node in nodes:
        self.ui.debug("%s\n" % hex(node))
1438
1418
1439 def changegroupsubset(self, bases, heads, source):
1419 def changegroupsubset(self, bases, heads, source):
1440 """This function generates a changegroup consisting of all the nodes
1420 """This function generates a changegroup consisting of all the nodes
1441 that are descendents of any of the bases, and ancestors of any of
1421 that are descendents of any of the bases, and ancestors of any of
1442 the heads.
1422 the heads.
1443
1423
1444 It is fairly complex as determining which filenodes and which
1424 It is fairly complex as determining which filenodes and which
1445 manifest nodes need to be included for the changeset to be complete
1425 manifest nodes need to be included for the changeset to be complete
1446 is non-trivial.
1426 is non-trivial.
1447
1427
1448 Another wrinkle is doing the reverse, figuring out which changeset in
1428 Another wrinkle is doing the reverse, figuring out which changeset in
1449 the changegroup a particular filenode or manifestnode belongs to."""
1429 the changegroup a particular filenode or manifestnode belongs to."""
1450
1430
1451 self.hook('preoutgoing', throw=True, source=source)
1431 self.hook('preoutgoing', throw=True, source=source)
1452
1432
1453 # Set up some initial variables
1433 # Set up some initial variables
1454 # Make it easy to refer to self.changelog
1434 # Make it easy to refer to self.changelog
1455 cl = self.changelog
1435 cl = self.changelog
1456 # msng is short for missing - compute the list of changesets in this
1436 # msng is short for missing - compute the list of changesets in this
1457 # changegroup.
1437 # changegroup.
1458 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1438 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1459 self.changegroupinfo(msng_cl_lst)
1439 self.changegroupinfo(msng_cl_lst)
1460 # Some bases may turn out to be superfluous, and some heads may be
1440 # Some bases may turn out to be superfluous, and some heads may be
1461 # too. nodesbetween will return the minimal set of bases and heads
1441 # too. nodesbetween will return the minimal set of bases and heads
1462 # necessary to re-create the changegroup.
1442 # necessary to re-create the changegroup.
1463
1443
1464 # Known heads are the list of heads that it is assumed the recipient
1444 # Known heads are the list of heads that it is assumed the recipient
1465 # of this changegroup will know about.
1445 # of this changegroup will know about.
1466 knownheads = {}
1446 knownheads = {}
1467 # We assume that all parents of bases are known heads.
1447 # We assume that all parents of bases are known heads.
1468 for n in bases:
1448 for n in bases:
1469 for p in cl.parents(n):
1449 for p in cl.parents(n):
1470 if p != nullid:
1450 if p != nullid:
1471 knownheads[p] = 1
1451 knownheads[p] = 1
1472 knownheads = knownheads.keys()
1452 knownheads = knownheads.keys()
1473 if knownheads:
1453 if knownheads:
1474 # Now that we know what heads are known, we can compute which
1454 # Now that we know what heads are known, we can compute which
1475 # changesets are known. The recipient must know about all
1455 # changesets are known. The recipient must know about all
1476 # changesets required to reach the known heads from the null
1456 # changesets required to reach the known heads from the null
1477 # changeset.
1457 # changeset.
1478 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1458 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1479 junk = None
1459 junk = None
1480 # Transform the list into an ersatz set.
1460 # Transform the list into an ersatz set.
1481 has_cl_set = dict.fromkeys(has_cl_set)
1461 has_cl_set = dict.fromkeys(has_cl_set)
1482 else:
1462 else:
1483 # If there were no known heads, the recipient cannot be assumed to
1463 # If there were no known heads, the recipient cannot be assumed to
1484 # know about any changesets.
1464 # know about any changesets.
1485 has_cl_set = {}
1465 has_cl_set = {}
1486
1466
1487 # Make it easy to refer to self.manifest
1467 # Make it easy to refer to self.manifest
1488 mnfst = self.manifest
1468 mnfst = self.manifest
1489 # We don't know which manifests are missing yet
1469 # We don't know which manifests are missing yet
1490 msng_mnfst_set = {}
1470 msng_mnfst_set = {}
1491 # Nor do we know which filenodes are missing.
1471 # Nor do we know which filenodes are missing.
1492 msng_filenode_set = {}
1472 msng_filenode_set = {}
1493
1473
1494 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1474 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1495 junk = None
1475 junk = None
1496
1476
1497 # A changeset always belongs to itself, so the changenode lookup
1477 # A changeset always belongs to itself, so the changenode lookup
1498 # function for a changenode is identity.
1478 # function for a changenode is identity.
1499 def identity(x):
1479 def identity(x):
1500 return x
1480 return x
1501
1481
1502 # A function generating function. Sets up an environment for the
1482 # A function generating function. Sets up an environment for the
1503 # inner function.
1483 # inner function.
1504 def cmp_by_rev_func(revlog):
1484 def cmp_by_rev_func(revlog):
1505 # Compare two nodes by their revision number in the environment's
1485 # Compare two nodes by their revision number in the environment's
1506 # revision history. Since the revision number both represents the
1486 # revision history. Since the revision number both represents the
1507 # most efficient order to read the nodes in, and represents a
1487 # most efficient order to read the nodes in, and represents a
1508 # topological sorting of the nodes, this function is often useful.
1488 # topological sorting of the nodes, this function is often useful.
1509 def cmp_by_rev(a, b):
1489 def cmp_by_rev(a, b):
1510 return cmp(revlog.rev(a), revlog.rev(b))
1490 return cmp(revlog.rev(a), revlog.rev(b))
1511 return cmp_by_rev
1491 return cmp_by_rev
1512
1492
1513 # If we determine that a particular file or manifest node must be a
1493 # If we determine that a particular file or manifest node must be a
1514 # node that the recipient of the changegroup will already have, we can
1494 # node that the recipient of the changegroup will already have, we can
1515 # also assume the recipient will have all the parents. This function
1495 # also assume the recipient will have all the parents. This function
1516 # prunes them from the set of missing nodes.
1496 # prunes them from the set of missing nodes.
1517 def prune_parents(revlog, hasset, msngset):
1497 def prune_parents(revlog, hasset, msngset):
1518 haslst = hasset.keys()
1498 haslst = hasset.keys()
1519 haslst.sort(cmp_by_rev_func(revlog))
1499 haslst.sort(cmp_by_rev_func(revlog))
1520 for node in haslst:
1500 for node in haslst:
1521 parentlst = [p for p in revlog.parents(node) if p != nullid]
1501 parentlst = [p for p in revlog.parents(node) if p != nullid]
1522 while parentlst:
1502 while parentlst:
1523 n = parentlst.pop()
1503 n = parentlst.pop()
1524 if n not in hasset:
1504 if n not in hasset:
1525 hasset[n] = 1
1505 hasset[n] = 1
1526 p = [p for p in revlog.parents(n) if p != nullid]
1506 p = [p for p in revlog.parents(n) if p != nullid]
1527 parentlst.extend(p)
1507 parentlst.extend(p)
1528 for n in hasset:
1508 for n in hasset:
1529 msngset.pop(n, None)
1509 msngset.pop(n, None)
1530
1510
1531 # This is a function generating function used to set up an environment
1511 # This is a function generating function used to set up an environment
1532 # for the inner function to execute in.
1512 # for the inner function to execute in.
1533 def manifest_and_file_collector(changedfileset):
1513 def manifest_and_file_collector(changedfileset):
1534 # This is an information gathering function that gathers
1514 # This is an information gathering function that gathers
1535 # information from each changeset node that goes out as part of
1515 # information from each changeset node that goes out as part of
1536 # the changegroup. The information gathered is a list of which
1516 # the changegroup. The information gathered is a list of which
1537 # manifest nodes are potentially required (the recipient may
1517 # manifest nodes are potentially required (the recipient may
1538 # already have them) and total list of all files which were
1518 # already have them) and total list of all files which were
1539 # changed in any changeset in the changegroup.
1519 # changed in any changeset in the changegroup.
1540 #
1520 #
1541 # We also remember the first changenode we saw any manifest
1521 # We also remember the first changenode we saw any manifest
1542 # referenced by so we can later determine which changenode 'owns'
1522 # referenced by so we can later determine which changenode 'owns'
1543 # the manifest.
1523 # the manifest.
1544 def collect_manifests_and_files(clnode):
1524 def collect_manifests_and_files(clnode):
1545 c = cl.read(clnode)
1525 c = cl.read(clnode)
1546 for f in c[3]:
1526 for f in c[3]:
1547 # This is to make sure we only have one instance of each
1527 # This is to make sure we only have one instance of each
1548 # filename string for each filename.
1528 # filename string for each filename.
1549 changedfileset.setdefault(f, f)
1529 changedfileset.setdefault(f, f)
1550 msng_mnfst_set.setdefault(c[0], clnode)
1530 msng_mnfst_set.setdefault(c[0], clnode)
1551 return collect_manifests_and_files
1531 return collect_manifests_and_files
1552
1532
1553 # Figure out which manifest nodes (of the ones we think might be part
1533 # Figure out which manifest nodes (of the ones we think might be part
1554 # of the changegroup) the recipient must know about and remove them
1534 # of the changegroup) the recipient must know about and remove them
1555 # from the changegroup.
1535 # from the changegroup.
1556 def prune_manifests():
1536 def prune_manifests():
1557 has_mnfst_set = {}
1537 has_mnfst_set = {}
1558 for n in msng_mnfst_set:
1538 for n in msng_mnfst_set:
1559 # If a 'missing' manifest thinks it belongs to a changenode
1539 # If a 'missing' manifest thinks it belongs to a changenode
1560 # the recipient is assumed to have, obviously the recipient
1540 # the recipient is assumed to have, obviously the recipient
1561 # must have that manifest.
1541 # must have that manifest.
1562 linknode = cl.node(mnfst.linkrev(n))
1542 linknode = cl.node(mnfst.linkrev(n))
1563 if linknode in has_cl_set:
1543 if linknode in has_cl_set:
1564 has_mnfst_set[n] = 1
1544 has_mnfst_set[n] = 1
1565 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1545 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1566
1546
1567 # Use the information collected in collect_manifests_and_files to say
1547 # Use the information collected in collect_manifests_and_files to say
1568 # which changenode any manifestnode belongs to.
1548 # which changenode any manifestnode belongs to.
1569 def lookup_manifest_link(mnfstnode):
1549 def lookup_manifest_link(mnfstnode):
1570 return msng_mnfst_set[mnfstnode]
1550 return msng_mnfst_set[mnfstnode]
1571
1551
1572 # A function generating function that sets up the initial environment
1552 # A function generating function that sets up the initial environment
1573 # the inner function.
1553 # the inner function.
1574 def filenode_collector(changedfiles):
1554 def filenode_collector(changedfiles):
1575 next_rev = [0]
1555 next_rev = [0]
1576 # This gathers information from each manifestnode included in the
1556 # This gathers information from each manifestnode included in the
1577 # changegroup about which filenodes the manifest node references
1557 # changegroup about which filenodes the manifest node references
1578 # so we can include those in the changegroup too.
1558 # so we can include those in the changegroup too.
1579 #
1559 #
1580 # It also remembers which changenode each filenode belongs to. It
1560 # It also remembers which changenode each filenode belongs to. It
1581 # does this by assuming the a filenode belongs to the changenode
1561 # does this by assuming the a filenode belongs to the changenode
1582 # the first manifest that references it belongs to.
1562 # the first manifest that references it belongs to.
1583 def collect_msng_filenodes(mnfstnode):
1563 def collect_msng_filenodes(mnfstnode):
1584 r = mnfst.rev(mnfstnode)
1564 r = mnfst.rev(mnfstnode)
1585 if r == next_rev[0]:
1565 if r == next_rev[0]:
1586 # If the last rev we looked at was the one just previous,
1566 # If the last rev we looked at was the one just previous,
1587 # we only need to see a diff.
1567 # we only need to see a diff.
1588 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1568 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1589 # For each line in the delta
1569 # For each line in the delta
1590 for dline in delta.splitlines():
1570 for dline in delta.splitlines():
1591 # get the filename and filenode for that line
1571 # get the filename and filenode for that line
1592 f, fnode = dline.split('\0')
1572 f, fnode = dline.split('\0')
1593 fnode = bin(fnode[:40])
1573 fnode = bin(fnode[:40])
1594 f = changedfiles.get(f, None)
1574 f = changedfiles.get(f, None)
1595 # And if the file is in the list of files we care
1575 # And if the file is in the list of files we care
1596 # about.
1576 # about.
1597 if f is not None:
1577 if f is not None:
1598 # Get the changenode this manifest belongs to
1578 # Get the changenode this manifest belongs to
1599 clnode = msng_mnfst_set[mnfstnode]
1579 clnode = msng_mnfst_set[mnfstnode]
1600 # Create the set of filenodes for the file if
1580 # Create the set of filenodes for the file if
1601 # there isn't one already.
1581 # there isn't one already.
1602 ndset = msng_filenode_set.setdefault(f, {})
1582 ndset = msng_filenode_set.setdefault(f, {})
1603 # And set the filenode's changelog node to the
1583 # And set the filenode's changelog node to the
1604 # manifest's if it hasn't been set already.
1584 # manifest's if it hasn't been set already.
1605 ndset.setdefault(fnode, clnode)
1585 ndset.setdefault(fnode, clnode)
1606 else:
1586 else:
1607 # Otherwise we need a full manifest.
1587 # Otherwise we need a full manifest.
1608 m = mnfst.read(mnfstnode)
1588 m = mnfst.read(mnfstnode)
1609 # For every file in we care about.
1589 # For every file in we care about.
1610 for f in changedfiles:
1590 for f in changedfiles:
1611 fnode = m.get(f, None)
1591 fnode = m.get(f, None)
1612 # If it's in the manifest
1592 # If it's in the manifest
1613 if fnode is not None:
1593 if fnode is not None:
1614 # See comments above.
1594 # See comments above.
1615 clnode = msng_mnfst_set[mnfstnode]
1595 clnode = msng_mnfst_set[mnfstnode]
1616 ndset = msng_filenode_set.setdefault(f, {})
1596 ndset = msng_filenode_set.setdefault(f, {})
1617 ndset.setdefault(fnode, clnode)
1597 ndset.setdefault(fnode, clnode)
1618 # Remember the revision we hope to see next.
1598 # Remember the revision we hope to see next.
1619 next_rev[0] = r + 1
1599 next_rev[0] = r + 1
1620 return collect_msng_filenodes
1600 return collect_msng_filenodes
1621
1601
1622 # We have a list of filenodes we think we need for a file, lets remove
1602 # We have a list of filenodes we think we need for a file, lets remove
1623 # all those we now the recipient must have.
1603 # all those we now the recipient must have.
1624 def prune_filenodes(f, filerevlog):
1604 def prune_filenodes(f, filerevlog):
1625 msngset = msng_filenode_set[f]
1605 msngset = msng_filenode_set[f]
1626 hasset = {}
1606 hasset = {}
1627 # If a 'missing' filenode thinks it belongs to a changenode we
1607 # If a 'missing' filenode thinks it belongs to a changenode we
1628 # assume the recipient must have, then the recipient must have
1608 # assume the recipient must have, then the recipient must have
1629 # that filenode.
1609 # that filenode.
1630 for n in msngset:
1610 for n in msngset:
1631 clnode = cl.node(filerevlog.linkrev(n))
1611 clnode = cl.node(filerevlog.linkrev(n))
1632 if clnode in has_cl_set:
1612 if clnode in has_cl_set:
1633 hasset[n] = 1
1613 hasset[n] = 1
1634 prune_parents(filerevlog, hasset, msngset)
1614 prune_parents(filerevlog, hasset, msngset)
1635
1615
1636 # A function generator function that sets up the a context for the
1616 # A function generator function that sets up the a context for the
1637 # inner function.
1617 # inner function.
1638 def lookup_filenode_link_func(fname):
1618 def lookup_filenode_link_func(fname):
1639 msngset = msng_filenode_set[fname]
1619 msngset = msng_filenode_set[fname]
1640 # Lookup the changenode the filenode belongs to.
1620 # Lookup the changenode the filenode belongs to.
1641 def lookup_filenode_link(fnode):
1621 def lookup_filenode_link(fnode):
1642 return msngset[fnode]
1622 return msngset[fnode]
1643 return lookup_filenode_link
1623 return lookup_filenode_link
1644
1624
1645 # Now that we have all theses utility functions to help out and
1625 # Now that we have all theses utility functions to help out and
1646 # logically divide up the task, generate the group.
1626 # logically divide up the task, generate the group.
1647 def gengroup():
1627 def gengroup():
1648 # The set of changed files starts empty.
1628 # The set of changed files starts empty.
1649 changedfiles = {}
1629 changedfiles = {}
1650 # Create a changenode group generator that will call our functions
1630 # Create a changenode group generator that will call our functions
1651 # back to lookup the owning changenode and collect information.
1631 # back to lookup the owning changenode and collect information.
1652 group = cl.group(msng_cl_lst, identity,
1632 group = cl.group(msng_cl_lst, identity,
1653 manifest_and_file_collector(changedfiles))
1633 manifest_and_file_collector(changedfiles))
1654 for chnk in group:
1634 for chnk in group:
1655 yield chnk
1635 yield chnk
1656
1636
1657 # The list of manifests has been collected by the generator
1637 # The list of manifests has been collected by the generator
1658 # calling our functions back.
1638 # calling our functions back.
1659 prune_manifests()
1639 prune_manifests()
1660 msng_mnfst_lst = msng_mnfst_set.keys()
1640 msng_mnfst_lst = msng_mnfst_set.keys()
1661 # Sort the manifestnodes by revision number.
1641 # Sort the manifestnodes by revision number.
1662 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1642 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1663 # Create a generator for the manifestnodes that calls our lookup
1643 # Create a generator for the manifestnodes that calls our lookup
1664 # and data collection functions back.
1644 # and data collection functions back.
1665 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1645 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1666 filenode_collector(changedfiles))
1646 filenode_collector(changedfiles))
1667 for chnk in group:
1647 for chnk in group:
1668 yield chnk
1648 yield chnk
1669
1649
1670 # These are no longer needed, dereference and toss the memory for
1650 # These are no longer needed, dereference and toss the memory for
1671 # them.
1651 # them.
1672 msng_mnfst_lst = None
1652 msng_mnfst_lst = None
1673 msng_mnfst_set.clear()
1653 msng_mnfst_set.clear()
1674
1654
1675 changedfiles = changedfiles.keys()
1655 changedfiles = changedfiles.keys()
1676 changedfiles.sort()
1656 changedfiles.sort()
1677 # Go through all our files in order sorted by name.
1657 # Go through all our files in order sorted by name.
1678 for fname in changedfiles:
1658 for fname in changedfiles:
1679 filerevlog = self.file(fname)
1659 filerevlog = self.file(fname)
1680 # Toss out the filenodes that the recipient isn't really
1660 # Toss out the filenodes that the recipient isn't really
1681 # missing.
1661 # missing.
1682 if msng_filenode_set.has_key(fname):
1662 if msng_filenode_set.has_key(fname):
1683 prune_filenodes(fname, filerevlog)
1663 prune_filenodes(fname, filerevlog)
1684 msng_filenode_lst = msng_filenode_set[fname].keys()
1664 msng_filenode_lst = msng_filenode_set[fname].keys()
1685 else:
1665 else:
1686 msng_filenode_lst = []
1666 msng_filenode_lst = []
1687 # If any filenodes are left, generate the group for them,
1667 # If any filenodes are left, generate the group for them,
1688 # otherwise don't bother.
1668 # otherwise don't bother.
1689 if len(msng_filenode_lst) > 0:
1669 if len(msng_filenode_lst) > 0:
1690 yield changegroup.genchunk(fname)
1670 yield changegroup.genchunk(fname)
1691 # Sort the filenodes by their revision #
1671 # Sort the filenodes by their revision #
1692 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1672 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1693 # Create a group generator and only pass in a changenode
1673 # Create a group generator and only pass in a changenode
1694 # lookup function as we need to collect no information
1674 # lookup function as we need to collect no information
1695 # from filenodes.
1675 # from filenodes.
1696 group = filerevlog.group(msng_filenode_lst,
1676 group = filerevlog.group(msng_filenode_lst,
1697 lookup_filenode_link_func(fname))
1677 lookup_filenode_link_func(fname))
1698 for chnk in group:
1678 for chnk in group:
1699 yield chnk
1679 yield chnk
1700 if msng_filenode_set.has_key(fname):
1680 if msng_filenode_set.has_key(fname):
1701 # Don't need this anymore, toss it to free memory.
1681 # Don't need this anymore, toss it to free memory.
1702 del msng_filenode_set[fname]
1682 del msng_filenode_set[fname]
1703 # Signal that no more groups are left.
1683 # Signal that no more groups are left.
1704 yield changegroup.closechunk()
1684 yield changegroup.closechunk()
1705
1685
1706 if msng_cl_lst:
1686 if msng_cl_lst:
1707 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1687 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1708
1688
1709 return util.chunkbuffer(gengroup())
1689 return util.chunkbuffer(gengroup())
1710
1690
1711 def changegroup(self, basenodes, source):
1691 def changegroup(self, basenodes, source):
1712 """Generate a changegroup of all nodes that we have that a recipient
1692 """Generate a changegroup of all nodes that we have that a recipient
1713 doesn't.
1693 doesn't.
1714
1694
1715 This is much easier than the previous function as we can assume that
1695 This is much easier than the previous function as we can assume that
1716 the recipient has any changenode we aren't sending them."""
1696 the recipient has any changenode we aren't sending them."""
1717
1697
1718 self.hook('preoutgoing', throw=True, source=source)
1698 self.hook('preoutgoing', throw=True, source=source)
1719
1699
1720 cl = self.changelog
1700 cl = self.changelog
1721 nodes = cl.nodesbetween(basenodes, None)[0]
1701 nodes = cl.nodesbetween(basenodes, None)[0]
1722 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1702 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1723 self.changegroupinfo(nodes)
1703 self.changegroupinfo(nodes)
1724
1704
1725 def identity(x):
1705 def identity(x):
1726 return x
1706 return x
1727
1707
1728 def gennodelst(revlog):
1708 def gennodelst(revlog):
1729 for r in xrange(0, revlog.count()):
1709 for r in xrange(0, revlog.count()):
1730 n = revlog.node(r)
1710 n = revlog.node(r)
1731 if revlog.linkrev(n) in revset:
1711 if revlog.linkrev(n) in revset:
1732 yield n
1712 yield n
1733
1713
1734 def changed_file_collector(changedfileset):
1714 def changed_file_collector(changedfileset):
1735 def collect_changed_files(clnode):
1715 def collect_changed_files(clnode):
1736 c = cl.read(clnode)
1716 c = cl.read(clnode)
1737 for fname in c[3]:
1717 for fname in c[3]:
1738 changedfileset[fname] = 1
1718 changedfileset[fname] = 1
1739 return collect_changed_files
1719 return collect_changed_files
1740
1720
1741 def lookuprevlink_func(revlog):
1721 def lookuprevlink_func(revlog):
1742 def lookuprevlink(n):
1722 def lookuprevlink(n):
1743 return cl.node(revlog.linkrev(n))
1723 return cl.node(revlog.linkrev(n))
1744 return lookuprevlink
1724 return lookuprevlink
1745
1725
1746 def gengroup():
1726 def gengroup():
1747 # construct a list of all changed files
1727 # construct a list of all changed files
1748 changedfiles = {}
1728 changedfiles = {}
1749
1729
1750 for chnk in cl.group(nodes, identity,
1730 for chnk in cl.group(nodes, identity,
1751 changed_file_collector(changedfiles)):
1731 changed_file_collector(changedfiles)):
1752 yield chnk
1732 yield chnk
1753 changedfiles = changedfiles.keys()
1733 changedfiles = changedfiles.keys()
1754 changedfiles.sort()
1734 changedfiles.sort()
1755
1735
1756 mnfst = self.manifest
1736 mnfst = self.manifest
1757 nodeiter = gennodelst(mnfst)
1737 nodeiter = gennodelst(mnfst)
1758 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1738 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1759 yield chnk
1739 yield chnk
1760
1740
1761 for fname in changedfiles:
1741 for fname in changedfiles:
1762 filerevlog = self.file(fname)
1742 filerevlog = self.file(fname)
1763 nodeiter = gennodelst(filerevlog)
1743 nodeiter = gennodelst(filerevlog)
1764 nodeiter = list(nodeiter)
1744 nodeiter = list(nodeiter)
1765 if nodeiter:
1745 if nodeiter:
1766 yield changegroup.genchunk(fname)
1746 yield changegroup.genchunk(fname)
1767 lookup = lookuprevlink_func(filerevlog)
1747 lookup = lookuprevlink_func(filerevlog)
1768 for chnk in filerevlog.group(nodeiter, lookup):
1748 for chnk in filerevlog.group(nodeiter, lookup):
1769 yield chnk
1749 yield chnk
1770
1750
1771 yield changegroup.closechunk()
1751 yield changegroup.closechunk()
1772
1752
1773 if nodes:
1753 if nodes:
1774 self.hook('outgoing', node=hex(nodes[0]), source=source)
1754 self.hook('outgoing', node=hex(nodes[0]), source=source)
1775
1755
1776 return util.chunkbuffer(gengroup())
1756 return util.chunkbuffer(gengroup())
1777
1757
1778 def addchangegroup(self, source, srctype, url):
1758 def addchangegroup(self, source, srctype, url):
1779 """add changegroup to repo.
1759 """add changegroup to repo.
1780
1760
1781 return values:
1761 return values:
1782 - nothing changed or no source: 0
1762 - nothing changed or no source: 0
1783 - more heads than before: 1+added heads (2..n)
1763 - more heads than before: 1+added heads (2..n)
1784 - less heads than before: -1-removed heads (-2..-n)
1764 - less heads than before: -1-removed heads (-2..-n)
1785 - number of heads stays the same: 1
1765 - number of heads stays the same: 1
1786 """
1766 """
1787 def csmap(x):
1767 def csmap(x):
1788 self.ui.debug(_("add changeset %s\n") % short(x))
1768 self.ui.debug(_("add changeset %s\n") % short(x))
1789 return cl.count()
1769 return cl.count()
1790
1770
1791 def revmap(x):
1771 def revmap(x):
1792 return cl.rev(x)
1772 return cl.rev(x)
1793
1773
1794 if not source:
1774 if not source:
1795 return 0
1775 return 0
1796
1776
1797 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1777 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1798
1778
1799 changesets = files = revisions = 0
1779 changesets = files = revisions = 0
1800
1780
1801 tr = self.transaction()
1781 tr = self.transaction()
1802
1782
1803 # write changelog data to temp files so concurrent readers will not see
1783 # write changelog data to temp files so concurrent readers will not see
1804 # inconsistent view
1784 # inconsistent view
1805 cl = None
1785 cl = None
1806 try:
1786 try:
1807 cl = appendfile.appendchangelog(self.sopener,
1787 cl = appendfile.appendchangelog(self.sopener)
1808 self.changelog.version)
1809
1810 oldheads = len(cl.heads())
1788 oldheads = len(cl.heads())
1811
1789
1812 # pull off the changeset group
1790 # pull off the changeset group
1813 self.ui.status(_("adding changesets\n"))
1791 self.ui.status(_("adding changesets\n"))
1814 cor = cl.count() - 1
1792 cor = cl.count() - 1
1815 chunkiter = changegroup.chunkiter(source)
1793 chunkiter = changegroup.chunkiter(source)
1816 if cl.addgroup(chunkiter, csmap, tr, 1) is None:
1794 if cl.addgroup(chunkiter, csmap, tr, 1) is None:
1817 raise util.Abort(_("received changelog group is empty"))
1795 raise util.Abort(_("received changelog group is empty"))
1818 cnr = cl.count() - 1
1796 cnr = cl.count() - 1
1819 changesets = cnr - cor
1797 changesets = cnr - cor
1820
1798
1821 # pull off the manifest group
1799 # pull off the manifest group
1822 self.ui.status(_("adding manifests\n"))
1800 self.ui.status(_("adding manifests\n"))
1823 chunkiter = changegroup.chunkiter(source)
1801 chunkiter = changegroup.chunkiter(source)
1824 # no need to check for empty manifest group here:
1802 # no need to check for empty manifest group here:
1825 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1803 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1826 # no new manifest will be created and the manifest group will
1804 # no new manifest will be created and the manifest group will
1827 # be empty during the pull
1805 # be empty during the pull
1828 self.manifest.addgroup(chunkiter, revmap, tr)
1806 self.manifest.addgroup(chunkiter, revmap, tr)
1829
1807
1830 # process the files
1808 # process the files
1831 self.ui.status(_("adding file changes\n"))
1809 self.ui.status(_("adding file changes\n"))
1832 while 1:
1810 while 1:
1833 f = changegroup.getchunk(source)
1811 f = changegroup.getchunk(source)
1834 if not f:
1812 if not f:
1835 break
1813 break
1836 self.ui.debug(_("adding %s revisions\n") % f)
1814 self.ui.debug(_("adding %s revisions\n") % f)
1837 fl = self.file(f)
1815 fl = self.file(f)
1838 o = fl.count()
1816 o = fl.count()
1839 chunkiter = changegroup.chunkiter(source)
1817 chunkiter = changegroup.chunkiter(source)
1840 if fl.addgroup(chunkiter, revmap, tr) is None:
1818 if fl.addgroup(chunkiter, revmap, tr) is None:
1841 raise util.Abort(_("received file revlog group is empty"))
1819 raise util.Abort(_("received file revlog group is empty"))
1842 revisions += fl.count() - o
1820 revisions += fl.count() - o
1843 files += 1
1821 files += 1
1844
1822
1845 cl.writedata()
1823 cl.writedata()
1846 finally:
1824 finally:
1847 if cl:
1825 if cl:
1848 cl.cleanup()
1826 cl.cleanup()
1849
1827
1850 # make changelog see real files again
1828 # make changelog see real files again
1851 self.changelog = changelog.changelog(self.sopener,
1829 self.changelog = changelog.changelog(self.sopener)
1852 self.changelog.version)
1853 self.changelog.checkinlinesize(tr)
1830 self.changelog.checkinlinesize(tr)
1854
1831
1855 newheads = len(self.changelog.heads())
1832 newheads = len(self.changelog.heads())
1856 heads = ""
1833 heads = ""
1857 if oldheads and newheads != oldheads:
1834 if oldheads and newheads != oldheads:
1858 heads = _(" (%+d heads)") % (newheads - oldheads)
1835 heads = _(" (%+d heads)") % (newheads - oldheads)
1859
1836
1860 self.ui.status(_("added %d changesets"
1837 self.ui.status(_("added %d changesets"
1861 " with %d changes to %d files%s\n")
1838 " with %d changes to %d files%s\n")
1862 % (changesets, revisions, files, heads))
1839 % (changesets, revisions, files, heads))
1863
1840
1864 if changesets > 0:
1841 if changesets > 0:
1865 self.hook('pretxnchangegroup', throw=True,
1842 self.hook('pretxnchangegroup', throw=True,
1866 node=hex(self.changelog.node(cor+1)), source=srctype,
1843 node=hex(self.changelog.node(cor+1)), source=srctype,
1867 url=url)
1844 url=url)
1868
1845
1869 tr.close()
1846 tr.close()
1870
1847
1871 if changesets > 0:
1848 if changesets > 0:
1872 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1849 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1873 source=srctype, url=url)
1850 source=srctype, url=url)
1874
1851
1875 for i in xrange(cor + 1, cnr + 1):
1852 for i in xrange(cor + 1, cnr + 1):
1876 self.hook("incoming", node=hex(self.changelog.node(i)),
1853 self.hook("incoming", node=hex(self.changelog.node(i)),
1877 source=srctype, url=url)
1854 source=srctype, url=url)
1878
1855
1879 # never return 0 here:
1856 # never return 0 here:
1880 if newheads < oldheads:
1857 if newheads < oldheads:
1881 return newheads - oldheads - 1
1858 return newheads - oldheads - 1
1882 else:
1859 else:
1883 return newheads - oldheads + 1
1860 return newheads - oldheads + 1
1884
1861
1885
1862
1886 def stream_in(self, remote):
1863 def stream_in(self, remote):
1887 fp = remote.stream_out()
1864 fp = remote.stream_out()
1888 l = fp.readline()
1865 l = fp.readline()
1889 try:
1866 try:
1890 resp = int(l)
1867 resp = int(l)
1891 except ValueError:
1868 except ValueError:
1892 raise util.UnexpectedOutput(
1869 raise util.UnexpectedOutput(
1893 _('Unexpected response from remote server:'), l)
1870 _('Unexpected response from remote server:'), l)
1894 if resp == 1:
1871 if resp == 1:
1895 raise util.Abort(_('operation forbidden by server'))
1872 raise util.Abort(_('operation forbidden by server'))
1896 elif resp == 2:
1873 elif resp == 2:
1897 raise util.Abort(_('locking the remote repository failed'))
1874 raise util.Abort(_('locking the remote repository failed'))
1898 elif resp != 0:
1875 elif resp != 0:
1899 raise util.Abort(_('the server sent an unknown error code'))
1876 raise util.Abort(_('the server sent an unknown error code'))
1900 self.ui.status(_('streaming all changes\n'))
1877 self.ui.status(_('streaming all changes\n'))
1901 l = fp.readline()
1878 l = fp.readline()
1902 try:
1879 try:
1903 total_files, total_bytes = map(int, l.split(' ', 1))
1880 total_files, total_bytes = map(int, l.split(' ', 1))
1904 except ValueError, TypeError:
1881 except ValueError, TypeError:
1905 raise util.UnexpectedOutput(
1882 raise util.UnexpectedOutput(
1906 _('Unexpected response from remote server:'), l)
1883 _('Unexpected response from remote server:'), l)
1907 self.ui.status(_('%d files to transfer, %s of data\n') %
1884 self.ui.status(_('%d files to transfer, %s of data\n') %
1908 (total_files, util.bytecount(total_bytes)))
1885 (total_files, util.bytecount(total_bytes)))
1909 start = time.time()
1886 start = time.time()
1910 for i in xrange(total_files):
1887 for i in xrange(total_files):
1911 # XXX doesn't support '\n' or '\r' in filenames
1888 # XXX doesn't support '\n' or '\r' in filenames
1912 l = fp.readline()
1889 l = fp.readline()
1913 try:
1890 try:
1914 name, size = l.split('\0', 1)
1891 name, size = l.split('\0', 1)
1915 size = int(size)
1892 size = int(size)
1916 except ValueError, TypeError:
1893 except ValueError, TypeError:
1917 raise util.UnexpectedOutput(
1894 raise util.UnexpectedOutput(
1918 _('Unexpected response from remote server:'), l)
1895 _('Unexpected response from remote server:'), l)
1919 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
1896 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
1920 ofp = self.sopener(name, 'w')
1897 ofp = self.sopener(name, 'w')
1921 for chunk in util.filechunkiter(fp, limit=size):
1898 for chunk in util.filechunkiter(fp, limit=size):
1922 ofp.write(chunk)
1899 ofp.write(chunk)
1923 ofp.close()
1900 ofp.close()
1924 elapsed = time.time() - start
1901 elapsed = time.time() - start
1925 if elapsed <= 0:
1902 if elapsed <= 0:
1926 elapsed = 0.001
1903 elapsed = 0.001
1927 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1904 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1928 (util.bytecount(total_bytes), elapsed,
1905 (util.bytecount(total_bytes), elapsed,
1929 util.bytecount(total_bytes / elapsed)))
1906 util.bytecount(total_bytes / elapsed)))
1930 self.reload()
1907 self.reload()
1931 return len(self.heads()) + 1
1908 return len(self.heads()) + 1
1932
1909
1933 def clone(self, remote, heads=[], stream=False):
1910 def clone(self, remote, heads=[], stream=False):
1934 '''clone remote repository.
1911 '''clone remote repository.
1935
1912
1936 keyword arguments:
1913 keyword arguments:
1937 heads: list of revs to clone (forces use of pull)
1914 heads: list of revs to clone (forces use of pull)
1938 stream: use streaming clone if possible'''
1915 stream: use streaming clone if possible'''
1939
1916
1940 # now, all clients that can request uncompressed clones can
1917 # now, all clients that can request uncompressed clones can
1941 # read repo formats supported by all servers that can serve
1918 # read repo formats supported by all servers that can serve
1942 # them.
1919 # them.
1943
1920
1944 # if revlog format changes, client will have to check version
1921 # if revlog format changes, client will have to check version
1945 # and format flags on "stream" capability, and use
1922 # and format flags on "stream" capability, and use
1946 # uncompressed only if compatible.
1923 # uncompressed only if compatible.
1947
1924
1948 if stream and not heads and remote.capable('stream'):
1925 if stream and not heads and remote.capable('stream'):
1949 return self.stream_in(remote)
1926 return self.stream_in(remote)
1950 return self.pull(remote, heads)
1927 return self.pull(remote, heads)
1951
1928
1952 # used to avoid circular references so destructors work
1929 # used to avoid circular references so destructors work
1953 def aftertrans(files):
1930 def aftertrans(files):
1954 renamefiles = [tuple(t) for t in files]
1931 renamefiles = [tuple(t) for t in files]
1955 def a():
1932 def a():
1956 for src, dest in renamefiles:
1933 for src, dest in renamefiles:
1957 util.rename(src, dest)
1934 util.rename(src, dest)
1958 return a
1935 return a
1959
1936
1960 def instance(ui, path, create):
1937 def instance(ui, path, create):
1961 return localrepository(ui, util.drop_scheme('file', path), create)
1938 return localrepository(ui, util.drop_scheme('file', path), create)
1962
1939
1963 def islocal(path):
1940 def islocal(path):
1964 return True
1941 return True
@@ -1,215 +1,215 b''
1 # manifest.py - manifest revision class for mercurial
1 # manifest.py - manifest revision class for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from revlog import *
8 from revlog import *
9 from i18n import _
9 from i18n import _
10 import array, bisect, struct, mdiff
10 import array, bisect, struct, mdiff
11
11
12 class manifestdict(dict):
12 class manifestdict(dict):
13 def __init__(self, mapping=None, flags=None):
13 def __init__(self, mapping=None, flags=None):
14 if mapping is None: mapping = {}
14 if mapping is None: mapping = {}
15 if flags is None: flags = {}
15 if flags is None: flags = {}
16 dict.__init__(self, mapping)
16 dict.__init__(self, mapping)
17 self._flags = flags
17 self._flags = flags
18 def flags(self, f):
18 def flags(self, f):
19 return self._flags.get(f, "")
19 return self._flags.get(f, "")
20 def execf(self, f):
20 def execf(self, f):
21 "test for executable in manifest flags"
21 "test for executable in manifest flags"
22 return "x" in self.flags(f)
22 return "x" in self.flags(f)
23 def linkf(self, f):
23 def linkf(self, f):
24 "test for symlink in manifest flags"
24 "test for symlink in manifest flags"
25 return "l" in self.flags(f)
25 return "l" in self.flags(f)
26 def rawset(self, f, entry):
26 def rawset(self, f, entry):
27 self[f] = bin(entry[:40])
27 self[f] = bin(entry[:40])
28 fl = entry[40:-1]
28 fl = entry[40:-1]
29 if fl: self._flags[f] = fl
29 if fl: self._flags[f] = fl
30 def set(self, f, execf=False, linkf=False):
30 def set(self, f, execf=False, linkf=False):
31 if linkf: self._flags[f] = "l"
31 if linkf: self._flags[f] = "l"
32 elif execf: self._flags[f] = "x"
32 elif execf: self._flags[f] = "x"
33 else: self._flags[f] = ""
33 else: self._flags[f] = ""
34 def copy(self):
34 def copy(self):
35 return manifestdict(dict.copy(self), dict.copy(self._flags))
35 return manifestdict(dict.copy(self), dict.copy(self._flags))
36
36
37 class manifest(revlog):
37 class manifest(revlog):
38 def __init__(self, opener, defversion=REVLOGV0):
38 def __init__(self, opener):
39 self.mapcache = None
39 self.mapcache = None
40 self.listcache = None
40 self.listcache = None
41 revlog.__init__(self, opener, "00manifest.i", defversion)
41 revlog.__init__(self, opener, "00manifest.i")
42
42
43 def parselines(self, lines):
43 def parselines(self, lines):
44 for l in lines.splitlines(1):
44 for l in lines.splitlines(1):
45 yield l.split('\0')
45 yield l.split('\0')
46
46
47 def readdelta(self, node):
47 def readdelta(self, node):
48 delta = mdiff.patchtext(self.delta(node))
48 delta = mdiff.patchtext(self.delta(node))
49 deltamap = manifestdict()
49 deltamap = manifestdict()
50 for f, n in self.parselines(delta):
50 for f, n in self.parselines(delta):
51 deltamap.rawset(f, n)
51 deltamap.rawset(f, n)
52 return deltamap
52 return deltamap
53
53
54 def read(self, node):
54 def read(self, node):
55 if node == nullid: return manifestdict() # don't upset local cache
55 if node == nullid: return manifestdict() # don't upset local cache
56 if self.mapcache and self.mapcache[0] == node:
56 if self.mapcache and self.mapcache[0] == node:
57 return self.mapcache[1]
57 return self.mapcache[1]
58 text = self.revision(node)
58 text = self.revision(node)
59 self.listcache = array.array('c', text)
59 self.listcache = array.array('c', text)
60 mapping = manifestdict()
60 mapping = manifestdict()
61 for f, n in self.parselines(text):
61 for f, n in self.parselines(text):
62 mapping.rawset(f, n)
62 mapping.rawset(f, n)
63 self.mapcache = (node, mapping)
63 self.mapcache = (node, mapping)
64 return mapping
64 return mapping
65
65
66 def _search(self, m, s, lo=0, hi=None):
66 def _search(self, m, s, lo=0, hi=None):
67 '''return a tuple (start, end) that says where to find s within m.
67 '''return a tuple (start, end) that says where to find s within m.
68
68
69 If the string is found m[start:end] are the line containing
69 If the string is found m[start:end] are the line containing
70 that string. If start == end the string was not found and
70 that string. If start == end the string was not found and
71 they indicate the proper sorted insertion point. This was
71 they indicate the proper sorted insertion point. This was
72 taken from bisect_left, and modified to find line start/end as
72 taken from bisect_left, and modified to find line start/end as
73 it goes along.
73 it goes along.
74
74
75 m should be a buffer or a string
75 m should be a buffer or a string
76 s is a string'''
76 s is a string'''
77 def advance(i, c):
77 def advance(i, c):
78 while i < lenm and m[i] != c:
78 while i < lenm and m[i] != c:
79 i += 1
79 i += 1
80 return i
80 return i
81 lenm = len(m)
81 lenm = len(m)
82 if not hi:
82 if not hi:
83 hi = lenm
83 hi = lenm
84 while lo < hi:
84 while lo < hi:
85 mid = (lo + hi) // 2
85 mid = (lo + hi) // 2
86 start = mid
86 start = mid
87 while start > 0 and m[start-1] != '\n':
87 while start > 0 and m[start-1] != '\n':
88 start -= 1
88 start -= 1
89 end = advance(start, '\0')
89 end = advance(start, '\0')
90 if m[start:end] < s:
90 if m[start:end] < s:
91 # we know that after the null there are 40 bytes of sha1
91 # we know that after the null there are 40 bytes of sha1
92 # this translates to the bisect lo = mid + 1
92 # this translates to the bisect lo = mid + 1
93 lo = advance(end + 40, '\n') + 1
93 lo = advance(end + 40, '\n') + 1
94 else:
94 else:
95 # this translates to the bisect hi = mid
95 # this translates to the bisect hi = mid
96 hi = start
96 hi = start
97 end = advance(lo, '\0')
97 end = advance(lo, '\0')
98 found = m[lo:end]
98 found = m[lo:end]
99 if cmp(s, found) == 0:
99 if cmp(s, found) == 0:
100 # we know that after the null there are 40 bytes of sha1
100 # we know that after the null there are 40 bytes of sha1
101 end = advance(end + 40, '\n')
101 end = advance(end + 40, '\n')
102 return (lo, end+1)
102 return (lo, end+1)
103 else:
103 else:
104 return (lo, lo)
104 return (lo, lo)
105
105
106 def find(self, node, f):
106 def find(self, node, f):
107 '''look up entry for a single file efficiently.
107 '''look up entry for a single file efficiently.
108 return (node, flags) pair if found, (None, None) if not.'''
108 return (node, flags) pair if found, (None, None) if not.'''
109 if self.mapcache and node == self.mapcache[0]:
109 if self.mapcache and node == self.mapcache[0]:
110 return self.mapcache[1].get(f), self.mapcache[1].flags(f)
110 return self.mapcache[1].get(f), self.mapcache[1].flags(f)
111 text = self.revision(node)
111 text = self.revision(node)
112 start, end = self._search(text, f)
112 start, end = self._search(text, f)
113 if start == end:
113 if start == end:
114 return None, None
114 return None, None
115 l = text[start:end]
115 l = text[start:end]
116 f, n = l.split('\0')
116 f, n = l.split('\0')
117 return bin(n[:40]), n[40:-1]
117 return bin(n[:40]), n[40:-1]
118
118
119 def add(self, map, transaction, link, p1=None, p2=None,
119 def add(self, map, transaction, link, p1=None, p2=None,
120 changed=None):
120 changed=None):
121 # apply the changes collected during the bisect loop to our addlist
121 # apply the changes collected during the bisect loop to our addlist
122 # return a delta suitable for addrevision
122 # return a delta suitable for addrevision
123 def addlistdelta(addlist, x):
123 def addlistdelta(addlist, x):
124 # start from the bottom up
124 # start from the bottom up
125 # so changes to the offsets don't mess things up.
125 # so changes to the offsets don't mess things up.
126 i = len(x)
126 i = len(x)
127 while i > 0:
127 while i > 0:
128 i -= 1
128 i -= 1
129 start = x[i][0]
129 start = x[i][0]
130 end = x[i][1]
130 end = x[i][1]
131 if x[i][2]:
131 if x[i][2]:
132 addlist[start:end] = array.array('c', x[i][2])
132 addlist[start:end] = array.array('c', x[i][2])
133 else:
133 else:
134 del addlist[start:end]
134 del addlist[start:end]
135 return "".join([struct.pack(">lll", d[0], d[1], len(d[2])) + d[2] \
135 return "".join([struct.pack(">lll", d[0], d[1], len(d[2])) + d[2] \
136 for d in x ])
136 for d in x ])
137
137
138 def checkforbidden(f):
138 def checkforbidden(f):
139 if '\n' in f or '\r' in f:
139 if '\n' in f or '\r' in f:
140 raise RevlogError(_("'\\n' and '\\r' disallowed in filenames"))
140 raise RevlogError(_("'\\n' and '\\r' disallowed in filenames"))
141
141
142 # if we're using the listcache, make sure it is valid and
142 # if we're using the listcache, make sure it is valid and
143 # parented by the same node we're diffing against
143 # parented by the same node we're diffing against
144 if not changed or not self.listcache or not p1 or \
144 if not changed or not self.listcache or not p1 or \
145 self.mapcache[0] != p1:
145 self.mapcache[0] != p1:
146 files = map.keys()
146 files = map.keys()
147 files.sort()
147 files.sort()
148
148
149 for f in files:
149 for f in files:
150 checkforbidden(f)
150 checkforbidden(f)
151
151
152 # if this is changed to support newlines in filenames,
152 # if this is changed to support newlines in filenames,
153 # be sure to check the templates/ dir again (especially *-raw.tmpl)
153 # be sure to check the templates/ dir again (especially *-raw.tmpl)
154 text = ["%s\000%s%s\n" % (f, hex(map[f]), map.flags(f)) for f in files]
154 text = ["%s\000%s%s\n" % (f, hex(map[f]), map.flags(f)) for f in files]
155 self.listcache = array.array('c', "".join(text))
155 self.listcache = array.array('c', "".join(text))
156 cachedelta = None
156 cachedelta = None
157 else:
157 else:
158 addlist = self.listcache
158 addlist = self.listcache
159
159
160 for f in changed[0]:
160 for f in changed[0]:
161 checkforbidden(f)
161 checkforbidden(f)
162 # combine the changed lists into one list for sorting
162 # combine the changed lists into one list for sorting
163 work = [[x, 0] for x in changed[0]]
163 work = [[x, 0] for x in changed[0]]
164 work[len(work):] = [[x, 1] for x in changed[1]]
164 work[len(work):] = [[x, 1] for x in changed[1]]
165 work.sort()
165 work.sort()
166
166
167 delta = []
167 delta = []
168 dstart = None
168 dstart = None
169 dend = None
169 dend = None
170 dline = [""]
170 dline = [""]
171 start = 0
171 start = 0
172 # zero copy representation of addlist as a buffer
172 # zero copy representation of addlist as a buffer
173 addbuf = buffer(addlist)
173 addbuf = buffer(addlist)
174
174
175 # start with a readonly loop that finds the offset of
175 # start with a readonly loop that finds the offset of
176 # each line and creates the deltas
176 # each line and creates the deltas
177 for w in work:
177 for w in work:
178 f = w[0]
178 f = w[0]
179 # bs will either be the index of the item or the insert point
179 # bs will either be the index of the item or the insert point
180 start, end = self._search(addbuf, f, start)
180 start, end = self._search(addbuf, f, start)
181 if w[1] == 0:
181 if w[1] == 0:
182 l = "%s\000%s%s\n" % (f, hex(map[f]), map.flags(f))
182 l = "%s\000%s%s\n" % (f, hex(map[f]), map.flags(f))
183 else:
183 else:
184 l = ""
184 l = ""
185 if start == end and w[1] == 1:
185 if start == end and w[1] == 1:
186 # item we want to delete was not found, error out
186 # item we want to delete was not found, error out
187 raise AssertionError(
187 raise AssertionError(
188 _("failed to remove %s from manifest") % f)
188 _("failed to remove %s from manifest") % f)
189 if dstart != None and dstart <= start and dend >= start:
189 if dstart != None and dstart <= start and dend >= start:
190 if dend < end:
190 if dend < end:
191 dend = end
191 dend = end
192 if l:
192 if l:
193 dline.append(l)
193 dline.append(l)
194 else:
194 else:
195 if dstart != None:
195 if dstart != None:
196 delta.append([dstart, dend, "".join(dline)])
196 delta.append([dstart, dend, "".join(dline)])
197 dstart = start
197 dstart = start
198 dend = end
198 dend = end
199 dline = [l]
199 dline = [l]
200
200
201 if dstart != None:
201 if dstart != None:
202 delta.append([dstart, dend, "".join(dline)])
202 delta.append([dstart, dend, "".join(dline)])
203 # apply the delta to the addlist, and get a delta for addrevision
203 # apply the delta to the addlist, and get a delta for addrevision
204 cachedelta = addlistdelta(addlist, delta)
204 cachedelta = addlistdelta(addlist, delta)
205
205
206 # the delta is only valid if we've been processing the tip revision
206 # the delta is only valid if we've been processing the tip revision
207 if self.mapcache[0] != self.tip():
207 if self.mapcache[0] != self.tip():
208 cachedelta = None
208 cachedelta = None
209 self.listcache = addlist
209 self.listcache = addlist
210
210
211 n = self.addrevision(buffer(self.listcache), transaction, link, p1, \
211 n = self.addrevision(buffer(self.listcache), transaction, link, p1, \
212 p2, cachedelta)
212 p2, cachedelta)
213 self.mapcache = (n, map)
213 self.mapcache = (n, map)
214
214
215 return n
215 return n
@@ -1,1297 +1,1299 b''
1 """
1 """
2 revlog.py - storage back-end for mercurial
2 revlog.py - storage back-end for mercurial
3
3
4 This provides efficient delta storage with O(1) retrieve and append
4 This provides efficient delta storage with O(1) retrieve and append
5 and O(changes) merge between branches
5 and O(changes) merge between branches
6
6
7 Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
7 Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
8
8
9 This software may be used and distributed according to the terms
9 This software may be used and distributed according to the terms
10 of the GNU General Public License, incorporated herein by reference.
10 of the GNU General Public License, incorporated herein by reference.
11 """
11 """
12
12
13 from node import *
13 from node import *
14 from i18n import _
14 from i18n import _
15 import binascii, changegroup, errno, ancestor, mdiff, os
15 import binascii, changegroup, errno, ancestor, mdiff, os
16 import sha, struct, util, zlib
16 import sha, struct, util, zlib
17
17
18 # revlog version strings
18 # revlog version strings
19 REVLOGV0 = 0
19 REVLOGV0 = 0
20 REVLOGNG = 1
20 REVLOGNG = 1
21
21
22 # revlog flags
22 # revlog flags
23 REVLOGNGINLINEDATA = (1 << 16)
23 REVLOGNGINLINEDATA = (1 << 16)
24 REVLOG_DEFAULT_FLAGS = REVLOGNGINLINEDATA
24 REVLOG_DEFAULT_FLAGS = REVLOGNGINLINEDATA
25
25
26 REVLOG_DEFAULT_FORMAT = REVLOGNG
26 REVLOG_DEFAULT_FORMAT = REVLOGNG
27 REVLOG_DEFAULT_VERSION = REVLOG_DEFAULT_FORMAT | REVLOG_DEFAULT_FLAGS
27 REVLOG_DEFAULT_VERSION = REVLOG_DEFAULT_FORMAT | REVLOG_DEFAULT_FLAGS
28
28
29 def flagstr(flag):
29 def flagstr(flag):
30 if flag == "inline":
30 if flag == "inline":
31 return REVLOGNGINLINEDATA
31 return REVLOGNGINLINEDATA
32 raise RevlogError(_("unknown revlog flag %s") % flag)
32 raise RevlogError(_("unknown revlog flag %s") % flag)
33
33
34 def hash(text, p1, p2):
34 def hash(text, p1, p2):
35 """generate a hash from the given text and its parent hashes
35 """generate a hash from the given text and its parent hashes
36
36
37 This hash combines both the current file contents and its history
37 This hash combines both the current file contents and its history
38 in a manner that makes it easy to distinguish nodes with the same
38 in a manner that makes it easy to distinguish nodes with the same
39 content in the revision graph.
39 content in the revision graph.
40 """
40 """
41 l = [p1, p2]
41 l = [p1, p2]
42 l.sort()
42 l.sort()
43 s = sha.new(l[0])
43 s = sha.new(l[0])
44 s.update(l[1])
44 s.update(l[1])
45 s.update(text)
45 s.update(text)
46 return s.digest()
46 return s.digest()
47
47
48 def compress(text):
48 def compress(text):
49 """ generate a possibly-compressed representation of text """
49 """ generate a possibly-compressed representation of text """
50 if not text: return ("", text)
50 if not text: return ("", text)
51 if len(text) < 44:
51 if len(text) < 44:
52 if text[0] == '\0': return ("", text)
52 if text[0] == '\0': return ("", text)
53 return ('u', text)
53 return ('u', text)
54 bin = zlib.compress(text)
54 bin = zlib.compress(text)
55 if len(bin) > len(text):
55 if len(bin) > len(text):
56 if text[0] == '\0': return ("", text)
56 if text[0] == '\0': return ("", text)
57 return ('u', text)
57 return ('u', text)
58 return ("", bin)
58 return ("", bin)
59
59
60 def decompress(bin):
60 def decompress(bin):
61 """ decompress the given input """
61 """ decompress the given input """
62 if not bin: return bin
62 if not bin: return bin
63 t = bin[0]
63 t = bin[0]
64 if t == '\0': return bin
64 if t == '\0': return bin
65 if t == 'x': return zlib.decompress(bin)
65 if t == 'x': return zlib.decompress(bin)
66 if t == 'u': return bin[1:]
66 if t == 'u': return bin[1:]
67 raise RevlogError(_("unknown compression type %r") % t)
67 raise RevlogError(_("unknown compression type %r") % t)
68
68
69 indexformatv0 = ">4l20s20s20s"
69 indexformatv0 = ">4l20s20s20s"
70 v0shaoffset = 56
70 v0shaoffset = 56
71 # index ng:
71 # index ng:
72 # 6 bytes offset
72 # 6 bytes offset
73 # 2 bytes flags
73 # 2 bytes flags
74 # 4 bytes compressed length
74 # 4 bytes compressed length
75 # 4 bytes uncompressed length
75 # 4 bytes uncompressed length
76 # 4 bytes: base rev
76 # 4 bytes: base rev
77 # 4 bytes link rev
77 # 4 bytes link rev
78 # 4 bytes parent 1 rev
78 # 4 bytes parent 1 rev
79 # 4 bytes parent 2 rev
79 # 4 bytes parent 2 rev
80 # 32 bytes: nodeid
80 # 32 bytes: nodeid
81 indexformatng = ">Qiiiiii20s12x"
81 indexformatng = ">Qiiiiii20s12x"
82 ngshaoffset = 32
82 ngshaoffset = 32
83 versionformat = ">I"
83 versionformat = ">I"
84
84
85 class lazyparser(object):
85 class lazyparser(object):
86 """
86 """
87 this class avoids the need to parse the entirety of large indices
87 this class avoids the need to parse the entirety of large indices
88 """
88 """
89
89
90 # lazyparser is not safe to use on windows if win32 extensions not
90 # lazyparser is not safe to use on windows if win32 extensions not
91 # available. it keeps file handle open, which make it not possible
91 # available. it keeps file handle open, which make it not possible
92 # to break hardlinks on local cloned repos.
92 # to break hardlinks on local cloned repos.
93 safe_to_use = os.name != 'nt' or (not util.is_win_9x() and
93 safe_to_use = os.name != 'nt' or (not util.is_win_9x() and
94 hasattr(util, 'win32api'))
94 hasattr(util, 'win32api'))
95
95
96 def __init__(self, dataf, size, indexformat, shaoffset):
96 def __init__(self, dataf, size, indexformat, shaoffset):
97 self.dataf = dataf
97 self.dataf = dataf
98 self.format = indexformat
98 self.format = indexformat
99 self.s = struct.calcsize(indexformat)
99 self.s = struct.calcsize(indexformat)
100 self.indexformat = indexformat
100 self.indexformat = indexformat
101 self.datasize = size
101 self.datasize = size
102 self.l = size/self.s
102 self.l = size/self.s
103 self.index = [None] * self.l
103 self.index = [None] * self.l
104 self.map = {nullid: nullrev}
104 self.map = {nullid: nullrev}
105 self.allmap = 0
105 self.allmap = 0
106 self.all = 0
106 self.all = 0
107 self.mapfind_count = 0
107 self.mapfind_count = 0
108 self.shaoffset = shaoffset
108 self.shaoffset = shaoffset
109
109
110 def loadmap(self):
110 def loadmap(self):
111 """
111 """
112 during a commit, we need to make sure the rev being added is
112 during a commit, we need to make sure the rev being added is
113 not a duplicate. This requires loading the entire index,
113 not a duplicate. This requires loading the entire index,
114 which is fairly slow. loadmap can load up just the node map,
114 which is fairly slow. loadmap can load up just the node map,
115 which takes much less time.
115 which takes much less time.
116 """
116 """
117 if self.allmap: return
117 if self.allmap: return
118 end = self.datasize
118 end = self.datasize
119 self.allmap = 1
119 self.allmap = 1
120 cur = 0
120 cur = 0
121 count = 0
121 count = 0
122 blocksize = self.s * 256
122 blocksize = self.s * 256
123 self.dataf.seek(0)
123 self.dataf.seek(0)
124 while cur < end:
124 while cur < end:
125 data = self.dataf.read(blocksize)
125 data = self.dataf.read(blocksize)
126 off = 0
126 off = 0
127 for x in xrange(256):
127 for x in xrange(256):
128 n = data[off + self.shaoffset:off + self.shaoffset + 20]
128 n = data[off + self.shaoffset:off + self.shaoffset + 20]
129 self.map[n] = count
129 self.map[n] = count
130 count += 1
130 count += 1
131 if count >= self.l:
131 if count >= self.l:
132 break
132 break
133 off += self.s
133 off += self.s
134 cur += blocksize
134 cur += blocksize
135
135
136 def loadblock(self, blockstart, blocksize, data=None):
136 def loadblock(self, blockstart, blocksize, data=None):
137 if self.all: return
137 if self.all: return
138 if data is None:
138 if data is None:
139 self.dataf.seek(blockstart)
139 self.dataf.seek(blockstart)
140 if blockstart + blocksize > self.datasize:
140 if blockstart + blocksize > self.datasize:
141 # the revlog may have grown since we've started running,
141 # the revlog may have grown since we've started running,
142 # but we don't have space in self.index for more entries.
142 # but we don't have space in self.index for more entries.
143 # limit blocksize so that we don't get too much data.
143 # limit blocksize so that we don't get too much data.
144 blocksize = max(self.datasize - blockstart, 0)
144 blocksize = max(self.datasize - blockstart, 0)
145 data = self.dataf.read(blocksize)
145 data = self.dataf.read(blocksize)
146 lend = len(data) / self.s
146 lend = len(data) / self.s
147 i = blockstart / self.s
147 i = blockstart / self.s
148 off = 0
148 off = 0
149 # lazyindex supports __delitem__
149 # lazyindex supports __delitem__
150 if lend > len(self.index) - i:
150 if lend > len(self.index) - i:
151 lend = len(self.index) - i
151 lend = len(self.index) - i
152 for x in xrange(lend):
152 for x in xrange(lend):
153 if self.index[i + x] == None:
153 if self.index[i + x] == None:
154 b = data[off : off + self.s]
154 b = data[off : off + self.s]
155 self.index[i + x] = b
155 self.index[i + x] = b
156 n = b[self.shaoffset:self.shaoffset + 20]
156 n = b[self.shaoffset:self.shaoffset + 20]
157 self.map[n] = i + x
157 self.map[n] = i + x
158 off += self.s
158 off += self.s
159
159
160 def findnode(self, node):
160 def findnode(self, node):
161 """search backwards through the index file for a specific node"""
161 """search backwards through the index file for a specific node"""
162 if self.allmap: return None
162 if self.allmap: return None
163
163
164 # hg log will cause many many searches for the manifest
164 # hg log will cause many many searches for the manifest
165 # nodes. After we get called a few times, just load the whole
165 # nodes. After we get called a few times, just load the whole
166 # thing.
166 # thing.
167 if self.mapfind_count > 8:
167 if self.mapfind_count > 8:
168 self.loadmap()
168 self.loadmap()
169 if node in self.map:
169 if node in self.map:
170 return node
170 return node
171 return None
171 return None
172 self.mapfind_count += 1
172 self.mapfind_count += 1
173 last = self.l - 1
173 last = self.l - 1
174 while self.index[last] != None:
174 while self.index[last] != None:
175 if last == 0:
175 if last == 0:
176 self.all = 1
176 self.all = 1
177 self.allmap = 1
177 self.allmap = 1
178 return None
178 return None
179 last -= 1
179 last -= 1
180 end = (last + 1) * self.s
180 end = (last + 1) * self.s
181 blocksize = self.s * 256
181 blocksize = self.s * 256
182 while end >= 0:
182 while end >= 0:
183 start = max(end - blocksize, 0)
183 start = max(end - blocksize, 0)
184 self.dataf.seek(start)
184 self.dataf.seek(start)
185 data = self.dataf.read(end - start)
185 data = self.dataf.read(end - start)
186 findend = end - start
186 findend = end - start
187 while True:
187 while True:
188 # we're searching backwards, so weh have to make sure
188 # we're searching backwards, so weh have to make sure
189 # we don't find a changeset where this node is a parent
189 # we don't find a changeset where this node is a parent
190 off = data.rfind(node, 0, findend)
190 off = data.rfind(node, 0, findend)
191 findend = off
191 findend = off
192 if off >= 0:
192 if off >= 0:
193 i = off / self.s
193 i = off / self.s
194 off = i * self.s
194 off = i * self.s
195 n = data[off + self.shaoffset:off + self.shaoffset + 20]
195 n = data[off + self.shaoffset:off + self.shaoffset + 20]
196 if n == node:
196 if n == node:
197 self.map[n] = i + start / self.s
197 self.map[n] = i + start / self.s
198 return node
198 return node
199 else:
199 else:
200 break
200 break
201 end -= blocksize
201 end -= blocksize
202 return None
202 return None
203
203
204 def loadindex(self, i=None, end=None):
204 def loadindex(self, i=None, end=None):
205 if self.all: return
205 if self.all: return
206 all = False
206 all = False
207 if i == None:
207 if i == None:
208 blockstart = 0
208 blockstart = 0
209 blocksize = (512 / self.s) * self.s
209 blocksize = (512 / self.s) * self.s
210 end = self.datasize
210 end = self.datasize
211 all = True
211 all = True
212 else:
212 else:
213 if end:
213 if end:
214 blockstart = i * self.s
214 blockstart = i * self.s
215 end = end * self.s
215 end = end * self.s
216 blocksize = end - blockstart
216 blocksize = end - blockstart
217 else:
217 else:
218 blockstart = (i & ~(32)) * self.s
218 blockstart = (i & ~(32)) * self.s
219 blocksize = self.s * 64
219 blocksize = self.s * 64
220 end = blockstart + blocksize
220 end = blockstart + blocksize
221 while blockstart < end:
221 while blockstart < end:
222 self.loadblock(blockstart, blocksize)
222 self.loadblock(blockstart, blocksize)
223 blockstart += blocksize
223 blockstart += blocksize
224 if all: self.all = True
224 if all: self.all = True
225
225
226 class lazyindex(object):
226 class lazyindex(object):
227 """a lazy version of the index array"""
227 """a lazy version of the index array"""
228 def __init__(self, parser):
228 def __init__(self, parser):
229 self.p = parser
229 self.p = parser
230 def __len__(self):
230 def __len__(self):
231 return len(self.p.index)
231 return len(self.p.index)
232 def load(self, pos):
232 def load(self, pos):
233 if pos < 0:
233 if pos < 0:
234 pos += len(self.p.index)
234 pos += len(self.p.index)
235 self.p.loadindex(pos)
235 self.p.loadindex(pos)
236 return self.p.index[pos]
236 return self.p.index[pos]
237 def __getitem__(self, pos):
237 def __getitem__(self, pos):
238 ret = self.p.index[pos] or self.load(pos)
238 ret = self.p.index[pos] or self.load(pos)
239 if isinstance(ret, str):
239 if isinstance(ret, str):
240 ret = struct.unpack(self.p.indexformat, ret)
240 ret = struct.unpack(self.p.indexformat, ret)
241 return ret
241 return ret
242 def __setitem__(self, pos, item):
242 def __setitem__(self, pos, item):
243 self.p.index[pos] = item
243 self.p.index[pos] = item
244 def __delitem__(self, pos):
244 def __delitem__(self, pos):
245 del self.p.index[pos]
245 del self.p.index[pos]
246 def append(self, e):
246 def append(self, e):
247 self.p.index.append(e)
247 self.p.index.append(e)
248
248
249 class lazymap(object):
249 class lazymap(object):
250 """a lazy version of the node map"""
250 """a lazy version of the node map"""
251 def __init__(self, parser):
251 def __init__(self, parser):
252 self.p = parser
252 self.p = parser
253 def load(self, key):
253 def load(self, key):
254 n = self.p.findnode(key)
254 n = self.p.findnode(key)
255 if n == None:
255 if n == None:
256 raise KeyError(key)
256 raise KeyError(key)
257 def __contains__(self, key):
257 def __contains__(self, key):
258 if key in self.p.map:
258 if key in self.p.map:
259 return True
259 return True
260 self.p.loadmap()
260 self.p.loadmap()
261 return key in self.p.map
261 return key in self.p.map
262 def __iter__(self):
262 def __iter__(self):
263 yield nullid
263 yield nullid
264 for i in xrange(self.p.l):
264 for i in xrange(self.p.l):
265 ret = self.p.index[i]
265 ret = self.p.index[i]
266 if not ret:
266 if not ret:
267 self.p.loadindex(i)
267 self.p.loadindex(i)
268 ret = self.p.index[i]
268 ret = self.p.index[i]
269 if isinstance(ret, str):
269 if isinstance(ret, str):
270 ret = struct.unpack(self.p.indexformat, ret)
270 ret = struct.unpack(self.p.indexformat, ret)
271 yield ret[-1]
271 yield ret[-1]
272 def __getitem__(self, key):
272 def __getitem__(self, key):
273 try:
273 try:
274 return self.p.map[key]
274 return self.p.map[key]
275 except KeyError:
275 except KeyError:
276 try:
276 try:
277 self.load(key)
277 self.load(key)
278 return self.p.map[key]
278 return self.p.map[key]
279 except KeyError:
279 except KeyError:
280 raise KeyError("node " + hex(key))
280 raise KeyError("node " + hex(key))
281 def __setitem__(self, key, val):
281 def __setitem__(self, key, val):
282 self.p.map[key] = val
282 self.p.map[key] = val
283 def __delitem__(self, key):
283 def __delitem__(self, key):
284 del self.p.map[key]
284 del self.p.map[key]
285
285
286 class RevlogError(Exception): pass
286 class RevlogError(Exception): pass
287 class LookupError(RevlogError): pass
287 class LookupError(RevlogError): pass
288
288
289 class revlog(object):
289 class revlog(object):
290 """
290 """
291 the underlying revision storage object
291 the underlying revision storage object
292
292
293 A revlog consists of two parts, an index and the revision data.
293 A revlog consists of two parts, an index and the revision data.
294
294
295 The index is a file with a fixed record size containing
295 The index is a file with a fixed record size containing
296 information on each revision, includings its nodeid (hash), the
296 information on each revision, includings its nodeid (hash), the
297 nodeids of its parents, the position and offset of its data within
297 nodeids of its parents, the position and offset of its data within
298 the data file, and the revision it's based on. Finally, each entry
298 the data file, and the revision it's based on. Finally, each entry
299 contains a linkrev entry that can serve as a pointer to external
299 contains a linkrev entry that can serve as a pointer to external
300 data.
300 data.
301
301
302 The revision data itself is a linear collection of data chunks.
302 The revision data itself is a linear collection of data chunks.
303 Each chunk represents a revision and is usually represented as a
303 Each chunk represents a revision and is usually represented as a
304 delta against the previous chunk. To bound lookup time, runs of
304 delta against the previous chunk. To bound lookup time, runs of
305 deltas are limited to about 2 times the length of the original
305 deltas are limited to about 2 times the length of the original
306 version data. This makes retrieval of a version proportional to
306 version data. This makes retrieval of a version proportional to
307 its size, or O(1) relative to the number of revisions.
307 its size, or O(1) relative to the number of revisions.
308
308
309 Both pieces of the revlog are written to in an append-only
309 Both pieces of the revlog are written to in an append-only
310 fashion, which means we never need to rewrite a file to insert or
310 fashion, which means we never need to rewrite a file to insert or
311 remove data, and can use some simple techniques to avoid the need
311 remove data, and can use some simple techniques to avoid the need
312 for locking while reading.
312 for locking while reading.
313 """
313 """
314 def __init__(self, opener, indexfile, defversion=REVLOG_DEFAULT_VERSION):
314 def __init__(self, opener, indexfile):
315 """
315 """
316 create a revlog object
316 create a revlog object
317
317
318 opener is a function that abstracts the file opening operation
318 opener is a function that abstracts the file opening operation
319 and can be used to implement COW semantics or the like.
319 and can be used to implement COW semantics or the like.
320 """
320 """
321 self.indexfile = indexfile
321 self.indexfile = indexfile
322 self.datafile = indexfile[:-2] + ".d"
322 self.datafile = indexfile[:-2] + ".d"
323 self.opener = opener
323 self.opener = opener
324
324
325 self.indexstat = None
325 self.indexstat = None
326 self.cache = None
326 self.cache = None
327 self.chunkcache = None
327 self.chunkcache = None
328 self.defversion = defversion
328 self.defversion=REVLOG_DEFAULT_VERSION
329 if hasattr(opener, "defversion"):
330 self.defversion = opener.defversion
329 self.load()
331 self.load()
330
332
331 def load(self):
333 def load(self):
332 v = self.defversion
334 v = self.defversion
333 try:
335 try:
334 f = self.opener(self.indexfile)
336 f = self.opener(self.indexfile)
335 i = f.read(4)
337 i = f.read(4)
336 f.seek(0)
338 f.seek(0)
337 except IOError, inst:
339 except IOError, inst:
338 if inst.errno != errno.ENOENT:
340 if inst.errno != errno.ENOENT:
339 raise
341 raise
340 i = ""
342 i = ""
341 else:
343 else:
342 try:
344 try:
343 st = util.fstat(f)
345 st = util.fstat(f)
344 except AttributeError, inst:
346 except AttributeError, inst:
345 st = None
347 st = None
346 else:
348 else:
347 oldst = self.indexstat
349 oldst = self.indexstat
348 if (oldst and st.st_dev == oldst.st_dev
350 if (oldst and st.st_dev == oldst.st_dev
349 and st.st_ino == oldst.st_ino
351 and st.st_ino == oldst.st_ino
350 and st.st_mtime == oldst.st_mtime
352 and st.st_mtime == oldst.st_mtime
351 and st.st_ctime == oldst.st_ctime):
353 and st.st_ctime == oldst.st_ctime):
352 return
354 return
353 self.indexstat = st
355 self.indexstat = st
354 if len(i) > 0:
356 if len(i) > 0:
355 v = struct.unpack(versionformat, i)[0]
357 v = struct.unpack(versionformat, i)[0]
356 flags = v & ~0xFFFF
358 flags = v & ~0xFFFF
357 fmt = v & 0xFFFF
359 fmt = v & 0xFFFF
358 if fmt == REVLOGV0:
360 if fmt == REVLOGV0:
359 if flags:
361 if flags:
360 raise RevlogError(_("index %s unknown flags %#04x for format v0")
362 raise RevlogError(_("index %s unknown flags %#04x for format v0")
361 % (self.indexfile, flags >> 16))
363 % (self.indexfile, flags >> 16))
362 elif fmt == REVLOGNG:
364 elif fmt == REVLOGNG:
363 if flags & ~REVLOGNGINLINEDATA:
365 if flags & ~REVLOGNGINLINEDATA:
364 raise RevlogError(_("index %s unknown flags %#04x for revlogng")
366 raise RevlogError(_("index %s unknown flags %#04x for revlogng")
365 % (self.indexfile, flags >> 16))
367 % (self.indexfile, flags >> 16))
366 else:
368 else:
367 raise RevlogError(_("index %s unknown format %d")
369 raise RevlogError(_("index %s unknown format %d")
368 % (self.indexfile, fmt))
370 % (self.indexfile, fmt))
369 self.version = v
371 self.version = v
370 if v == REVLOGV0:
372 if v == REVLOGV0:
371 self.indexformat = indexformatv0
373 self.indexformat = indexformatv0
372 shaoffset = v0shaoffset
374 shaoffset = v0shaoffset
373 else:
375 else:
374 self.indexformat = indexformatng
376 self.indexformat = indexformatng
375 shaoffset = ngshaoffset
377 shaoffset = ngshaoffset
376
378
377 if i:
379 if i:
378 if (lazyparser.safe_to_use and not self.inlinedata() and
380 if (lazyparser.safe_to_use and not self.inlinedata() and
379 st and st.st_size > 10000):
381 st and st.st_size > 10000):
380 # big index, let's parse it on demand
382 # big index, let's parse it on demand
381 parser = lazyparser(f, st.st_size, self.indexformat, shaoffset)
383 parser = lazyparser(f, st.st_size, self.indexformat, shaoffset)
382 self.index = lazyindex(parser)
384 self.index = lazyindex(parser)
383 self.nodemap = lazymap(parser)
385 self.nodemap = lazymap(parser)
384 else:
386 else:
385 self.parseindex(f, st)
387 self.parseindex(f, st)
386 if self.version != REVLOGV0:
388 if self.version != REVLOGV0:
387 e = list(self.index[0])
389 e = list(self.index[0])
388 type = self.ngtype(e[0])
390 type = self.ngtype(e[0])
389 e[0] = self.offset_type(0, type)
391 e[0] = self.offset_type(0, type)
390 self.index[0] = e
392 self.index[0] = e
391 else:
393 else:
392 self.nodemap = {nullid: nullrev}
394 self.nodemap = {nullid: nullrev}
393 self.index = []
395 self.index = []
394
396
395
397
396 def parseindex(self, fp, st):
398 def parseindex(self, fp, st):
397 s = struct.calcsize(self.indexformat)
399 s = struct.calcsize(self.indexformat)
398 self.index = []
400 self.index = []
399 self.nodemap = {nullid: nullrev}
401 self.nodemap = {nullid: nullrev}
400 inline = self.inlinedata()
402 inline = self.inlinedata()
401 n = 0
403 n = 0
402 leftover = None
404 leftover = None
403 while True:
405 while True:
404 if st:
406 if st:
405 data = fp.read(65536)
407 data = fp.read(65536)
406 else:
408 else:
407 # hack for httprangereader, it doesn't do partial reads well
409 # hack for httprangereader, it doesn't do partial reads well
408 data = fp.read()
410 data = fp.read()
409 if not data:
411 if not data:
410 break
412 break
411 if n == 0 and self.inlinedata():
413 if n == 0 and self.inlinedata():
412 # cache the first chunk
414 # cache the first chunk
413 self.chunkcache = (0, data)
415 self.chunkcache = (0, data)
414 if leftover:
416 if leftover:
415 data = leftover + data
417 data = leftover + data
416 leftover = None
418 leftover = None
417 off = 0
419 off = 0
418 l = len(data)
420 l = len(data)
419 while off < l:
421 while off < l:
420 if l - off < s:
422 if l - off < s:
421 leftover = data[off:]
423 leftover = data[off:]
422 break
424 break
423 cur = data[off:off + s]
425 cur = data[off:off + s]
424 off += s
426 off += s
425 e = struct.unpack(self.indexformat, cur)
427 e = struct.unpack(self.indexformat, cur)
426 self.index.append(e)
428 self.index.append(e)
427 self.nodemap[e[-1]] = n
429 self.nodemap[e[-1]] = n
428 n += 1
430 n += 1
429 if inline:
431 if inline:
430 if e[1] < 0:
432 if e[1] < 0:
431 break
433 break
432 off += e[1]
434 off += e[1]
433 if off > l:
435 if off > l:
434 # some things don't seek well, just read it
436 # some things don't seek well, just read it
435 fp.read(off - l)
437 fp.read(off - l)
436 break
438 break
437 if not st:
439 if not st:
438 break
440 break
439
441
440
442
441 def ngoffset(self, q):
443 def ngoffset(self, q):
442 if q & 0xFFFF:
444 if q & 0xFFFF:
443 raise RevlogError(_('%s: incompatible revision flag %x') %
445 raise RevlogError(_('%s: incompatible revision flag %x') %
444 (self.indexfile, q))
446 (self.indexfile, q))
445 return long(q >> 16)
447 return long(q >> 16)
446
448
447 def ngtype(self, q):
449 def ngtype(self, q):
448 return int(q & 0xFFFF)
450 return int(q & 0xFFFF)
449
451
450 def offset_type(self, offset, type):
452 def offset_type(self, offset, type):
451 return long(long(offset) << 16 | type)
453 return long(long(offset) << 16 | type)
452
454
453 def loadindex(self, start, end):
455 def loadindex(self, start, end):
454 """load a block of indexes all at once from the lazy parser"""
456 """load a block of indexes all at once from the lazy parser"""
455 if isinstance(self.index, lazyindex):
457 if isinstance(self.index, lazyindex):
456 self.index.p.loadindex(start, end)
458 self.index.p.loadindex(start, end)
457
459
458 def loadindexmap(self):
460 def loadindexmap(self):
459 """loads both the map and the index from the lazy parser"""
461 """loads both the map and the index from the lazy parser"""
460 if isinstance(self.index, lazyindex):
462 if isinstance(self.index, lazyindex):
461 p = self.index.p
463 p = self.index.p
462 p.loadindex()
464 p.loadindex()
463 self.nodemap = p.map
465 self.nodemap = p.map
464
466
465 def loadmap(self):
467 def loadmap(self):
466 """loads the map from the lazy parser"""
468 """loads the map from the lazy parser"""
467 if isinstance(self.nodemap, lazymap):
469 if isinstance(self.nodemap, lazymap):
468 self.nodemap.p.loadmap()
470 self.nodemap.p.loadmap()
469 self.nodemap = self.nodemap.p.map
471 self.nodemap = self.nodemap.p.map
470
472
471 def inlinedata(self): return self.version & REVLOGNGINLINEDATA
473 def inlinedata(self): return self.version & REVLOGNGINLINEDATA
472 def tip(self): return self.node(len(self.index) - 1)
474 def tip(self): return self.node(len(self.index) - 1)
473 def count(self): return len(self.index)
475 def count(self): return len(self.index)
474 def node(self, rev):
476 def node(self, rev):
475 return rev == nullrev and nullid or self.index[rev][-1]
477 return rev == nullrev and nullid or self.index[rev][-1]
476 def rev(self, node):
478 def rev(self, node):
477 try:
479 try:
478 return self.nodemap[node]
480 return self.nodemap[node]
479 except KeyError:
481 except KeyError:
480 raise LookupError(_('%s: no node %s') % (self.indexfile, hex(node)))
482 raise LookupError(_('%s: no node %s') % (self.indexfile, hex(node)))
481 def linkrev(self, node):
483 def linkrev(self, node):
482 return (node == nullid) and nullrev or self.index[self.rev(node)][-4]
484 return (node == nullid) and nullrev or self.index[self.rev(node)][-4]
483 def parents(self, node):
485 def parents(self, node):
484 if node == nullid: return (nullid, nullid)
486 if node == nullid: return (nullid, nullid)
485 r = self.rev(node)
487 r = self.rev(node)
486 d = self.index[r][-3:-1]
488 d = self.index[r][-3:-1]
487 if self.version == REVLOGV0:
489 if self.version == REVLOGV0:
488 return d
490 return d
489 return (self.node(d[0]), self.node(d[1]))
491 return (self.node(d[0]), self.node(d[1]))
490 def parentrevs(self, rev):
492 def parentrevs(self, rev):
491 if rev == nullrev:
493 if rev == nullrev:
492 return (nullrev, nullrev)
494 return (nullrev, nullrev)
493 d = self.index[rev][-3:-1]
495 d = self.index[rev][-3:-1]
494 if self.version == REVLOGV0:
496 if self.version == REVLOGV0:
495 return (self.rev(d[0]), self.rev(d[1]))
497 return (self.rev(d[0]), self.rev(d[1]))
496 return d
498 return d
497 def start(self, rev):
499 def start(self, rev):
498 if rev == nullrev:
500 if rev == nullrev:
499 return 0
501 return 0
500 if self.version != REVLOGV0:
502 if self.version != REVLOGV0:
501 return self.ngoffset(self.index[rev][0])
503 return self.ngoffset(self.index[rev][0])
502 return self.index[rev][0]
504 return self.index[rev][0]
503
505
504 def end(self, rev): return self.start(rev) + self.length(rev)
506 def end(self, rev): return self.start(rev) + self.length(rev)
505
507
506 def size(self, rev):
508 def size(self, rev):
507 """return the length of the uncompressed text for a given revision"""
509 """return the length of the uncompressed text for a given revision"""
508 if rev == nullrev:
510 if rev == nullrev:
509 return 0
511 return 0
510 l = -1
512 l = -1
511 if self.version != REVLOGV0:
513 if self.version != REVLOGV0:
512 l = self.index[rev][2]
514 l = self.index[rev][2]
513 if l >= 0:
515 if l >= 0:
514 return l
516 return l
515
517
516 t = self.revision(self.node(rev))
518 t = self.revision(self.node(rev))
517 return len(t)
519 return len(t)
518
520
519 # alternate implementation, The advantage to this code is it
521 # alternate implementation, The advantage to this code is it
520 # will be faster for a single revision. But, the results are not
522 # will be faster for a single revision. But, the results are not
521 # cached, so finding the size of every revision will be slower.
523 # cached, so finding the size of every revision will be slower.
522 """
524 """
523 if self.cache and self.cache[1] == rev:
525 if self.cache and self.cache[1] == rev:
524 return len(self.cache[2])
526 return len(self.cache[2])
525
527
526 base = self.base(rev)
528 base = self.base(rev)
527 if self.cache and self.cache[1] >= base and self.cache[1] < rev:
529 if self.cache and self.cache[1] >= base and self.cache[1] < rev:
528 base = self.cache[1]
530 base = self.cache[1]
529 text = self.cache[2]
531 text = self.cache[2]
530 else:
532 else:
531 text = self.revision(self.node(base))
533 text = self.revision(self.node(base))
532
534
533 l = len(text)
535 l = len(text)
534 for x in xrange(base + 1, rev + 1):
536 for x in xrange(base + 1, rev + 1):
535 l = mdiff.patchedsize(l, self.chunk(x))
537 l = mdiff.patchedsize(l, self.chunk(x))
536 return l
538 return l
537 """
539 """
538
540
539 def length(self, rev):
541 def length(self, rev):
540 if rev == nullrev:
542 if rev == nullrev:
541 return 0
543 return 0
542 else:
544 else:
543 return self.index[rev][1]
545 return self.index[rev][1]
544 def base(self, rev):
546 def base(self, rev):
545 if (rev == nullrev):
547 if (rev == nullrev):
546 return nullrev
548 return nullrev
547 else:
549 else:
548 return self.index[rev][-5]
550 return self.index[rev][-5]
549
551
550 def reachable(self, node, stop=None):
552 def reachable(self, node, stop=None):
551 """return a hash of all nodes ancestral to a given node, including
553 """return a hash of all nodes ancestral to a given node, including
552 the node itself, stopping when stop is matched"""
554 the node itself, stopping when stop is matched"""
553 reachable = {}
555 reachable = {}
554 visit = [node]
556 visit = [node]
555 reachable[node] = 1
557 reachable[node] = 1
556 if stop:
558 if stop:
557 stopn = self.rev(stop)
559 stopn = self.rev(stop)
558 else:
560 else:
559 stopn = 0
561 stopn = 0
560 while visit:
562 while visit:
561 n = visit.pop(0)
563 n = visit.pop(0)
562 if n == stop:
564 if n == stop:
563 continue
565 continue
564 if n == nullid:
566 if n == nullid:
565 continue
567 continue
566 for p in self.parents(n):
568 for p in self.parents(n):
567 if self.rev(p) < stopn:
569 if self.rev(p) < stopn:
568 continue
570 continue
569 if p not in reachable:
571 if p not in reachable:
570 reachable[p] = 1
572 reachable[p] = 1
571 visit.append(p)
573 visit.append(p)
572 return reachable
574 return reachable
573
575
574 def nodesbetween(self, roots=None, heads=None):
576 def nodesbetween(self, roots=None, heads=None):
575 """Return a tuple containing three elements. Elements 1 and 2 contain
577 """Return a tuple containing three elements. Elements 1 and 2 contain
576 a final list bases and heads after all the unreachable ones have been
578 a final list bases and heads after all the unreachable ones have been
577 pruned. Element 0 contains a topologically sorted list of all
579 pruned. Element 0 contains a topologically sorted list of all
578
580
579 nodes that satisfy these constraints:
581 nodes that satisfy these constraints:
580 1. All nodes must be descended from a node in roots (the nodes on
582 1. All nodes must be descended from a node in roots (the nodes on
581 roots are considered descended from themselves).
583 roots are considered descended from themselves).
582 2. All nodes must also be ancestors of a node in heads (the nodes in
584 2. All nodes must also be ancestors of a node in heads (the nodes in
583 heads are considered to be their own ancestors).
585 heads are considered to be their own ancestors).
584
586
585 If roots is unspecified, nullid is assumed as the only root.
587 If roots is unspecified, nullid is assumed as the only root.
586 If heads is unspecified, it is taken to be the output of the
588 If heads is unspecified, it is taken to be the output of the
587 heads method (i.e. a list of all nodes in the repository that
589 heads method (i.e. a list of all nodes in the repository that
588 have no children)."""
590 have no children)."""
589 nonodes = ([], [], [])
591 nonodes = ([], [], [])
590 if roots is not None:
592 if roots is not None:
591 roots = list(roots)
593 roots = list(roots)
592 if not roots:
594 if not roots:
593 return nonodes
595 return nonodes
594 lowestrev = min([self.rev(n) for n in roots])
596 lowestrev = min([self.rev(n) for n in roots])
595 else:
597 else:
596 roots = [nullid] # Everybody's a descendent of nullid
598 roots = [nullid] # Everybody's a descendent of nullid
597 lowestrev = nullrev
599 lowestrev = nullrev
598 if (lowestrev == nullrev) and (heads is None):
600 if (lowestrev == nullrev) and (heads is None):
599 # We want _all_ the nodes!
601 # We want _all_ the nodes!
600 return ([self.node(r) for r in xrange(0, self.count())],
602 return ([self.node(r) for r in xrange(0, self.count())],
601 [nullid], list(self.heads()))
603 [nullid], list(self.heads()))
602 if heads is None:
604 if heads is None:
603 # All nodes are ancestors, so the latest ancestor is the last
605 # All nodes are ancestors, so the latest ancestor is the last
604 # node.
606 # node.
605 highestrev = self.count() - 1
607 highestrev = self.count() - 1
606 # Set ancestors to None to signal that every node is an ancestor.
608 # Set ancestors to None to signal that every node is an ancestor.
607 ancestors = None
609 ancestors = None
608 # Set heads to an empty dictionary for later discovery of heads
610 # Set heads to an empty dictionary for later discovery of heads
609 heads = {}
611 heads = {}
610 else:
612 else:
611 heads = list(heads)
613 heads = list(heads)
612 if not heads:
614 if not heads:
613 return nonodes
615 return nonodes
614 ancestors = {}
616 ancestors = {}
615 # Turn heads into a dictionary so we can remove 'fake' heads.
617 # Turn heads into a dictionary so we can remove 'fake' heads.
616 # Also, later we will be using it to filter out the heads we can't
618 # Also, later we will be using it to filter out the heads we can't
617 # find from roots.
619 # find from roots.
618 heads = dict.fromkeys(heads, 0)
620 heads = dict.fromkeys(heads, 0)
619 # Start at the top and keep marking parents until we're done.
621 # Start at the top and keep marking parents until we're done.
620 nodestotag = heads.keys()
622 nodestotag = heads.keys()
621 # Remember where the top was so we can use it as a limit later.
623 # Remember where the top was so we can use it as a limit later.
622 highestrev = max([self.rev(n) for n in nodestotag])
624 highestrev = max([self.rev(n) for n in nodestotag])
623 while nodestotag:
625 while nodestotag:
624 # grab a node to tag
626 # grab a node to tag
625 n = nodestotag.pop()
627 n = nodestotag.pop()
626 # Never tag nullid
628 # Never tag nullid
627 if n == nullid:
629 if n == nullid:
628 continue
630 continue
629 # A node's revision number represents its place in a
631 # A node's revision number represents its place in a
630 # topologically sorted list of nodes.
632 # topologically sorted list of nodes.
631 r = self.rev(n)
633 r = self.rev(n)
632 if r >= lowestrev:
634 if r >= lowestrev:
633 if n not in ancestors:
635 if n not in ancestors:
634 # If we are possibly a descendent of one of the roots
636 # If we are possibly a descendent of one of the roots
635 # and we haven't already been marked as an ancestor
637 # and we haven't already been marked as an ancestor
636 ancestors[n] = 1 # Mark as ancestor
638 ancestors[n] = 1 # Mark as ancestor
637 # Add non-nullid parents to list of nodes to tag.
639 # Add non-nullid parents to list of nodes to tag.
638 nodestotag.extend([p for p in self.parents(n) if
640 nodestotag.extend([p for p in self.parents(n) if
639 p != nullid])
641 p != nullid])
640 elif n in heads: # We've seen it before, is it a fake head?
642 elif n in heads: # We've seen it before, is it a fake head?
641 # So it is, real heads should not be the ancestors of
643 # So it is, real heads should not be the ancestors of
642 # any other heads.
644 # any other heads.
643 heads.pop(n)
645 heads.pop(n)
644 if not ancestors:
646 if not ancestors:
645 return nonodes
647 return nonodes
646 # Now that we have our set of ancestors, we want to remove any
648 # Now that we have our set of ancestors, we want to remove any
647 # roots that are not ancestors.
649 # roots that are not ancestors.
648
650
649 # If one of the roots was nullid, everything is included anyway.
651 # If one of the roots was nullid, everything is included anyway.
650 if lowestrev > nullrev:
652 if lowestrev > nullrev:
651 # But, since we weren't, let's recompute the lowest rev to not
653 # But, since we weren't, let's recompute the lowest rev to not
652 # include roots that aren't ancestors.
654 # include roots that aren't ancestors.
653
655
654 # Filter out roots that aren't ancestors of heads
656 # Filter out roots that aren't ancestors of heads
655 roots = [n for n in roots if n in ancestors]
657 roots = [n for n in roots if n in ancestors]
656 # Recompute the lowest revision
658 # Recompute the lowest revision
657 if roots:
659 if roots:
658 lowestrev = min([self.rev(n) for n in roots])
660 lowestrev = min([self.rev(n) for n in roots])
659 else:
661 else:
660 # No more roots? Return empty list
662 # No more roots? Return empty list
661 return nonodes
663 return nonodes
662 else:
664 else:
663 # We are descending from nullid, and don't need to care about
665 # We are descending from nullid, and don't need to care about
664 # any other roots.
666 # any other roots.
665 lowestrev = nullrev
667 lowestrev = nullrev
666 roots = [nullid]
668 roots = [nullid]
667 # Transform our roots list into a 'set' (i.e. a dictionary where the
669 # Transform our roots list into a 'set' (i.e. a dictionary where the
668 # values don't matter.
670 # values don't matter.
669 descendents = dict.fromkeys(roots, 1)
671 descendents = dict.fromkeys(roots, 1)
670 # Also, keep the original roots so we can filter out roots that aren't
672 # Also, keep the original roots so we can filter out roots that aren't
671 # 'real' roots (i.e. are descended from other roots).
673 # 'real' roots (i.e. are descended from other roots).
672 roots = descendents.copy()
674 roots = descendents.copy()
673 # Our topologically sorted list of output nodes.
675 # Our topologically sorted list of output nodes.
674 orderedout = []
676 orderedout = []
675 # Don't start at nullid since we don't want nullid in our output list,
677 # Don't start at nullid since we don't want nullid in our output list,
676 # and if nullid shows up in descedents, empty parents will look like
678 # and if nullid shows up in descedents, empty parents will look like
677 # they're descendents.
679 # they're descendents.
678 for r in xrange(max(lowestrev, 0), highestrev + 1):
680 for r in xrange(max(lowestrev, 0), highestrev + 1):
679 n = self.node(r)
681 n = self.node(r)
680 isdescendent = False
682 isdescendent = False
681 if lowestrev == nullrev: # Everybody is a descendent of nullid
683 if lowestrev == nullrev: # Everybody is a descendent of nullid
682 isdescendent = True
684 isdescendent = True
683 elif n in descendents:
685 elif n in descendents:
684 # n is already a descendent
686 # n is already a descendent
685 isdescendent = True
687 isdescendent = True
686 # This check only needs to be done here because all the roots
688 # This check only needs to be done here because all the roots
687 # will start being marked is descendents before the loop.
689 # will start being marked is descendents before the loop.
688 if n in roots:
690 if n in roots:
689 # If n was a root, check if it's a 'real' root.
691 # If n was a root, check if it's a 'real' root.
690 p = tuple(self.parents(n))
692 p = tuple(self.parents(n))
691 # If any of its parents are descendents, it's not a root.
693 # If any of its parents are descendents, it's not a root.
692 if (p[0] in descendents) or (p[1] in descendents):
694 if (p[0] in descendents) or (p[1] in descendents):
693 roots.pop(n)
695 roots.pop(n)
694 else:
696 else:
695 p = tuple(self.parents(n))
697 p = tuple(self.parents(n))
696 # A node is a descendent if either of its parents are
698 # A node is a descendent if either of its parents are
697 # descendents. (We seeded the dependents list with the roots
699 # descendents. (We seeded the dependents list with the roots
698 # up there, remember?)
700 # up there, remember?)
699 if (p[0] in descendents) or (p[1] in descendents):
701 if (p[0] in descendents) or (p[1] in descendents):
700 descendents[n] = 1
702 descendents[n] = 1
701 isdescendent = True
703 isdescendent = True
702 if isdescendent and ((ancestors is None) or (n in ancestors)):
704 if isdescendent and ((ancestors is None) or (n in ancestors)):
703 # Only include nodes that are both descendents and ancestors.
705 # Only include nodes that are both descendents and ancestors.
704 orderedout.append(n)
706 orderedout.append(n)
705 if (ancestors is not None) and (n in heads):
707 if (ancestors is not None) and (n in heads):
706 # We're trying to figure out which heads are reachable
708 # We're trying to figure out which heads are reachable
707 # from roots.
709 # from roots.
708 # Mark this head as having been reached
710 # Mark this head as having been reached
709 heads[n] = 1
711 heads[n] = 1
710 elif ancestors is None:
712 elif ancestors is None:
711 # Otherwise, we're trying to discover the heads.
713 # Otherwise, we're trying to discover the heads.
712 # Assume this is a head because if it isn't, the next step
714 # Assume this is a head because if it isn't, the next step
713 # will eventually remove it.
715 # will eventually remove it.
714 heads[n] = 1
716 heads[n] = 1
715 # But, obviously its parents aren't.
717 # But, obviously its parents aren't.
716 for p in self.parents(n):
718 for p in self.parents(n):
717 heads.pop(p, None)
719 heads.pop(p, None)
718 heads = [n for n in heads.iterkeys() if heads[n] != 0]
720 heads = [n for n in heads.iterkeys() if heads[n] != 0]
719 roots = roots.keys()
721 roots = roots.keys()
720 assert orderedout
722 assert orderedout
721 assert roots
723 assert roots
722 assert heads
724 assert heads
723 return (orderedout, roots, heads)
725 return (orderedout, roots, heads)
724
726
725 def heads(self, start=None, stop=None):
727 def heads(self, start=None, stop=None):
726 """return the list of all nodes that have no children
728 """return the list of all nodes that have no children
727
729
728 if start is specified, only heads that are descendants of
730 if start is specified, only heads that are descendants of
729 start will be returned
731 start will be returned
730 if stop is specified, it will consider all the revs from stop
732 if stop is specified, it will consider all the revs from stop
731 as if they had no children
733 as if they had no children
732 """
734 """
733 if start is None:
735 if start is None:
734 start = nullid
736 start = nullid
735 if stop is None:
737 if stop is None:
736 stop = []
738 stop = []
737 stoprevs = dict.fromkeys([self.rev(n) for n in stop])
739 stoprevs = dict.fromkeys([self.rev(n) for n in stop])
738 startrev = self.rev(start)
740 startrev = self.rev(start)
739 reachable = {startrev: 1}
741 reachable = {startrev: 1}
740 heads = {startrev: 1}
742 heads = {startrev: 1}
741
743
742 parentrevs = self.parentrevs
744 parentrevs = self.parentrevs
743 for r in xrange(startrev + 1, self.count()):
745 for r in xrange(startrev + 1, self.count()):
744 for p in parentrevs(r):
746 for p in parentrevs(r):
745 if p in reachable:
747 if p in reachable:
746 if r not in stoprevs:
748 if r not in stoprevs:
747 reachable[r] = 1
749 reachable[r] = 1
748 heads[r] = 1
750 heads[r] = 1
749 if p in heads and p not in stoprevs:
751 if p in heads and p not in stoprevs:
750 del heads[p]
752 del heads[p]
751
753
752 return [self.node(r) for r in heads]
754 return [self.node(r) for r in heads]
753
755
754 def children(self, node):
756 def children(self, node):
755 """find the children of a given node"""
757 """find the children of a given node"""
756 c = []
758 c = []
757 p = self.rev(node)
759 p = self.rev(node)
758 for r in range(p + 1, self.count()):
760 for r in range(p + 1, self.count()):
759 for pr in self.parentrevs(r):
761 for pr in self.parentrevs(r):
760 if pr == p:
762 if pr == p:
761 c.append(self.node(r))
763 c.append(self.node(r))
762 return c
764 return c
763
765
764 def _match(self, id):
766 def _match(self, id):
765 if isinstance(id, (long, int)):
767 if isinstance(id, (long, int)):
766 # rev
768 # rev
767 return self.node(id)
769 return self.node(id)
768 if len(id) == 20:
770 if len(id) == 20:
769 # possibly a binary node
771 # possibly a binary node
770 # odds of a binary node being all hex in ASCII are 1 in 10**25
772 # odds of a binary node being all hex in ASCII are 1 in 10**25
771 try:
773 try:
772 node = id
774 node = id
773 r = self.rev(node) # quick search the index
775 r = self.rev(node) # quick search the index
774 return node
776 return node
775 except LookupError:
777 except LookupError:
776 pass # may be partial hex id
778 pass # may be partial hex id
777 try:
779 try:
778 # str(rev)
780 # str(rev)
779 rev = int(id)
781 rev = int(id)
780 if str(rev) != id: raise ValueError
782 if str(rev) != id: raise ValueError
781 if rev < 0: rev = self.count() + rev
783 if rev < 0: rev = self.count() + rev
782 if rev < 0 or rev >= self.count(): raise ValueError
784 if rev < 0 or rev >= self.count(): raise ValueError
783 return self.node(rev)
785 return self.node(rev)
784 except (ValueError, OverflowError):
786 except (ValueError, OverflowError):
785 pass
787 pass
786 if len(id) == 40:
788 if len(id) == 40:
787 try:
789 try:
788 # a full hex nodeid?
790 # a full hex nodeid?
789 node = bin(id)
791 node = bin(id)
790 r = self.rev(node)
792 r = self.rev(node)
791 return node
793 return node
792 except TypeError:
794 except TypeError:
793 pass
795 pass
794
796
795 def _partialmatch(self, id):
797 def _partialmatch(self, id):
796 if len(id) < 40:
798 if len(id) < 40:
797 try:
799 try:
798 # hex(node)[:...]
800 # hex(node)[:...]
799 bin_id = bin(id[:len(id) & ~1]) # grab an even number of digits
801 bin_id = bin(id[:len(id) & ~1]) # grab an even number of digits
800 node = None
802 node = None
801 for n in self.nodemap:
803 for n in self.nodemap:
802 if n.startswith(bin_id) and hex(n).startswith(id):
804 if n.startswith(bin_id) and hex(n).startswith(id):
803 if node is not None:
805 if node is not None:
804 raise LookupError(_("Ambiguous identifier"))
806 raise LookupError(_("Ambiguous identifier"))
805 node = n
807 node = n
806 if node is not None:
808 if node is not None:
807 return node
809 return node
808 except TypeError:
810 except TypeError:
809 pass
811 pass
810
812
811 def lookup(self, id):
813 def lookup(self, id):
812 """locate a node based on:
814 """locate a node based on:
813 - revision number or str(revision number)
815 - revision number or str(revision number)
814 - nodeid or subset of hex nodeid
816 - nodeid or subset of hex nodeid
815 """
817 """
816
818
817 n = self._match(id)
819 n = self._match(id)
818 if n is not None:
820 if n is not None:
819 return n
821 return n
820 n = self._partialmatch(id)
822 n = self._partialmatch(id)
821 if n:
823 if n:
822 return n
824 return n
823
825
824 raise LookupError(_("No match found"))
826 raise LookupError(_("No match found"))
825
827
826 def cmp(self, node, text):
828 def cmp(self, node, text):
827 """compare text with a given file revision"""
829 """compare text with a given file revision"""
828 p1, p2 = self.parents(node)
830 p1, p2 = self.parents(node)
829 return hash(text, p1, p2) != node
831 return hash(text, p1, p2) != node
830
832
831 def makenode(self, node, text):
833 def makenode(self, node, text):
832 """calculate a file nodeid for text, descended or possibly
834 """calculate a file nodeid for text, descended or possibly
833 unchanged from node"""
835 unchanged from node"""
834
836
835 if self.cmp(node, text):
837 if self.cmp(node, text):
836 return hash(text, node, nullid)
838 return hash(text, node, nullid)
837 return node
839 return node
838
840
839 def diff(self, a, b):
841 def diff(self, a, b):
840 """return a delta between two revisions"""
842 """return a delta between two revisions"""
841 return mdiff.textdiff(a, b)
843 return mdiff.textdiff(a, b)
842
844
843 def patches(self, t, pl):
845 def patches(self, t, pl):
844 """apply a list of patches to a string"""
846 """apply a list of patches to a string"""
845 return mdiff.patches(t, pl)
847 return mdiff.patches(t, pl)
846
848
847 def chunk(self, rev, df=None, cachelen=4096):
849 def chunk(self, rev, df=None, cachelen=4096):
848 start, length = self.start(rev), self.length(rev)
850 start, length = self.start(rev), self.length(rev)
849 inline = self.inlinedata()
851 inline = self.inlinedata()
850 if inline:
852 if inline:
851 start += (rev + 1) * struct.calcsize(self.indexformat)
853 start += (rev + 1) * struct.calcsize(self.indexformat)
852 end = start + length
854 end = start + length
853 def loadcache(df):
855 def loadcache(df):
854 cache_length = max(cachelen, length) # 4k
856 cache_length = max(cachelen, length) # 4k
855 if not df:
857 if not df:
856 if inline:
858 if inline:
857 df = self.opener(self.indexfile)
859 df = self.opener(self.indexfile)
858 else:
860 else:
859 df = self.opener(self.datafile)
861 df = self.opener(self.datafile)
860 df.seek(start)
862 df.seek(start)
861 self.chunkcache = (start, df.read(cache_length))
863 self.chunkcache = (start, df.read(cache_length))
862
864
863 if not self.chunkcache:
865 if not self.chunkcache:
864 loadcache(df)
866 loadcache(df)
865
867
866 cache_start = self.chunkcache[0]
868 cache_start = self.chunkcache[0]
867 cache_end = cache_start + len(self.chunkcache[1])
869 cache_end = cache_start + len(self.chunkcache[1])
868 if start >= cache_start and end <= cache_end:
870 if start >= cache_start and end <= cache_end:
869 # it is cached
871 # it is cached
870 offset = start - cache_start
872 offset = start - cache_start
871 else:
873 else:
872 loadcache(df)
874 loadcache(df)
873 offset = 0
875 offset = 0
874
876
875 #def checkchunk():
877 #def checkchunk():
876 # df = self.opener(self.datafile)
878 # df = self.opener(self.datafile)
877 # df.seek(start)
879 # df.seek(start)
878 # return df.read(length)
880 # return df.read(length)
879 #assert s == checkchunk()
881 #assert s == checkchunk()
880 return decompress(self.chunkcache[1][offset:offset + length])
882 return decompress(self.chunkcache[1][offset:offset + length])
881
883
882 def delta(self, node):
884 def delta(self, node):
883 """return or calculate a delta between a node and its predecessor"""
885 """return or calculate a delta between a node and its predecessor"""
884 r = self.rev(node)
886 r = self.rev(node)
885 return self.revdiff(r - 1, r)
887 return self.revdiff(r - 1, r)
886
888
887 def revdiff(self, rev1, rev2):
889 def revdiff(self, rev1, rev2):
888 """return or calculate a delta between two revisions"""
890 """return or calculate a delta between two revisions"""
889 b1 = self.base(rev1)
891 b1 = self.base(rev1)
890 b2 = self.base(rev2)
892 b2 = self.base(rev2)
891 if b1 == b2 and rev1 + 1 == rev2:
893 if b1 == b2 and rev1 + 1 == rev2:
892 return self.chunk(rev2)
894 return self.chunk(rev2)
893 else:
895 else:
894 return self.diff(self.revision(self.node(rev1)),
896 return self.diff(self.revision(self.node(rev1)),
895 self.revision(self.node(rev2)))
897 self.revision(self.node(rev2)))
896
898
897 def revision(self, node):
899 def revision(self, node):
898 """return an uncompressed revision of a given"""
900 """return an uncompressed revision of a given"""
899 if node == nullid: return ""
901 if node == nullid: return ""
900 if self.cache and self.cache[0] == node: return self.cache[2]
902 if self.cache and self.cache[0] == node: return self.cache[2]
901
903
902 # look up what we need to read
904 # look up what we need to read
903 text = None
905 text = None
904 rev = self.rev(node)
906 rev = self.rev(node)
905 base = self.base(rev)
907 base = self.base(rev)
906
908
907 if self.inlinedata():
909 if self.inlinedata():
908 # we probably have the whole chunk cached
910 # we probably have the whole chunk cached
909 df = None
911 df = None
910 else:
912 else:
911 df = self.opener(self.datafile)
913 df = self.opener(self.datafile)
912
914
913 # do we have useful data cached?
915 # do we have useful data cached?
914 if self.cache and self.cache[1] >= base and self.cache[1] < rev:
916 if self.cache and self.cache[1] >= base and self.cache[1] < rev:
915 base = self.cache[1]
917 base = self.cache[1]
916 text = self.cache[2]
918 text = self.cache[2]
917 self.loadindex(base, rev + 1)
919 self.loadindex(base, rev + 1)
918 else:
920 else:
919 self.loadindex(base, rev + 1)
921 self.loadindex(base, rev + 1)
920 text = self.chunk(base, df=df)
922 text = self.chunk(base, df=df)
921
923
922 bins = []
924 bins = []
923 for r in xrange(base + 1, rev + 1):
925 for r in xrange(base + 1, rev + 1):
924 bins.append(self.chunk(r, df=df))
926 bins.append(self.chunk(r, df=df))
925
927
926 text = self.patches(text, bins)
928 text = self.patches(text, bins)
927
929
928 p1, p2 = self.parents(node)
930 p1, p2 = self.parents(node)
929 if node != hash(text, p1, p2):
931 if node != hash(text, p1, p2):
930 raise RevlogError(_("integrity check failed on %s:%d")
932 raise RevlogError(_("integrity check failed on %s:%d")
931 % (self.datafile, rev))
933 % (self.datafile, rev))
932
934
933 self.cache = (node, rev, text)
935 self.cache = (node, rev, text)
934 return text
936 return text
935
937
936 def checkinlinesize(self, tr, fp=None):
938 def checkinlinesize(self, tr, fp=None):
937 if not self.inlinedata():
939 if not self.inlinedata():
938 return
940 return
939 if not fp:
941 if not fp:
940 fp = self.opener(self.indexfile, 'r')
942 fp = self.opener(self.indexfile, 'r')
941 fp.seek(0, 2)
943 fp.seek(0, 2)
942 size = fp.tell()
944 size = fp.tell()
943 if size < 131072:
945 if size < 131072:
944 return
946 return
945 trinfo = tr.find(self.indexfile)
947 trinfo = tr.find(self.indexfile)
946 if trinfo == None:
948 if trinfo == None:
947 raise RevlogError(_("%s not found in the transaction")
949 raise RevlogError(_("%s not found in the transaction")
948 % self.indexfile)
950 % self.indexfile)
949
951
950 trindex = trinfo[2]
952 trindex = trinfo[2]
951 dataoff = self.start(trindex)
953 dataoff = self.start(trindex)
952
954
953 tr.add(self.datafile, dataoff)
955 tr.add(self.datafile, dataoff)
954 df = self.opener(self.datafile, 'w')
956 df = self.opener(self.datafile, 'w')
955 calc = struct.calcsize(self.indexformat)
957 calc = struct.calcsize(self.indexformat)
956 for r in xrange(self.count()):
958 for r in xrange(self.count()):
957 start = self.start(r) + (r + 1) * calc
959 start = self.start(r) + (r + 1) * calc
958 length = self.length(r)
960 length = self.length(r)
959 fp.seek(start)
961 fp.seek(start)
960 d = fp.read(length)
962 d = fp.read(length)
961 df.write(d)
963 df.write(d)
962 fp.close()
964 fp.close()
963 df.close()
965 df.close()
964 fp = self.opener(self.indexfile, 'w', atomictemp=True)
966 fp = self.opener(self.indexfile, 'w', atomictemp=True)
965 self.version &= ~(REVLOGNGINLINEDATA)
967 self.version &= ~(REVLOGNGINLINEDATA)
966 if self.count():
968 if self.count():
967 x = self.index[0]
969 x = self.index[0]
968 e = struct.pack(self.indexformat, *x)[4:]
970 e = struct.pack(self.indexformat, *x)[4:]
969 l = struct.pack(versionformat, self.version)
971 l = struct.pack(versionformat, self.version)
970 fp.write(l)
972 fp.write(l)
971 fp.write(e)
973 fp.write(e)
972
974
973 for i in xrange(1, self.count()):
975 for i in xrange(1, self.count()):
974 x = self.index[i]
976 x = self.index[i]
975 e = struct.pack(self.indexformat, *x)
977 e = struct.pack(self.indexformat, *x)
976 fp.write(e)
978 fp.write(e)
977
979
978 # if we don't call rename, the temp file will never replace the
980 # if we don't call rename, the temp file will never replace the
979 # real index
981 # real index
980 fp.rename()
982 fp.rename()
981
983
982 tr.replace(self.indexfile, trindex * calc)
984 tr.replace(self.indexfile, trindex * calc)
983 self.chunkcache = None
985 self.chunkcache = None
984
986
985 def addrevision(self, text, transaction, link, p1=None, p2=None, d=None):
987 def addrevision(self, text, transaction, link, p1=None, p2=None, d=None):
986 """add a revision to the log
988 """add a revision to the log
987
989
988 text - the revision data to add
990 text - the revision data to add
989 transaction - the transaction object used for rollback
991 transaction - the transaction object used for rollback
990 link - the linkrev data to add
992 link - the linkrev data to add
991 p1, p2 - the parent nodeids of the revision
993 p1, p2 - the parent nodeids of the revision
992 d - an optional precomputed delta
994 d - an optional precomputed delta
993 """
995 """
994 if not self.inlinedata():
996 if not self.inlinedata():
995 dfh = self.opener(self.datafile, "a")
997 dfh = self.opener(self.datafile, "a")
996 else:
998 else:
997 dfh = None
999 dfh = None
998 ifh = self.opener(self.indexfile, "a+")
1000 ifh = self.opener(self.indexfile, "a+")
999 return self._addrevision(text, transaction, link, p1, p2, d, ifh, dfh)
1001 return self._addrevision(text, transaction, link, p1, p2, d, ifh, dfh)
1000
1002
1001 def _addrevision(self, text, transaction, link, p1, p2, d, ifh, dfh):
1003 def _addrevision(self, text, transaction, link, p1, p2, d, ifh, dfh):
1002 if text is None: text = ""
1004 if text is None: text = ""
1003 if p1 is None: p1 = self.tip()
1005 if p1 is None: p1 = self.tip()
1004 if p2 is None: p2 = nullid
1006 if p2 is None: p2 = nullid
1005
1007
1006 node = hash(text, p1, p2)
1008 node = hash(text, p1, p2)
1007
1009
1008 if node in self.nodemap:
1010 if node in self.nodemap:
1009 return node
1011 return node
1010
1012
1011 n = self.count()
1013 n = self.count()
1012 t = n - 1
1014 t = n - 1
1013
1015
1014 if n:
1016 if n:
1015 base = self.base(t)
1017 base = self.base(t)
1016 start = self.start(base)
1018 start = self.start(base)
1017 end = self.end(t)
1019 end = self.end(t)
1018 if not d:
1020 if not d:
1019 prev = self.revision(self.tip())
1021 prev = self.revision(self.tip())
1020 d = self.diff(prev, text)
1022 d = self.diff(prev, text)
1021 data = compress(d)
1023 data = compress(d)
1022 l = len(data[1]) + len(data[0])
1024 l = len(data[1]) + len(data[0])
1023 dist = end - start + l
1025 dist = end - start + l
1024
1026
1025 # full versions are inserted when the needed deltas
1027 # full versions are inserted when the needed deltas
1026 # become comparable to the uncompressed text
1028 # become comparable to the uncompressed text
1027 if not n or dist > len(text) * 2:
1029 if not n or dist > len(text) * 2:
1028 data = compress(text)
1030 data = compress(text)
1029 l = len(data[1]) + len(data[0])
1031 l = len(data[1]) + len(data[0])
1030 base = n
1032 base = n
1031 else:
1033 else:
1032 base = self.base(t)
1034 base = self.base(t)
1033
1035
1034 offset = 0
1036 offset = 0
1035 if t >= 0:
1037 if t >= 0:
1036 offset = self.end(t)
1038 offset = self.end(t)
1037
1039
1038 if self.version == REVLOGV0:
1040 if self.version == REVLOGV0:
1039 e = (offset, l, base, link, p1, p2, node)
1041 e = (offset, l, base, link, p1, p2, node)
1040 else:
1042 else:
1041 e = (self.offset_type(offset, 0), l, len(text),
1043 e = (self.offset_type(offset, 0), l, len(text),
1042 base, link, self.rev(p1), self.rev(p2), node)
1044 base, link, self.rev(p1), self.rev(p2), node)
1043
1045
1044 self.index.append(e)
1046 self.index.append(e)
1045 self.nodemap[node] = n
1047 self.nodemap[node] = n
1046 entry = struct.pack(self.indexformat, *e)
1048 entry = struct.pack(self.indexformat, *e)
1047
1049
1048 if not self.inlinedata():
1050 if not self.inlinedata():
1049 transaction.add(self.datafile, offset)
1051 transaction.add(self.datafile, offset)
1050 transaction.add(self.indexfile, n * len(entry))
1052 transaction.add(self.indexfile, n * len(entry))
1051 if data[0]:
1053 if data[0]:
1052 dfh.write(data[0])
1054 dfh.write(data[0])
1053 dfh.write(data[1])
1055 dfh.write(data[1])
1054 dfh.flush()
1056 dfh.flush()
1055 else:
1057 else:
1056 ifh.seek(0, 2)
1058 ifh.seek(0, 2)
1057 transaction.add(self.indexfile, ifh.tell(), self.count() - 1)
1059 transaction.add(self.indexfile, ifh.tell(), self.count() - 1)
1058
1060
1059 if len(self.index) == 1 and self.version != REVLOGV0:
1061 if len(self.index) == 1 and self.version != REVLOGV0:
1060 l = struct.pack(versionformat, self.version)
1062 l = struct.pack(versionformat, self.version)
1061 ifh.write(l)
1063 ifh.write(l)
1062 entry = entry[4:]
1064 entry = entry[4:]
1063
1065
1064 ifh.write(entry)
1066 ifh.write(entry)
1065
1067
1066 if self.inlinedata():
1068 if self.inlinedata():
1067 ifh.write(data[0])
1069 ifh.write(data[0])
1068 ifh.write(data[1])
1070 ifh.write(data[1])
1069 self.checkinlinesize(transaction, ifh)
1071 self.checkinlinesize(transaction, ifh)
1070
1072
1071 self.cache = (node, n, text)
1073 self.cache = (node, n, text)
1072 return node
1074 return node
1073
1075
1074 def ancestor(self, a, b):
1076 def ancestor(self, a, b):
1075 """calculate the least common ancestor of nodes a and b"""
1077 """calculate the least common ancestor of nodes a and b"""
1076
1078
1077 def parents(rev):
1079 def parents(rev):
1078 return [p for p in self.parentrevs(rev) if p != nullrev]
1080 return [p for p in self.parentrevs(rev) if p != nullrev]
1079
1081
1080 c = ancestor.ancestor(self.rev(a), self.rev(b), parents)
1082 c = ancestor.ancestor(self.rev(a), self.rev(b), parents)
1081 if c is None:
1083 if c is None:
1082 return nullid
1084 return nullid
1083
1085
1084 return self.node(c)
1086 return self.node(c)
1085
1087
1086 def group(self, nodelist, lookup, infocollect=None):
1088 def group(self, nodelist, lookup, infocollect=None):
1087 """calculate a delta group
1089 """calculate a delta group
1088
1090
1089 Given a list of changeset revs, return a set of deltas and
1091 Given a list of changeset revs, return a set of deltas and
1090 metadata corresponding to nodes. the first delta is
1092 metadata corresponding to nodes. the first delta is
1091 parent(nodes[0]) -> nodes[0] the receiver is guaranteed to
1093 parent(nodes[0]) -> nodes[0] the receiver is guaranteed to
1092 have this parent as it has all history before these
1094 have this parent as it has all history before these
1093 changesets. parent is parent[0]
1095 changesets. parent is parent[0]
1094 """
1096 """
1095 revs = [self.rev(n) for n in nodelist]
1097 revs = [self.rev(n) for n in nodelist]
1096
1098
1097 # if we don't have any revisions touched by these changesets, bail
1099 # if we don't have any revisions touched by these changesets, bail
1098 if not revs:
1100 if not revs:
1099 yield changegroup.closechunk()
1101 yield changegroup.closechunk()
1100 return
1102 return
1101
1103
1102 # add the parent of the first rev
1104 # add the parent of the first rev
1103 p = self.parents(self.node(revs[0]))[0]
1105 p = self.parents(self.node(revs[0]))[0]
1104 revs.insert(0, self.rev(p))
1106 revs.insert(0, self.rev(p))
1105
1107
1106 # build deltas
1108 # build deltas
1107 for d in xrange(0, len(revs) - 1):
1109 for d in xrange(0, len(revs) - 1):
1108 a, b = revs[d], revs[d + 1]
1110 a, b = revs[d], revs[d + 1]
1109 nb = self.node(b)
1111 nb = self.node(b)
1110
1112
1111 if infocollect is not None:
1113 if infocollect is not None:
1112 infocollect(nb)
1114 infocollect(nb)
1113
1115
1114 d = self.revdiff(a, b)
1116 d = self.revdiff(a, b)
1115 p = self.parents(nb)
1117 p = self.parents(nb)
1116 meta = nb + p[0] + p[1] + lookup(nb)
1118 meta = nb + p[0] + p[1] + lookup(nb)
1117 yield changegroup.genchunk("%s%s" % (meta, d))
1119 yield changegroup.genchunk("%s%s" % (meta, d))
1118
1120
1119 yield changegroup.closechunk()
1121 yield changegroup.closechunk()
1120
1122
1121 def addgroup(self, revs, linkmapper, transaction, unique=0):
1123 def addgroup(self, revs, linkmapper, transaction, unique=0):
1122 """
1124 """
1123 add a delta group
1125 add a delta group
1124
1126
1125 given a set of deltas, add them to the revision log. the
1127 given a set of deltas, add them to the revision log. the
1126 first delta is against its parent, which should be in our
1128 first delta is against its parent, which should be in our
1127 log, the rest are against the previous delta.
1129 log, the rest are against the previous delta.
1128 """
1130 """
1129
1131
1130 #track the base of the current delta log
1132 #track the base of the current delta log
1131 r = self.count()
1133 r = self.count()
1132 t = r - 1
1134 t = r - 1
1133 node = None
1135 node = None
1134
1136
1135 base = prev = nullrev
1137 base = prev = nullrev
1136 start = end = textlen = 0
1138 start = end = textlen = 0
1137 if r:
1139 if r:
1138 end = self.end(t)
1140 end = self.end(t)
1139
1141
1140 ifh = self.opener(self.indexfile, "a+")
1142 ifh = self.opener(self.indexfile, "a+")
1141 ifh.seek(0, 2)
1143 ifh.seek(0, 2)
1142 transaction.add(self.indexfile, ifh.tell(), self.count())
1144 transaction.add(self.indexfile, ifh.tell(), self.count())
1143 if self.inlinedata():
1145 if self.inlinedata():
1144 dfh = None
1146 dfh = None
1145 else:
1147 else:
1146 transaction.add(self.datafile, end)
1148 transaction.add(self.datafile, end)
1147 dfh = self.opener(self.datafile, "a")
1149 dfh = self.opener(self.datafile, "a")
1148
1150
1149 # loop through our set of deltas
1151 # loop through our set of deltas
1150 chain = None
1152 chain = None
1151 for chunk in revs:
1153 for chunk in revs:
1152 node, p1, p2, cs = struct.unpack("20s20s20s20s", chunk[:80])
1154 node, p1, p2, cs = struct.unpack("20s20s20s20s", chunk[:80])
1153 link = linkmapper(cs)
1155 link = linkmapper(cs)
1154 if node in self.nodemap:
1156 if node in self.nodemap:
1155 # this can happen if two branches make the same change
1157 # this can happen if two branches make the same change
1156 # if unique:
1158 # if unique:
1157 # raise RevlogError(_("already have %s") % hex(node[:4]))
1159 # raise RevlogError(_("already have %s") % hex(node[:4]))
1158 chain = node
1160 chain = node
1159 continue
1161 continue
1160 delta = chunk[80:]
1162 delta = chunk[80:]
1161
1163
1162 for p in (p1, p2):
1164 for p in (p1, p2):
1163 if not p in self.nodemap:
1165 if not p in self.nodemap:
1164 raise LookupError(_("unknown parent %s") % short(p))
1166 raise LookupError(_("unknown parent %s") % short(p))
1165
1167
1166 if not chain:
1168 if not chain:
1167 # retrieve the parent revision of the delta chain
1169 # retrieve the parent revision of the delta chain
1168 chain = p1
1170 chain = p1
1169 if not chain in self.nodemap:
1171 if not chain in self.nodemap:
1170 raise LookupError(_("unknown base %s") % short(chain[:4]))
1172 raise LookupError(_("unknown base %s") % short(chain[:4]))
1171
1173
1172 # full versions are inserted when the needed deltas become
1174 # full versions are inserted when the needed deltas become
1173 # comparable to the uncompressed text or when the previous
1175 # comparable to the uncompressed text or when the previous
1174 # version is not the one we have a delta against. We use
1176 # version is not the one we have a delta against. We use
1175 # the size of the previous full rev as a proxy for the
1177 # the size of the previous full rev as a proxy for the
1176 # current size.
1178 # current size.
1177
1179
1178 if chain == prev:
1180 if chain == prev:
1179 tempd = compress(delta)
1181 tempd = compress(delta)
1180 cdelta = tempd[0] + tempd[1]
1182 cdelta = tempd[0] + tempd[1]
1181 textlen = mdiff.patchedsize(textlen, delta)
1183 textlen = mdiff.patchedsize(textlen, delta)
1182
1184
1183 if chain != prev or (end - start + len(cdelta)) > textlen * 2:
1185 if chain != prev or (end - start + len(cdelta)) > textlen * 2:
1184 # flush our writes here so we can read it in revision
1186 # flush our writes here so we can read it in revision
1185 if dfh:
1187 if dfh:
1186 dfh.flush()
1188 dfh.flush()
1187 ifh.flush()
1189 ifh.flush()
1188 text = self.revision(chain)
1190 text = self.revision(chain)
1189 text = self.patches(text, [delta])
1191 text = self.patches(text, [delta])
1190 chk = self._addrevision(text, transaction, link, p1, p2, None,
1192 chk = self._addrevision(text, transaction, link, p1, p2, None,
1191 ifh, dfh)
1193 ifh, dfh)
1192 if not dfh and not self.inlinedata():
1194 if not dfh and not self.inlinedata():
1193 # addrevision switched from inline to conventional
1195 # addrevision switched from inline to conventional
1194 # reopen the index
1196 # reopen the index
1195 dfh = self.opener(self.datafile, "a")
1197 dfh = self.opener(self.datafile, "a")
1196 ifh = self.opener(self.indexfile, "a")
1198 ifh = self.opener(self.indexfile, "a")
1197 if chk != node:
1199 if chk != node:
1198 raise RevlogError(_("consistency error adding group"))
1200 raise RevlogError(_("consistency error adding group"))
1199 textlen = len(text)
1201 textlen = len(text)
1200 else:
1202 else:
1201 if self.version == REVLOGV0:
1203 if self.version == REVLOGV0:
1202 e = (end, len(cdelta), base, link, p1, p2, node)
1204 e = (end, len(cdelta), base, link, p1, p2, node)
1203 else:
1205 else:
1204 e = (self.offset_type(end, 0), len(cdelta), textlen, base,
1206 e = (self.offset_type(end, 0), len(cdelta), textlen, base,
1205 link, self.rev(p1), self.rev(p2), node)
1207 link, self.rev(p1), self.rev(p2), node)
1206 self.index.append(e)
1208 self.index.append(e)
1207 self.nodemap[node] = r
1209 self.nodemap[node] = r
1208 if self.inlinedata():
1210 if self.inlinedata():
1209 ifh.write(struct.pack(self.indexformat, *e))
1211 ifh.write(struct.pack(self.indexformat, *e))
1210 ifh.write(cdelta)
1212 ifh.write(cdelta)
1211 self.checkinlinesize(transaction, ifh)
1213 self.checkinlinesize(transaction, ifh)
1212 if not self.inlinedata():
1214 if not self.inlinedata():
1213 dfh = self.opener(self.datafile, "a")
1215 dfh = self.opener(self.datafile, "a")
1214 ifh = self.opener(self.indexfile, "a")
1216 ifh = self.opener(self.indexfile, "a")
1215 else:
1217 else:
1216 dfh.write(cdelta)
1218 dfh.write(cdelta)
1217 ifh.write(struct.pack(self.indexformat, *e))
1219 ifh.write(struct.pack(self.indexformat, *e))
1218
1220
1219 t, r, chain, prev = r, r + 1, node, node
1221 t, r, chain, prev = r, r + 1, node, node
1220 base = self.base(t)
1222 base = self.base(t)
1221 start = self.start(base)
1223 start = self.start(base)
1222 end = self.end(t)
1224 end = self.end(t)
1223
1225
1224 return node
1226 return node
1225
1227
1226 def strip(self, rev, minlink):
1228 def strip(self, rev, minlink):
1227 if self.count() == 0 or rev >= self.count():
1229 if self.count() == 0 or rev >= self.count():
1228 return
1230 return
1229
1231
1230 if isinstance(self.index, lazyindex):
1232 if isinstance(self.index, lazyindex):
1231 self.loadindexmap()
1233 self.loadindexmap()
1232
1234
1233 # When stripping away a revision, we need to make sure it
1235 # When stripping away a revision, we need to make sure it
1234 # does not actually belong to an older changeset.
1236 # does not actually belong to an older changeset.
1235 # The minlink parameter defines the oldest revision
1237 # The minlink parameter defines the oldest revision
1236 # we're allowed to strip away.
1238 # we're allowed to strip away.
1237 while minlink > self.index[rev][-4]:
1239 while minlink > self.index[rev][-4]:
1238 rev += 1
1240 rev += 1
1239 if rev >= self.count():
1241 if rev >= self.count():
1240 return
1242 return
1241
1243
1242 # first truncate the files on disk
1244 # first truncate the files on disk
1243 end = self.start(rev)
1245 end = self.start(rev)
1244 if not self.inlinedata():
1246 if not self.inlinedata():
1245 df = self.opener(self.datafile, "a")
1247 df = self.opener(self.datafile, "a")
1246 df.truncate(end)
1248 df.truncate(end)
1247 end = rev * struct.calcsize(self.indexformat)
1249 end = rev * struct.calcsize(self.indexformat)
1248 else:
1250 else:
1249 end += rev * struct.calcsize(self.indexformat)
1251 end += rev * struct.calcsize(self.indexformat)
1250
1252
1251 indexf = self.opener(self.indexfile, "a")
1253 indexf = self.opener(self.indexfile, "a")
1252 indexf.truncate(end)
1254 indexf.truncate(end)
1253
1255
1254 # then reset internal state in memory to forget those revisions
1256 # then reset internal state in memory to forget those revisions
1255 self.cache = None
1257 self.cache = None
1256 self.chunkcache = None
1258 self.chunkcache = None
1257 for x in xrange(rev, self.count()):
1259 for x in xrange(rev, self.count()):
1258 del self.nodemap[self.node(x)]
1260 del self.nodemap[self.node(x)]
1259
1261
1260 del self.index[rev:]
1262 del self.index[rev:]
1261
1263
1262 def checksize(self):
1264 def checksize(self):
1263 expected = 0
1265 expected = 0
1264 if self.count():
1266 if self.count():
1265 expected = self.end(self.count() - 1)
1267 expected = self.end(self.count() - 1)
1266
1268
1267 try:
1269 try:
1268 f = self.opener(self.datafile)
1270 f = self.opener(self.datafile)
1269 f.seek(0, 2)
1271 f.seek(0, 2)
1270 actual = f.tell()
1272 actual = f.tell()
1271 dd = actual - expected
1273 dd = actual - expected
1272 except IOError, inst:
1274 except IOError, inst:
1273 if inst.errno != errno.ENOENT:
1275 if inst.errno != errno.ENOENT:
1274 raise
1276 raise
1275 dd = 0
1277 dd = 0
1276
1278
1277 try:
1279 try:
1278 f = self.opener(self.indexfile)
1280 f = self.opener(self.indexfile)
1279 f.seek(0, 2)
1281 f.seek(0, 2)
1280 actual = f.tell()
1282 actual = f.tell()
1281 s = struct.calcsize(self.indexformat)
1283 s = struct.calcsize(self.indexformat)
1282 i = actual / s
1284 i = actual / s
1283 di = actual - (i * s)
1285 di = actual - (i * s)
1284 if self.inlinedata():
1286 if self.inlinedata():
1285 databytes = 0
1287 databytes = 0
1286 for r in xrange(self.count()):
1288 for r in xrange(self.count()):
1287 databytes += self.length(r)
1289 databytes += self.length(r)
1288 dd = 0
1290 dd = 0
1289 di = actual - self.count() * s - databytes
1291 di = actual - self.count() * s - databytes
1290 except IOError, inst:
1292 except IOError, inst:
1291 if inst.errno != errno.ENOENT:
1293 if inst.errno != errno.ENOENT:
1292 raise
1294 raise
1293 di = 0
1295 di = 0
1294
1296
1295 return (dd, di)
1297 return (dd, di)
1296
1298
1297
1299
@@ -1,204 +1,204 b''
1 # sshserver.py - ssh protocol server support for mercurial
1 # sshserver.py - ssh protocol server support for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 #
5 #
6 # This software may be used and distributed according to the terms
6 # This software may be used and distributed according to the terms
7 # of the GNU General Public License, incorporated herein by reference.
7 # of the GNU General Public License, incorporated herein by reference.
8
8
9 from i18n import _
9 from i18n import _
10 from node import *
10 from node import *
11 import os, streamclone, sys, tempfile, util
11 import os, streamclone, sys, tempfile, util
12
12
13 class sshserver(object):
13 class sshserver(object):
14 def __init__(self, ui, repo):
14 def __init__(self, ui, repo):
15 self.ui = ui
15 self.ui = ui
16 self.repo = repo
16 self.repo = repo
17 self.lock = None
17 self.lock = None
18 self.fin = sys.stdin
18 self.fin = sys.stdin
19 self.fout = sys.stdout
19 self.fout = sys.stdout
20
20
21 sys.stdout = sys.stderr
21 sys.stdout = sys.stderr
22
22
23 # Prevent insertion/deletion of CRs
23 # Prevent insertion/deletion of CRs
24 util.set_binary(self.fin)
24 util.set_binary(self.fin)
25 util.set_binary(self.fout)
25 util.set_binary(self.fout)
26
26
27 def getarg(self):
27 def getarg(self):
28 argline = self.fin.readline()[:-1]
28 argline = self.fin.readline()[:-1]
29 arg, l = argline.split()
29 arg, l = argline.split()
30 val = self.fin.read(int(l))
30 val = self.fin.read(int(l))
31 return arg, val
31 return arg, val
32
32
33 def respond(self, v):
33 def respond(self, v):
34 self.fout.write("%d\n" % len(v))
34 self.fout.write("%d\n" % len(v))
35 self.fout.write(v)
35 self.fout.write(v)
36 self.fout.flush()
36 self.fout.flush()
37
37
38 def serve_forever(self):
38 def serve_forever(self):
39 while self.serve_one(): pass
39 while self.serve_one(): pass
40 sys.exit(0)
40 sys.exit(0)
41
41
42 def serve_one(self):
42 def serve_one(self):
43 cmd = self.fin.readline()[:-1]
43 cmd = self.fin.readline()[:-1]
44 if cmd:
44 if cmd:
45 impl = getattr(self, 'do_' + cmd, None)
45 impl = getattr(self, 'do_' + cmd, None)
46 if impl: impl()
46 if impl: impl()
47 else: self.respond("")
47 else: self.respond("")
48 return cmd != ''
48 return cmd != ''
49
49
50 def do_lookup(self):
50 def do_lookup(self):
51 arg, key = self.getarg()
51 arg, key = self.getarg()
52 assert arg == 'key'
52 assert arg == 'key'
53 try:
53 try:
54 r = hex(self.repo.lookup(key))
54 r = hex(self.repo.lookup(key))
55 success = 1
55 success = 1
56 except Exception,inst:
56 except Exception,inst:
57 r = str(inst)
57 r = str(inst)
58 success = 0
58 success = 0
59 self.respond("%s %s\n" % (success, r))
59 self.respond("%s %s\n" % (success, r))
60
60
61 def do_heads(self):
61 def do_heads(self):
62 h = self.repo.heads()
62 h = self.repo.heads()
63 self.respond(" ".join(map(hex, h)) + "\n")
63 self.respond(" ".join(map(hex, h)) + "\n")
64
64
65 def do_hello(self):
65 def do_hello(self):
66 '''the hello command returns a set of lines describing various
66 '''the hello command returns a set of lines describing various
67 interesting things about the server, in an RFC822-like format.
67 interesting things about the server, in an RFC822-like format.
68 Currently the only one defined is "capabilities", which
68 Currently the only one defined is "capabilities", which
69 consists of a line in the form:
69 consists of a line in the form:
70
70
71 capabilities: space separated list of tokens
71 capabilities: space separated list of tokens
72 '''
72 '''
73
73
74 caps = ['unbundle', 'lookup', 'changegroupsubset']
74 caps = ['unbundle', 'lookup', 'changegroupsubset']
75 if self.ui.configbool('server', 'uncompressed'):
75 if self.ui.configbool('server', 'uncompressed'):
76 caps.append('stream=%d' % self.repo.revlogversion)
76 caps.append('stream=%d' % self.repo.changelog.version)
77 self.respond("capabilities: %s\n" % (' '.join(caps),))
77 self.respond("capabilities: %s\n" % (' '.join(caps),))
78
78
79 def do_lock(self):
79 def do_lock(self):
80 '''DEPRECATED - allowing remote client to lock repo is not safe'''
80 '''DEPRECATED - allowing remote client to lock repo is not safe'''
81
81
82 self.lock = self.repo.lock()
82 self.lock = self.repo.lock()
83 self.respond("")
83 self.respond("")
84
84
85 def do_unlock(self):
85 def do_unlock(self):
86 '''DEPRECATED'''
86 '''DEPRECATED'''
87
87
88 if self.lock:
88 if self.lock:
89 self.lock.release()
89 self.lock.release()
90 self.lock = None
90 self.lock = None
91 self.respond("")
91 self.respond("")
92
92
93 def do_branches(self):
93 def do_branches(self):
94 arg, nodes = self.getarg()
94 arg, nodes = self.getarg()
95 nodes = map(bin, nodes.split(" "))
95 nodes = map(bin, nodes.split(" "))
96 r = []
96 r = []
97 for b in self.repo.branches(nodes):
97 for b in self.repo.branches(nodes):
98 r.append(" ".join(map(hex, b)) + "\n")
98 r.append(" ".join(map(hex, b)) + "\n")
99 self.respond("".join(r))
99 self.respond("".join(r))
100
100
101 def do_between(self):
101 def do_between(self):
102 arg, pairs = self.getarg()
102 arg, pairs = self.getarg()
103 pairs = [map(bin, p.split("-")) for p in pairs.split(" ")]
103 pairs = [map(bin, p.split("-")) for p in pairs.split(" ")]
104 r = []
104 r = []
105 for b in self.repo.between(pairs):
105 for b in self.repo.between(pairs):
106 r.append(" ".join(map(hex, b)) + "\n")
106 r.append(" ".join(map(hex, b)) + "\n")
107 self.respond("".join(r))
107 self.respond("".join(r))
108
108
109 def do_changegroup(self):
109 def do_changegroup(self):
110 nodes = []
110 nodes = []
111 arg, roots = self.getarg()
111 arg, roots = self.getarg()
112 nodes = map(bin, roots.split(" "))
112 nodes = map(bin, roots.split(" "))
113
113
114 cg = self.repo.changegroup(nodes, 'serve')
114 cg = self.repo.changegroup(nodes, 'serve')
115 while True:
115 while True:
116 d = cg.read(4096)
116 d = cg.read(4096)
117 if not d:
117 if not d:
118 break
118 break
119 self.fout.write(d)
119 self.fout.write(d)
120
120
121 self.fout.flush()
121 self.fout.flush()
122
122
123 def do_changegroupsubset(self):
123 def do_changegroupsubset(self):
124 bases = []
124 bases = []
125 heads = []
125 heads = []
126 argmap = dict([self.getarg(), self.getarg()])
126 argmap = dict([self.getarg(), self.getarg()])
127 bases = [bin(n) for n in argmap['bases'].split(' ')]
127 bases = [bin(n) for n in argmap['bases'].split(' ')]
128 heads = [bin(n) for n in argmap['heads'].split(' ')]
128 heads = [bin(n) for n in argmap['heads'].split(' ')]
129
129
130 cg = self.repo.changegroupsubset(bases, heads, 'serve')
130 cg = self.repo.changegroupsubset(bases, heads, 'serve')
131 while True:
131 while True:
132 d = cg.read(4096)
132 d = cg.read(4096)
133 if not d:
133 if not d:
134 break
134 break
135 self.fout.write(d)
135 self.fout.write(d)
136
136
137 self.fout.flush()
137 self.fout.flush()
138
138
139 def do_addchangegroup(self):
139 def do_addchangegroup(self):
140 '''DEPRECATED'''
140 '''DEPRECATED'''
141
141
142 if not self.lock:
142 if not self.lock:
143 self.respond("not locked")
143 self.respond("not locked")
144 return
144 return
145
145
146 self.respond("")
146 self.respond("")
147 r = self.repo.addchangegroup(self.fin, 'serve', self.client_url())
147 r = self.repo.addchangegroup(self.fin, 'serve', self.client_url())
148 self.respond(str(r))
148 self.respond(str(r))
149
149
150 def client_url(self):
150 def client_url(self):
151 client = os.environ.get('SSH_CLIENT', '').split(' ', 1)[0]
151 client = os.environ.get('SSH_CLIENT', '').split(' ', 1)[0]
152 return 'remote:ssh:' + client
152 return 'remote:ssh:' + client
153
153
154 def do_unbundle(self):
154 def do_unbundle(self):
155 their_heads = self.getarg()[1].split()
155 their_heads = self.getarg()[1].split()
156
156
157 def check_heads():
157 def check_heads():
158 heads = map(hex, self.repo.heads())
158 heads = map(hex, self.repo.heads())
159 return their_heads == [hex('force')] or their_heads == heads
159 return their_heads == [hex('force')] or their_heads == heads
160
160
161 # fail early if possible
161 # fail early if possible
162 if not check_heads():
162 if not check_heads():
163 self.respond(_('unsynced changes'))
163 self.respond(_('unsynced changes'))
164 return
164 return
165
165
166 self.respond('')
166 self.respond('')
167
167
168 # write bundle data to temporary file because it can be big
168 # write bundle data to temporary file because it can be big
169
169
170 try:
170 try:
171 fd, tempname = tempfile.mkstemp(prefix='hg-unbundle-')
171 fd, tempname = tempfile.mkstemp(prefix='hg-unbundle-')
172 fp = os.fdopen(fd, 'wb+')
172 fp = os.fdopen(fd, 'wb+')
173
173
174 count = int(self.fin.readline())
174 count = int(self.fin.readline())
175 while count:
175 while count:
176 fp.write(self.fin.read(count))
176 fp.write(self.fin.read(count))
177 count = int(self.fin.readline())
177 count = int(self.fin.readline())
178
178
179 was_locked = self.lock is not None
179 was_locked = self.lock is not None
180 if not was_locked:
180 if not was_locked:
181 self.lock = self.repo.lock()
181 self.lock = self.repo.lock()
182 try:
182 try:
183 if not check_heads():
183 if not check_heads():
184 # someone else committed/pushed/unbundled while we
184 # someone else committed/pushed/unbundled while we
185 # were transferring data
185 # were transferring data
186 self.respond(_('unsynced changes'))
186 self.respond(_('unsynced changes'))
187 return
187 return
188 self.respond('')
188 self.respond('')
189
189
190 # push can proceed
190 # push can proceed
191
191
192 fp.seek(0)
192 fp.seek(0)
193 r = self.repo.addchangegroup(fp, 'serve', self.client_url())
193 r = self.repo.addchangegroup(fp, 'serve', self.client_url())
194 self.respond(str(r))
194 self.respond(str(r))
195 finally:
195 finally:
196 if not was_locked:
196 if not was_locked:
197 self.lock.release()
197 self.lock.release()
198 self.lock = None
198 self.lock = None
199 finally:
199 finally:
200 fp.close()
200 fp.close()
201 os.unlink(tempname)
201 os.unlink(tempname)
202
202
203 def do_stream_out(self):
203 def do_stream_out(self):
204 streamclone.stream_out(self.repo, self.fout)
204 streamclone.stream_out(self.repo, self.fout)
@@ -1,85 +1,84 b''
1 # statichttprepo.py - simple http repository class for mercurial
1 # statichttprepo.py - simple http repository class for mercurial
2 #
2 #
3 # This provides read-only repo access to repositories exported via static http
3 # This provides read-only repo access to repositories exported via static http
4 #
4 #
5 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
5 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
6 #
6 #
7 # This software may be used and distributed according to the terms
7 # This software may be used and distributed according to the terms
8 # of the GNU General Public License, incorporated herein by reference.
8 # of the GNU General Public License, incorporated herein by reference.
9
9
10 from i18n import _
10 from i18n import _
11 import changelog, filelog, httprangereader
11 import changelog, filelog, httprangereader
12 import repo, localrepo, manifest, os, urllib, urllib2, util
12 import repo, localrepo, manifest, os, urllib, urllib2, util
13
13
14 class rangereader(httprangereader.httprangereader):
14 class rangereader(httprangereader.httprangereader):
15 def read(self, size=None):
15 def read(self, size=None):
16 try:
16 try:
17 return httprangereader.httprangereader.read(self, size)
17 return httprangereader.httprangereader.read(self, size)
18 except urllib2.HTTPError, inst:
18 except urllib2.HTTPError, inst:
19 raise IOError(None, inst)
19 raise IOError(None, inst)
20 except urllib2.URLError, inst:
20 except urllib2.URLError, inst:
21 raise IOError(None, inst.reason[1])
21 raise IOError(None, inst.reason[1])
22
22
23 def opener(base):
23 def opener(base):
24 """return a function that opens files over http"""
24 """return a function that opens files over http"""
25 p = base
25 p = base
26 def o(path, mode="r"):
26 def o(path, mode="r"):
27 f = "/".join((p, urllib.quote(path)))
27 f = "/".join((p, urllib.quote(path)))
28 return rangereader(f)
28 return rangereader(f)
29 return o
29 return o
30
30
31 class statichttprepository(localrepo.localrepository):
31 class statichttprepository(localrepo.localrepository):
32 def __init__(self, ui, path):
32 def __init__(self, ui, path):
33 self._url = path
33 self._url = path
34 self.ui = ui
34 self.ui = ui
35 self.revlogversion = 0
36
35
37 self.path = (path + "/.hg")
36 self.path = (path + "/.hg")
38 self.opener = opener(self.path)
37 self.opener = opener(self.path)
39 # find requirements
38 # find requirements
40 try:
39 try:
41 requirements = self.opener("requires").read().splitlines()
40 requirements = self.opener("requires").read().splitlines()
42 except IOError:
41 except IOError:
43 requirements = []
42 requirements = []
44 # check them
43 # check them
45 for r in requirements:
44 for r in requirements:
46 if r not in self.supported:
45 if r not in self.supported:
47 raise repo.RepoError(_("requirement '%s' not supported") % r)
46 raise repo.RepoError(_("requirement '%s' not supported") % r)
48
47
49 # setup store
48 # setup store
50 if "store" in requirements:
49 if "store" in requirements:
51 self.encodefn = util.encodefilename
50 self.encodefn = util.encodefilename
52 self.decodefn = util.decodefilename
51 self.decodefn = util.decodefilename
53 self.spath = self.path + "/store"
52 self.spath = self.path + "/store"
54 else:
53 else:
55 self.encodefn = lambda x: x
54 self.encodefn = lambda x: x
56 self.decodefn = lambda x: x
55 self.decodefn = lambda x: x
57 self.spath = self.path
56 self.spath = self.path
58 self.sopener = util.encodedopener(opener(self.spath), self.encodefn)
57 self.sopener = util.encodedopener(opener(self.spath), self.encodefn)
59
58
60 self.manifest = manifest.manifest(self.sopener)
59 self.manifest = manifest.manifest(self.sopener)
61 self.changelog = changelog.changelog(self.sopener)
60 self.changelog = changelog.changelog(self.sopener)
62 self.tagscache = None
61 self.tagscache = None
63 self.nodetagscache = None
62 self.nodetagscache = None
64 self.encodepats = None
63 self.encodepats = None
65 self.decodepats = None
64 self.decodepats = None
66
65
67 def url(self):
66 def url(self):
68 return 'static-' + self._url
67 return 'static-' + self._url
69
68
70 def dev(self):
69 def dev(self):
71 return -1
70 return -1
72
71
73 def local(self):
72 def local(self):
74 return False
73 return False
75
74
76 def instance(ui, path, create):
75 def instance(ui, path, create):
77 if create:
76 if create:
78 raise util.Abort(_('cannot create new static-http repository'))
77 raise util.Abort(_('cannot create new static-http repository'))
79 if path.startswith('old-http:'):
78 if path.startswith('old-http:'):
80 ui.warn(_("old-http:// syntax is deprecated, "
79 ui.warn(_("old-http:// syntax is deprecated, "
81 "please use static-http:// instead\n"))
80 "please use static-http:// instead\n"))
82 path = path[4:]
81 path = path[4:]
83 else:
82 else:
84 path = path[7:]
83 path = path[7:]
85 return statichttprepository(ui, path)
84 return statichttprepository(ui, path)
@@ -1,456 +1,450 b''
1 # ui.py - user interface bits for mercurial
1 # ui.py - user interface bits for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from i18n import _
8 from i18n import _
9 import errno, getpass, os, re, socket, sys, tempfile
9 import errno, getpass, os, re, socket, sys, tempfile
10 import ConfigParser, traceback, util
10 import ConfigParser, traceback, util
11
11
12 def dupconfig(orig):
12 def dupconfig(orig):
13 new = util.configparser(orig.defaults())
13 new = util.configparser(orig.defaults())
14 updateconfig(orig, new)
14 updateconfig(orig, new)
15 return new
15 return new
16
16
17 def updateconfig(source, dest, sections=None):
17 def updateconfig(source, dest, sections=None):
18 if not sections:
18 if not sections:
19 sections = source.sections()
19 sections = source.sections()
20 for section in sections:
20 for section in sections:
21 if not dest.has_section(section):
21 if not dest.has_section(section):
22 dest.add_section(section)
22 dest.add_section(section)
23 for name, value in source.items(section, raw=True):
23 for name, value in source.items(section, raw=True):
24 dest.set(section, name, value)
24 dest.set(section, name, value)
25
25
26 class ui(object):
26 class ui(object):
27 def __init__(self, verbose=False, debug=False, quiet=False,
27 def __init__(self, verbose=False, debug=False, quiet=False,
28 interactive=True, traceback=False, report_untrusted=True,
28 interactive=True, traceback=False, report_untrusted=True,
29 parentui=None):
29 parentui=None):
30 self.overlay = None
30 self.overlay = None
31 self.buffers = []
31 self.buffers = []
32 if parentui is None:
32 if parentui is None:
33 # this is the parent of all ui children
33 # this is the parent of all ui children
34 self.parentui = None
34 self.parentui = None
35 self.readhooks = []
35 self.readhooks = []
36 self.quiet = quiet
36 self.quiet = quiet
37 self.verbose = verbose
37 self.verbose = verbose
38 self.debugflag = debug
38 self.debugflag = debug
39 self.interactive = interactive
39 self.interactive = interactive
40 self.traceback = traceback
40 self.traceback = traceback
41 self.report_untrusted = report_untrusted
41 self.report_untrusted = report_untrusted
42 self.trusted_users = {}
42 self.trusted_users = {}
43 self.trusted_groups = {}
43 self.trusted_groups = {}
44 # if ucdata is not None, its keys must be a superset of cdata's
44 # if ucdata is not None, its keys must be a superset of cdata's
45 self.cdata = util.configparser()
45 self.cdata = util.configparser()
46 self.ucdata = None
46 self.ucdata = None
47 # we always trust global config files
47 # we always trust global config files
48 self.check_trusted = False
48 self.check_trusted = False
49 self.readconfig(util.rcpath())
49 self.readconfig(util.rcpath())
50 self.check_trusted = True
50 self.check_trusted = True
51 self.updateopts(verbose, debug, quiet, interactive)
51 self.updateopts(verbose, debug, quiet, interactive)
52 else:
52 else:
53 # parentui may point to an ui object which is already a child
53 # parentui may point to an ui object which is already a child
54 self.parentui = parentui.parentui or parentui
54 self.parentui = parentui.parentui or parentui
55 self.readhooks = self.parentui.readhooks[:]
55 self.readhooks = self.parentui.readhooks[:]
56 self.trusted_users = parentui.trusted_users.copy()
56 self.trusted_users = parentui.trusted_users.copy()
57 self.trusted_groups = parentui.trusted_groups.copy()
57 self.trusted_groups = parentui.trusted_groups.copy()
58 self.cdata = dupconfig(self.parentui.cdata)
58 self.cdata = dupconfig(self.parentui.cdata)
59 if self.parentui.ucdata:
59 if self.parentui.ucdata:
60 self.ucdata = dupconfig(self.parentui.ucdata)
60 self.ucdata = dupconfig(self.parentui.ucdata)
61 if self.parentui.overlay:
61 if self.parentui.overlay:
62 self.overlay = dupconfig(self.parentui.overlay)
62 self.overlay = dupconfig(self.parentui.overlay)
63
63
64 def __getattr__(self, key):
64 def __getattr__(self, key):
65 return getattr(self.parentui, key)
65 return getattr(self.parentui, key)
66
66
67 def updateopts(self, verbose=False, debug=False, quiet=False,
67 def updateopts(self, verbose=False, debug=False, quiet=False,
68 interactive=True, traceback=False, config=[]):
68 interactive=True, traceback=False, config=[]):
69 for section, name, value in config:
69 for section, name, value in config:
70 self.setconfig(section, name, value)
70 self.setconfig(section, name, value)
71
71
72 if quiet or verbose or debug:
72 if quiet or verbose or debug:
73 self.setconfig('ui', 'quiet', str(bool(quiet)))
73 self.setconfig('ui', 'quiet', str(bool(quiet)))
74 self.setconfig('ui', 'verbose', str(bool(verbose)))
74 self.setconfig('ui', 'verbose', str(bool(verbose)))
75 self.setconfig('ui', 'debug', str(bool(debug)))
75 self.setconfig('ui', 'debug', str(bool(debug)))
76
76
77 self.verbosity_constraints()
77 self.verbosity_constraints()
78
78
79 if not interactive:
79 if not interactive:
80 self.setconfig('ui', 'interactive', 'False')
80 self.setconfig('ui', 'interactive', 'False')
81 self.interactive = False
81 self.interactive = False
82
82
83 self.traceback = self.traceback or traceback
83 self.traceback = self.traceback or traceback
84
84
85 def verbosity_constraints(self):
85 def verbosity_constraints(self):
86 self.quiet = self.configbool('ui', 'quiet')
86 self.quiet = self.configbool('ui', 'quiet')
87 self.verbose = self.configbool('ui', 'verbose')
87 self.verbose = self.configbool('ui', 'verbose')
88 self.debugflag = self.configbool('ui', 'debug')
88 self.debugflag = self.configbool('ui', 'debug')
89
89
90 if self.debugflag:
90 if self.debugflag:
91 self.verbose = True
91 self.verbose = True
92 self.quiet = False
92 self.quiet = False
93 elif self.verbose and self.quiet:
93 elif self.verbose and self.quiet:
94 self.quiet = self.verbose = False
94 self.quiet = self.verbose = False
95
95
96 def _is_trusted(self, fp, f, warn=True):
96 def _is_trusted(self, fp, f, warn=True):
97 if not self.check_trusted:
97 if not self.check_trusted:
98 return True
98 return True
99 st = util.fstat(fp)
99 st = util.fstat(fp)
100 if util.isowner(fp, st):
100 if util.isowner(fp, st):
101 return True
101 return True
102 tusers = self.trusted_users
102 tusers = self.trusted_users
103 tgroups = self.trusted_groups
103 tgroups = self.trusted_groups
104 if not tusers:
104 if not tusers:
105 user = util.username()
105 user = util.username()
106 if user is not None:
106 if user is not None:
107 self.trusted_users[user] = 1
107 self.trusted_users[user] = 1
108 self.fixconfig(section='trusted')
108 self.fixconfig(section='trusted')
109 if (tusers or tgroups) and '*' not in tusers and '*' not in tgroups:
109 if (tusers or tgroups) and '*' not in tusers and '*' not in tgroups:
110 user = util.username(st.st_uid)
110 user = util.username(st.st_uid)
111 group = util.groupname(st.st_gid)
111 group = util.groupname(st.st_gid)
112 if user not in tusers and group not in tgroups:
112 if user not in tusers and group not in tgroups:
113 if warn and self.report_untrusted:
113 if warn and self.report_untrusted:
114 self.warn(_('Not trusting file %s from untrusted '
114 self.warn(_('Not trusting file %s from untrusted '
115 'user %s, group %s\n') % (f, user, group))
115 'user %s, group %s\n') % (f, user, group))
116 return False
116 return False
117 return True
117 return True
118
118
119 def readconfig(self, fn, root=None):
119 def readconfig(self, fn, root=None):
120 if isinstance(fn, basestring):
120 if isinstance(fn, basestring):
121 fn = [fn]
121 fn = [fn]
122 for f in fn:
122 for f in fn:
123 try:
123 try:
124 fp = open(f)
124 fp = open(f)
125 except IOError:
125 except IOError:
126 continue
126 continue
127 cdata = self.cdata
127 cdata = self.cdata
128 trusted = self._is_trusted(fp, f)
128 trusted = self._is_trusted(fp, f)
129 if not trusted:
129 if not trusted:
130 if self.ucdata is None:
130 if self.ucdata is None:
131 self.ucdata = dupconfig(self.cdata)
131 self.ucdata = dupconfig(self.cdata)
132 cdata = self.ucdata
132 cdata = self.ucdata
133 elif self.ucdata is not None:
133 elif self.ucdata is not None:
134 # use a separate configparser, so that we don't accidentally
134 # use a separate configparser, so that we don't accidentally
135 # override ucdata settings later on.
135 # override ucdata settings later on.
136 cdata = util.configparser()
136 cdata = util.configparser()
137
137
138 try:
138 try:
139 cdata.readfp(fp, f)
139 cdata.readfp(fp, f)
140 except ConfigParser.ParsingError, inst:
140 except ConfigParser.ParsingError, inst:
141 msg = _("Failed to parse %s\n%s") % (f, inst)
141 msg = _("Failed to parse %s\n%s") % (f, inst)
142 if trusted:
142 if trusted:
143 raise util.Abort(msg)
143 raise util.Abort(msg)
144 self.warn(_("Ignored: %s\n") % msg)
144 self.warn(_("Ignored: %s\n") % msg)
145
145
146 if trusted:
146 if trusted:
147 if cdata != self.cdata:
147 if cdata != self.cdata:
148 updateconfig(cdata, self.cdata)
148 updateconfig(cdata, self.cdata)
149 if self.ucdata is not None:
149 if self.ucdata is not None:
150 updateconfig(cdata, self.ucdata)
150 updateconfig(cdata, self.ucdata)
151 # override data from config files with data set with ui.setconfig
151 # override data from config files with data set with ui.setconfig
152 if self.overlay:
152 if self.overlay:
153 updateconfig(self.overlay, self.cdata)
153 updateconfig(self.overlay, self.cdata)
154 if root is None:
154 if root is None:
155 root = os.path.expanduser('~')
155 root = os.path.expanduser('~')
156 self.fixconfig(root=root)
156 self.fixconfig(root=root)
157 for hook in self.readhooks:
157 for hook in self.readhooks:
158 hook(self)
158 hook(self)
159
159
160 def addreadhook(self, hook):
160 def addreadhook(self, hook):
161 self.readhooks.append(hook)
161 self.readhooks.append(hook)
162
162
163 def readsections(self, filename, *sections):
163 def readsections(self, filename, *sections):
164 """Read filename and add only the specified sections to the config data
164 """Read filename and add only the specified sections to the config data
165
165
166 The settings are added to the trusted config data.
166 The settings are added to the trusted config data.
167 """
167 """
168 if not sections:
168 if not sections:
169 return
169 return
170
170
171 cdata = util.configparser()
171 cdata = util.configparser()
172 try:
172 try:
173 cdata.read(filename)
173 cdata.read(filename)
174 except ConfigParser.ParsingError, inst:
174 except ConfigParser.ParsingError, inst:
175 raise util.Abort(_("failed to parse %s\n%s") % (filename,
175 raise util.Abort(_("failed to parse %s\n%s") % (filename,
176 inst))
176 inst))
177
177
178 for section in sections:
178 for section in sections:
179 if not cdata.has_section(section):
179 if not cdata.has_section(section):
180 cdata.add_section(section)
180 cdata.add_section(section)
181
181
182 updateconfig(cdata, self.cdata, sections)
182 updateconfig(cdata, self.cdata, sections)
183 if self.ucdata:
183 if self.ucdata:
184 updateconfig(cdata, self.ucdata, sections)
184 updateconfig(cdata, self.ucdata, sections)
185
185
186 def fixconfig(self, section=None, name=None, value=None, root=None):
186 def fixconfig(self, section=None, name=None, value=None, root=None):
187 # translate paths relative to root (or home) into absolute paths
187 # translate paths relative to root (or home) into absolute paths
188 if section is None or section == 'paths':
188 if section is None or section == 'paths':
189 if root is None:
189 if root is None:
190 root = os.getcwd()
190 root = os.getcwd()
191 items = section and [(name, value)] or []
191 items = section and [(name, value)] or []
192 for cdata in self.cdata, self.ucdata, self.overlay:
192 for cdata in self.cdata, self.ucdata, self.overlay:
193 if not cdata: continue
193 if not cdata: continue
194 if not items and cdata.has_section('paths'):
194 if not items and cdata.has_section('paths'):
195 pathsitems = cdata.items('paths')
195 pathsitems = cdata.items('paths')
196 else:
196 else:
197 pathsitems = items
197 pathsitems = items
198 for n, path in pathsitems:
198 for n, path in pathsitems:
199 if path and "://" not in path and not os.path.isabs(path):
199 if path and "://" not in path and not os.path.isabs(path):
200 cdata.set("paths", n, os.path.join(root, path))
200 cdata.set("paths", n, os.path.join(root, path))
201
201
202 # update quiet/verbose/debug and interactive status
202 # update quiet/verbose/debug and interactive status
203 if section is None or section == 'ui':
203 if section is None or section == 'ui':
204 if name is None or name in ('quiet', 'verbose', 'debug'):
204 if name is None or name in ('quiet', 'verbose', 'debug'):
205 self.verbosity_constraints()
205 self.verbosity_constraints()
206
206
207 if name is None or name == 'interactive':
207 if name is None or name == 'interactive':
208 self.interactive = self.configbool("ui", "interactive", True)
208 self.interactive = self.configbool("ui", "interactive", True)
209
209
210 # update trust information
210 # update trust information
211 if (section is None or section == 'trusted') and self.trusted_users:
211 if (section is None or section == 'trusted') and self.trusted_users:
212 for user in self.configlist('trusted', 'users'):
212 for user in self.configlist('trusted', 'users'):
213 self.trusted_users[user] = 1
213 self.trusted_users[user] = 1
214 for group in self.configlist('trusted', 'groups'):
214 for group in self.configlist('trusted', 'groups'):
215 self.trusted_groups[group] = 1
215 self.trusted_groups[group] = 1
216
216
217 def setconfig(self, section, name, value):
217 def setconfig(self, section, name, value):
218 if not self.overlay:
218 if not self.overlay:
219 self.overlay = util.configparser()
219 self.overlay = util.configparser()
220 for cdata in (self.overlay, self.cdata, self.ucdata):
220 for cdata in (self.overlay, self.cdata, self.ucdata):
221 if not cdata: continue
221 if not cdata: continue
222 if not cdata.has_section(section):
222 if not cdata.has_section(section):
223 cdata.add_section(section)
223 cdata.add_section(section)
224 cdata.set(section, name, value)
224 cdata.set(section, name, value)
225 self.fixconfig(section, name, value)
225 self.fixconfig(section, name, value)
226
226
227 def _get_cdata(self, untrusted):
227 def _get_cdata(self, untrusted):
228 if untrusted and self.ucdata:
228 if untrusted and self.ucdata:
229 return self.ucdata
229 return self.ucdata
230 return self.cdata
230 return self.cdata
231
231
232 def _config(self, section, name, default, funcname, untrusted, abort):
232 def _config(self, section, name, default, funcname, untrusted, abort):
233 cdata = self._get_cdata(untrusted)
233 cdata = self._get_cdata(untrusted)
234 if cdata.has_option(section, name):
234 if cdata.has_option(section, name):
235 try:
235 try:
236 func = getattr(cdata, funcname)
236 func = getattr(cdata, funcname)
237 return func(section, name)
237 return func(section, name)
238 except ConfigParser.InterpolationError, inst:
238 except ConfigParser.InterpolationError, inst:
239 msg = _("Error in configuration section [%s] "
239 msg = _("Error in configuration section [%s] "
240 "parameter '%s':\n%s") % (section, name, inst)
240 "parameter '%s':\n%s") % (section, name, inst)
241 if abort:
241 if abort:
242 raise util.Abort(msg)
242 raise util.Abort(msg)
243 self.warn(_("Ignored: %s\n") % msg)
243 self.warn(_("Ignored: %s\n") % msg)
244 return default
244 return default
245
245
246 def _configcommon(self, section, name, default, funcname, untrusted):
246 def _configcommon(self, section, name, default, funcname, untrusted):
247 value = self._config(section, name, default, funcname,
247 value = self._config(section, name, default, funcname,
248 untrusted, abort=True)
248 untrusted, abort=True)
249 if self.debugflag and not untrusted and self.ucdata:
249 if self.debugflag and not untrusted and self.ucdata:
250 uvalue = self._config(section, name, None, funcname,
250 uvalue = self._config(section, name, None, funcname,
251 untrusted=True, abort=False)
251 untrusted=True, abort=False)
252 if uvalue is not None and uvalue != value:
252 if uvalue is not None and uvalue != value:
253 self.warn(_("Ignoring untrusted configuration option "
253 self.warn(_("Ignoring untrusted configuration option "
254 "%s.%s = %s\n") % (section, name, uvalue))
254 "%s.%s = %s\n") % (section, name, uvalue))
255 return value
255 return value
256
256
257 def config(self, section, name, default=None, untrusted=False):
257 def config(self, section, name, default=None, untrusted=False):
258 return self._configcommon(section, name, default, 'get', untrusted)
258 return self._configcommon(section, name, default, 'get', untrusted)
259
259
260 def configbool(self, section, name, default=False, untrusted=False):
260 def configbool(self, section, name, default=False, untrusted=False):
261 return self._configcommon(section, name, default, 'getboolean',
261 return self._configcommon(section, name, default, 'getboolean',
262 untrusted)
262 untrusted)
263
263
264 def configlist(self, section, name, default=None, untrusted=False):
264 def configlist(self, section, name, default=None, untrusted=False):
265 """Return a list of comma/space separated strings"""
265 """Return a list of comma/space separated strings"""
266 result = self.config(section, name, untrusted=untrusted)
266 result = self.config(section, name, untrusted=untrusted)
267 if result is None:
267 if result is None:
268 result = default or []
268 result = default or []
269 if isinstance(result, basestring):
269 if isinstance(result, basestring):
270 result = result.replace(",", " ").split()
270 result = result.replace(",", " ").split()
271 return result
271 return result
272
272
273 def has_config(self, section, untrusted=False):
273 def has_config(self, section, untrusted=False):
274 '''tell whether section exists in config.'''
274 '''tell whether section exists in config.'''
275 cdata = self._get_cdata(untrusted)
275 cdata = self._get_cdata(untrusted)
276 return cdata.has_section(section)
276 return cdata.has_section(section)
277
277
278 def _configitems(self, section, untrusted, abort):
278 def _configitems(self, section, untrusted, abort):
279 items = {}
279 items = {}
280 cdata = self._get_cdata(untrusted)
280 cdata = self._get_cdata(untrusted)
281 if cdata.has_section(section):
281 if cdata.has_section(section):
282 try:
282 try:
283 items.update(dict(cdata.items(section)))
283 items.update(dict(cdata.items(section)))
284 except ConfigParser.InterpolationError, inst:
284 except ConfigParser.InterpolationError, inst:
285 msg = _("Error in configuration section [%s]:\n"
285 msg = _("Error in configuration section [%s]:\n"
286 "%s") % (section, inst)
286 "%s") % (section, inst)
287 if abort:
287 if abort:
288 raise util.Abort(msg)
288 raise util.Abort(msg)
289 self.warn(_("Ignored: %s\n") % msg)
289 self.warn(_("Ignored: %s\n") % msg)
290 return items
290 return items
291
291
292 def configitems(self, section, untrusted=False):
292 def configitems(self, section, untrusted=False):
293 items = self._configitems(section, untrusted=untrusted, abort=True)
293 items = self._configitems(section, untrusted=untrusted, abort=True)
294 if self.debugflag and not untrusted and self.ucdata:
294 if self.debugflag and not untrusted and self.ucdata:
295 uitems = self._configitems(section, untrusted=True, abort=False)
295 uitems = self._configitems(section, untrusted=True, abort=False)
296 keys = uitems.keys()
296 keys = uitems.keys()
297 keys.sort()
297 keys.sort()
298 for k in keys:
298 for k in keys:
299 if uitems[k] != items.get(k):
299 if uitems[k] != items.get(k):
300 self.warn(_("Ignoring untrusted configuration option "
300 self.warn(_("Ignoring untrusted configuration option "
301 "%s.%s = %s\n") % (section, k, uitems[k]))
301 "%s.%s = %s\n") % (section, k, uitems[k]))
302 x = items.items()
302 x = items.items()
303 x.sort()
303 x.sort()
304 return x
304 return x
305
305
306 def walkconfig(self, untrusted=False):
306 def walkconfig(self, untrusted=False):
307 cdata = self._get_cdata(untrusted)
307 cdata = self._get_cdata(untrusted)
308 sections = cdata.sections()
308 sections = cdata.sections()
309 sections.sort()
309 sections.sort()
310 for section in sections:
310 for section in sections:
311 for name, value in self.configitems(section, untrusted):
311 for name, value in self.configitems(section, untrusted):
312 yield section, name, str(value).replace('\n', '\\n')
312 yield section, name, str(value).replace('\n', '\\n')
313
313
314 def extensions(self):
314 def extensions(self):
315 result = self.configitems("extensions")
315 result = self.configitems("extensions")
316 for i, (key, value) in enumerate(result):
316 for i, (key, value) in enumerate(result):
317 if value:
317 if value:
318 result[i] = (key, os.path.expanduser(value))
318 result[i] = (key, os.path.expanduser(value))
319 return result
319 return result
320
320
321 def hgignorefiles(self):
321 def hgignorefiles(self):
322 result = []
322 result = []
323 for key, value in self.configitems("ui"):
323 for key, value in self.configitems("ui"):
324 if key == 'ignore' or key.startswith('ignore.'):
324 if key == 'ignore' or key.startswith('ignore.'):
325 result.append(os.path.expanduser(value))
325 result.append(os.path.expanduser(value))
326 return result
326 return result
327
327
328 def configrevlog(self):
329 result = {}
330 for key, value in self.configitems("revlog"):
331 result[key.lower()] = value
332 return result
333
334 def username(self):
328 def username(self):
335 """Return default username to be used in commits.
329 """Return default username to be used in commits.
336
330
337 Searched in this order: $HGUSER, [ui] section of hgrcs, $EMAIL
331 Searched in this order: $HGUSER, [ui] section of hgrcs, $EMAIL
338 and stop searching if one of these is set.
332 and stop searching if one of these is set.
339 If not found, use ($LOGNAME or $USER or $LNAME or
333 If not found, use ($LOGNAME or $USER or $LNAME or
340 $USERNAME) +"@full.hostname".
334 $USERNAME) +"@full.hostname".
341 """
335 """
342 user = os.environ.get("HGUSER")
336 user = os.environ.get("HGUSER")
343 if user is None:
337 if user is None:
344 user = self.config("ui", "username")
338 user = self.config("ui", "username")
345 if user is None:
339 if user is None:
346 user = os.environ.get("EMAIL")
340 user = os.environ.get("EMAIL")
347 if user is None:
341 if user is None:
348 try:
342 try:
349 user = '%s@%s' % (util.getuser(), socket.getfqdn())
343 user = '%s@%s' % (util.getuser(), socket.getfqdn())
350 self.warn(_("No username found, using '%s' instead\n") % user)
344 self.warn(_("No username found, using '%s' instead\n") % user)
351 except KeyError:
345 except KeyError:
352 pass
346 pass
353 if not user:
347 if not user:
354 raise util.Abort(_("Please specify a username."))
348 raise util.Abort(_("Please specify a username."))
355 return user
349 return user
356
350
357 def shortuser(self, user):
351 def shortuser(self, user):
358 """Return a short representation of a user name or email address."""
352 """Return a short representation of a user name or email address."""
359 if not self.verbose: user = util.shortuser(user)
353 if not self.verbose: user = util.shortuser(user)
360 return user
354 return user
361
355
362 def expandpath(self, loc, default=None):
356 def expandpath(self, loc, default=None):
363 """Return repository location relative to cwd or from [paths]"""
357 """Return repository location relative to cwd or from [paths]"""
364 if "://" in loc or os.path.isdir(os.path.join(loc, '.hg')):
358 if "://" in loc or os.path.isdir(os.path.join(loc, '.hg')):
365 return loc
359 return loc
366
360
367 path = self.config("paths", loc)
361 path = self.config("paths", loc)
368 if not path and default is not None:
362 if not path and default is not None:
369 path = self.config("paths", default)
363 path = self.config("paths", default)
370 return path or loc
364 return path or loc
371
365
372 def pushbuffer(self):
366 def pushbuffer(self):
373 self.buffers.append([])
367 self.buffers.append([])
374
368
375 def popbuffer(self):
369 def popbuffer(self):
376 return "".join(self.buffers.pop())
370 return "".join(self.buffers.pop())
377
371
378 def write(self, *args):
372 def write(self, *args):
379 if self.buffers:
373 if self.buffers:
380 self.buffers[-1].extend([str(a) for a in args])
374 self.buffers[-1].extend([str(a) for a in args])
381 else:
375 else:
382 for a in args:
376 for a in args:
383 sys.stdout.write(str(a))
377 sys.stdout.write(str(a))
384
378
385 def write_err(self, *args):
379 def write_err(self, *args):
386 try:
380 try:
387 if not sys.stdout.closed: sys.stdout.flush()
381 if not sys.stdout.closed: sys.stdout.flush()
388 for a in args:
382 for a in args:
389 sys.stderr.write(str(a))
383 sys.stderr.write(str(a))
390 # stderr may be buffered under win32 when redirected to files,
384 # stderr may be buffered under win32 when redirected to files,
391 # including stdout.
385 # including stdout.
392 if not sys.stderr.closed: sys.stderr.flush()
386 if not sys.stderr.closed: sys.stderr.flush()
393 except IOError, inst:
387 except IOError, inst:
394 if inst.errno != errno.EPIPE:
388 if inst.errno != errno.EPIPE:
395 raise
389 raise
396
390
397 def flush(self):
391 def flush(self):
398 try: sys.stdout.flush()
392 try: sys.stdout.flush()
399 except: pass
393 except: pass
400 try: sys.stderr.flush()
394 try: sys.stderr.flush()
401 except: pass
395 except: pass
402
396
403 def readline(self):
397 def readline(self):
404 return sys.stdin.readline()[:-1]
398 return sys.stdin.readline()[:-1]
405 def prompt(self, msg, pat=None, default="y"):
399 def prompt(self, msg, pat=None, default="y"):
406 if not self.interactive: return default
400 if not self.interactive: return default
407 while 1:
401 while 1:
408 self.write(msg, " ")
402 self.write(msg, " ")
409 r = self.readline()
403 r = self.readline()
410 if not pat or re.match(pat, r):
404 if not pat or re.match(pat, r):
411 return r
405 return r
412 else:
406 else:
413 self.write(_("unrecognized response\n"))
407 self.write(_("unrecognized response\n"))
414 def getpass(self, prompt=None, default=None):
408 def getpass(self, prompt=None, default=None):
415 if not self.interactive: return default
409 if not self.interactive: return default
416 return getpass.getpass(prompt or _('password: '))
410 return getpass.getpass(prompt or _('password: '))
417 def status(self, *msg):
411 def status(self, *msg):
418 if not self.quiet: self.write(*msg)
412 if not self.quiet: self.write(*msg)
419 def warn(self, *msg):
413 def warn(self, *msg):
420 self.write_err(*msg)
414 self.write_err(*msg)
421 def note(self, *msg):
415 def note(self, *msg):
422 if self.verbose: self.write(*msg)
416 if self.verbose: self.write(*msg)
423 def debug(self, *msg):
417 def debug(self, *msg):
424 if self.debugflag: self.write(*msg)
418 if self.debugflag: self.write(*msg)
425 def edit(self, text, user):
419 def edit(self, text, user):
426 (fd, name) = tempfile.mkstemp(prefix="hg-editor-", suffix=".txt",
420 (fd, name) = tempfile.mkstemp(prefix="hg-editor-", suffix=".txt",
427 text=True)
421 text=True)
428 try:
422 try:
429 f = os.fdopen(fd, "w")
423 f = os.fdopen(fd, "w")
430 f.write(text)
424 f.write(text)
431 f.close()
425 f.close()
432
426
433 editor = (os.environ.get("HGEDITOR") or
427 editor = (os.environ.get("HGEDITOR") or
434 self.config("ui", "editor") or
428 self.config("ui", "editor") or
435 os.environ.get("EDITOR", "vi"))
429 os.environ.get("EDITOR", "vi"))
436
430
437 util.system("%s \"%s\"" % (editor, name),
431 util.system("%s \"%s\"" % (editor, name),
438 environ={'HGUSER': user},
432 environ={'HGUSER': user},
439 onerr=util.Abort, errprefix=_("edit failed"))
433 onerr=util.Abort, errprefix=_("edit failed"))
440
434
441 f = open(name)
435 f = open(name)
442 t = f.read()
436 t = f.read()
443 f.close()
437 f.close()
444 t = re.sub("(?m)^HG:.*\n", "", t)
438 t = re.sub("(?m)^HG:.*\n", "", t)
445 finally:
439 finally:
446 os.unlink(name)
440 os.unlink(name)
447
441
448 return t
442 return t
449
443
450 def print_exc(self):
444 def print_exc(self):
451 '''print exception traceback if traceback printing enabled.
445 '''print exception traceback if traceback printing enabled.
452 only to call in exception handler. returns true if traceback
446 only to call in exception handler. returns true if traceback
453 printed.'''
447 printed.'''
454 if self.traceback:
448 if self.traceback:
455 traceback.print_exc()
449 traceback.print_exc()
456 return self.traceback
450 return self.traceback
@@ -1,206 +1,206 b''
1 # verify.py - repository integrity checking for Mercurial
1 # verify.py - repository integrity checking for Mercurial
2 #
2 #
3 # Copyright 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import *
8 from node import *
9 from i18n import _
9 from i18n import _
10 import revlog, mdiff
10 import revlog, mdiff
11
11
12 def verify(repo):
12 def verify(repo):
13 filelinkrevs = {}
13 filelinkrevs = {}
14 filenodes = {}
14 filenodes = {}
15 changesets = revisions = files = 0
15 changesets = revisions = files = 0
16 errors = [0]
16 errors = [0]
17 warnings = [0]
17 warnings = [0]
18 neededmanifests = {}
18 neededmanifests = {}
19
19
20 def err(msg):
20 def err(msg):
21 repo.ui.warn(msg + "\n")
21 repo.ui.warn(msg + "\n")
22 errors[0] += 1
22 errors[0] += 1
23
23
24 def warn(msg):
24 def warn(msg):
25 repo.ui.warn(msg + "\n")
25 repo.ui.warn(msg + "\n")
26 warnings[0] += 1
26 warnings[0] += 1
27
27
28 def checksize(obj, name):
28 def checksize(obj, name):
29 d = obj.checksize()
29 d = obj.checksize()
30 if d[0]:
30 if d[0]:
31 err(_("%s data length off by %d bytes") % (name, d[0]))
31 err(_("%s data length off by %d bytes") % (name, d[0]))
32 if d[1]:
32 if d[1]:
33 err(_("%s index contains %d extra bytes") % (name, d[1]))
33 err(_("%s index contains %d extra bytes") % (name, d[1]))
34
34
35 def checkversion(obj, name):
35 def checkversion(obj, name):
36 if obj.version != revlog.REVLOGV0:
36 if obj.version != revlog.REVLOGV0:
37 if not revlogv1:
37 if not revlogv1:
38 warn(_("warning: `%s' uses revlog format 1") % name)
38 warn(_("warning: `%s' uses revlog format 1") % name)
39 elif revlogv1:
39 elif revlogv1:
40 warn(_("warning: `%s' uses revlog format 0") % name)
40 warn(_("warning: `%s' uses revlog format 0") % name)
41
41
42 revlogv1 = repo.revlogversion != revlog.REVLOGV0
42 revlogv1 = repo.changelog.version != revlog.REVLOGV0
43 if repo.ui.verbose or revlogv1 != repo.revlogv1:
43 if repo.ui.verbose or not revlogv1:
44 repo.ui.status(_("repository uses revlog format %d\n") %
44 repo.ui.status(_("repository uses revlog format %d\n") %
45 (revlogv1 and 1 or 0))
45 (revlogv1 and 1 or 0))
46
46
47 seen = {}
47 seen = {}
48 repo.ui.status(_("checking changesets\n"))
48 repo.ui.status(_("checking changesets\n"))
49 checksize(repo.changelog, "changelog")
49 checksize(repo.changelog, "changelog")
50
50
51 for i in xrange(repo.changelog.count()):
51 for i in xrange(repo.changelog.count()):
52 changesets += 1
52 changesets += 1
53 n = repo.changelog.node(i)
53 n = repo.changelog.node(i)
54 l = repo.changelog.linkrev(n)
54 l = repo.changelog.linkrev(n)
55 if l != i:
55 if l != i:
56 err(_("incorrect link (%d) for changeset revision %d") %(l, i))
56 err(_("incorrect link (%d) for changeset revision %d") %(l, i))
57 if n in seen:
57 if n in seen:
58 err(_("duplicate changeset at revision %d") % i)
58 err(_("duplicate changeset at revision %d") % i)
59 seen[n] = 1
59 seen[n] = 1
60
60
61 for p in repo.changelog.parents(n):
61 for p in repo.changelog.parents(n):
62 if p not in repo.changelog.nodemap:
62 if p not in repo.changelog.nodemap:
63 err(_("changeset %s has unknown parent %s") %
63 err(_("changeset %s has unknown parent %s") %
64 (short(n), short(p)))
64 (short(n), short(p)))
65 try:
65 try:
66 changes = repo.changelog.read(n)
66 changes = repo.changelog.read(n)
67 except KeyboardInterrupt:
67 except KeyboardInterrupt:
68 repo.ui.warn(_("interrupted"))
68 repo.ui.warn(_("interrupted"))
69 raise
69 raise
70 except Exception, inst:
70 except Exception, inst:
71 err(_("unpacking changeset %s: %s") % (short(n), inst))
71 err(_("unpacking changeset %s: %s") % (short(n), inst))
72 continue
72 continue
73
73
74 neededmanifests[changes[0]] = n
74 neededmanifests[changes[0]] = n
75
75
76 for f in changes[3]:
76 for f in changes[3]:
77 filelinkrevs.setdefault(f, []).append(i)
77 filelinkrevs.setdefault(f, []).append(i)
78
78
79 seen = {}
79 seen = {}
80 repo.ui.status(_("checking manifests\n"))
80 repo.ui.status(_("checking manifests\n"))
81 checkversion(repo.manifest, "manifest")
81 checkversion(repo.manifest, "manifest")
82 checksize(repo.manifest, "manifest")
82 checksize(repo.manifest, "manifest")
83
83
84 for i in xrange(repo.manifest.count()):
84 for i in xrange(repo.manifest.count()):
85 n = repo.manifest.node(i)
85 n = repo.manifest.node(i)
86 l = repo.manifest.linkrev(n)
86 l = repo.manifest.linkrev(n)
87
87
88 if l < 0 or l >= repo.changelog.count():
88 if l < 0 or l >= repo.changelog.count():
89 err(_("bad manifest link (%d) at revision %d") % (l, i))
89 err(_("bad manifest link (%d) at revision %d") % (l, i))
90
90
91 if n in neededmanifests:
91 if n in neededmanifests:
92 del neededmanifests[n]
92 del neededmanifests[n]
93
93
94 if n in seen:
94 if n in seen:
95 err(_("duplicate manifest at revision %d") % i)
95 err(_("duplicate manifest at revision %d") % i)
96
96
97 seen[n] = 1
97 seen[n] = 1
98
98
99 for p in repo.manifest.parents(n):
99 for p in repo.manifest.parents(n):
100 if p not in repo.manifest.nodemap:
100 if p not in repo.manifest.nodemap:
101 err(_("manifest %s has unknown parent %s") %
101 err(_("manifest %s has unknown parent %s") %
102 (short(n), short(p)))
102 (short(n), short(p)))
103
103
104 try:
104 try:
105 for f, fn in repo.manifest.readdelta(n).iteritems():
105 for f, fn in repo.manifest.readdelta(n).iteritems():
106 filenodes.setdefault(f, {})[fn] = 1
106 filenodes.setdefault(f, {})[fn] = 1
107 except KeyboardInterrupt:
107 except KeyboardInterrupt:
108 repo.ui.warn(_("interrupted"))
108 repo.ui.warn(_("interrupted"))
109 raise
109 raise
110 except Exception, inst:
110 except Exception, inst:
111 err(_("reading delta for manifest %s: %s") % (short(n), inst))
111 err(_("reading delta for manifest %s: %s") % (short(n), inst))
112 continue
112 continue
113
113
114 repo.ui.status(_("crosschecking files in changesets and manifests\n"))
114 repo.ui.status(_("crosschecking files in changesets and manifests\n"))
115
115
116 for m, c in neededmanifests.items():
116 for m, c in neededmanifests.items():
117 err(_("Changeset %s refers to unknown manifest %s") %
117 err(_("Changeset %s refers to unknown manifest %s") %
118 (short(m), short(c)))
118 (short(m), short(c)))
119 del neededmanifests
119 del neededmanifests
120
120
121 for f in filenodes:
121 for f in filenodes:
122 if f not in filelinkrevs:
122 if f not in filelinkrevs:
123 err(_("file %s in manifest but not in changesets") % f)
123 err(_("file %s in manifest but not in changesets") % f)
124
124
125 for f in filelinkrevs:
125 for f in filelinkrevs:
126 if f not in filenodes:
126 if f not in filenodes:
127 err(_("file %s in changeset but not in manifest") % f)
127 err(_("file %s in changeset but not in manifest") % f)
128
128
129 repo.ui.status(_("checking files\n"))
129 repo.ui.status(_("checking files\n"))
130 ff = filenodes.keys()
130 ff = filenodes.keys()
131 ff.sort()
131 ff.sort()
132 for f in ff:
132 for f in ff:
133 if f == "/dev/null":
133 if f == "/dev/null":
134 continue
134 continue
135 files += 1
135 files += 1
136 if not f:
136 if not f:
137 err(_("file without name in manifest %s") % short(n))
137 err(_("file without name in manifest %s") % short(n))
138 continue
138 continue
139 fl = repo.file(f)
139 fl = repo.file(f)
140 checkversion(fl, f)
140 checkversion(fl, f)
141 checksize(fl, f)
141 checksize(fl, f)
142
142
143 nodes = {nullid: 1}
143 nodes = {nullid: 1}
144 seen = {}
144 seen = {}
145 for i in xrange(fl.count()):
145 for i in xrange(fl.count()):
146 revisions += 1
146 revisions += 1
147 n = fl.node(i)
147 n = fl.node(i)
148
148
149 if n in seen:
149 if n in seen:
150 err(_("%s: duplicate revision %d") % (f, i))
150 err(_("%s: duplicate revision %d") % (f, i))
151 if n not in filenodes[f]:
151 if n not in filenodes[f]:
152 err(_("%s: %d:%s not in manifests") % (f, i, short(n)))
152 err(_("%s: %d:%s not in manifests") % (f, i, short(n)))
153 else:
153 else:
154 del filenodes[f][n]
154 del filenodes[f][n]
155
155
156 flr = fl.linkrev(n)
156 flr = fl.linkrev(n)
157 if flr not in filelinkrevs.get(f, []):
157 if flr not in filelinkrevs.get(f, []):
158 err(_("%s:%s points to unexpected changeset %d")
158 err(_("%s:%s points to unexpected changeset %d")
159 % (f, short(n), flr))
159 % (f, short(n), flr))
160 else:
160 else:
161 filelinkrevs[f].remove(flr)
161 filelinkrevs[f].remove(flr)
162
162
163 # verify contents
163 # verify contents
164 try:
164 try:
165 t = fl.read(n)
165 t = fl.read(n)
166 except KeyboardInterrupt:
166 except KeyboardInterrupt:
167 repo.ui.warn(_("interrupted"))
167 repo.ui.warn(_("interrupted"))
168 raise
168 raise
169 except Exception, inst:
169 except Exception, inst:
170 err(_("unpacking file %s %s: %s") % (f, short(n), inst))
170 err(_("unpacking file %s %s: %s") % (f, short(n), inst))
171
171
172 # verify parents
172 # verify parents
173 (p1, p2) = fl.parents(n)
173 (p1, p2) = fl.parents(n)
174 if p1 not in nodes:
174 if p1 not in nodes:
175 err(_("file %s:%s unknown parent 1 %s") %
175 err(_("file %s:%s unknown parent 1 %s") %
176 (f, short(n), short(p1)))
176 (f, short(n), short(p1)))
177 if p2 not in nodes:
177 if p2 not in nodes:
178 err(_("file %s:%s unknown parent 2 %s") %
178 err(_("file %s:%s unknown parent 2 %s") %
179 (f, short(n), short(p1)))
179 (f, short(n), short(p1)))
180 nodes[n] = 1
180 nodes[n] = 1
181
181
182 # check renames
182 # check renames
183 try:
183 try:
184 rp = fl.renamed(n)
184 rp = fl.renamed(n)
185 if rp:
185 if rp:
186 fl2 = repo.file(rp[0])
186 fl2 = repo.file(rp[0])
187 rev = fl2.rev(rp[1])
187 rev = fl2.rev(rp[1])
188 except KeyboardInterrupt:
188 except KeyboardInterrupt:
189 repo.ui.warn(_("interrupted"))
189 repo.ui.warn(_("interrupted"))
190 raise
190 raise
191 except Exception, inst:
191 except Exception, inst:
192 err(_("checking rename on file %s %s: %s") % (f, short(n), inst))
192 err(_("checking rename on file %s %s: %s") % (f, short(n), inst))
193
193
194 # cross-check
194 # cross-check
195 for node in filenodes[f]:
195 for node in filenodes[f]:
196 err(_("node %s in manifests not in %s") % (hex(node), f))
196 err(_("node %s in manifests not in %s") % (hex(node), f))
197
197
198 repo.ui.status(_("%d files, %d changesets, %d total revisions\n") %
198 repo.ui.status(_("%d files, %d changesets, %d total revisions\n") %
199 (files, changesets, revisions))
199 (files, changesets, revisions))
200
200
201 if warnings[0]:
201 if warnings[0]:
202 repo.ui.warn(_("%d warnings encountered!\n") % warnings[0])
202 repo.ui.warn(_("%d warnings encountered!\n") % warnings[0])
203 if errors[0]:
203 if errors[0]:
204 repo.ui.warn(_("%d integrity errors encountered!\n") % errors[0])
204 repo.ui.warn(_("%d integrity errors encountered!\n") % errors[0])
205 return 1
205 return 1
206
206
General Comments 0
You need to be logged in to leave comments. Login now