@@ -0,0 +1,25 @@
#!/usr/bin/env python
# Dump revlogs as raw data stream
# $ find .hg/store/ -name "*.i" | xargs dumprevlog > repo.dump

import sys
from mercurial import revlog, node, util

for fp in (sys.stdin, sys.stdout, sys.stderr):
    util.set_binary(fp)

for f in sys.argv[1:]:
    binopen = lambda fn: open(fn, 'rb')
    r = revlog.revlog(binopen, f)
    print "file:", f
    for i in r:
        n = r.node(i)
        p = r.parents(n)
        d = r.revision(n)
        print "node:", node.hex(n)
        print "linkrev:", r.linkrev(n)
        print "parents:", node.hex(p[0]), node.hex(p[1])
        print "length:", len(d)
        print "-start-"
        print d
        print "-end-"
@@ -0,0 +1,37 @@
#!/usr/bin/env python
# Undump a dump from dumprevlog
# $ hg init
# $ undumprevlog < repo.dump

import sys
from mercurial import revlog, node, util, transaction

for fp in (sys.stdin, sys.stdout, sys.stderr):
    util.set_binary(fp)

opener = util.opener('.', False)
tr = transaction.transaction(sys.stderr.write, opener, "undump.journal")
while 1:
    l = sys.stdin.readline()
    if not l:
        break
    if l.startswith("file:"):
        f = l[6:-1]
        r = revlog.revlog(opener, f)
        print f
    elif l.startswith("node:"):
        n = node.bin(l[6:-1])
    elif l.startswith("linkrev:"):
        lr = int(l[9:-1])
    elif l.startswith("parents:"):
        p = l[9:-1].split()
        p1 = node.bin(p[0])
        p2 = node.bin(p[1])
    elif l.startswith("length:"):
        length = int(l[8:-1])
        sys.stdin.readline() # start marker
        d = sys.stdin.read(length)
        sys.stdin.readline() # end marker
        r.addrevision(d, tr, lr, p1, p2)

tr.close()
@@ -0,0 +1,12 @@
@echo off
rem Windows Driver script for Mercurial

setlocal
set HG=%~f0

rem Use a full path to Python (relative to this script) as the standard Python
rem install does not put python.exe on the PATH...
rem %~dp0 is the directory of this script

%~dp0..\python "%~dp0hg" %*
endlocal
@@ -0,0 +1,154 @@
#!/usr/bin/env python
#
# Commandline front-end for cvsps.py
#
# Copyright 2008, Frank Kingswood <frank@kingswood-consulting.co.uk>
#
# This software may be used and distributed according to the terms
# of the GNU General Public License, incorporated herein by reference.

import sys
from mercurial import util
from mercurial.i18n import _
from optparse import OptionParser, SUPPRESS_HELP
from hgext.convert.cvsps import createlog, createchangeset, logerror

def main():
    '''Main program to mimic cvsps.'''

    op = OptionParser(usage='%prog [-bpruvxz] path',
                      description='Read CVS rlog for current directory or named '
                                  'path in repository, and convert the log to changesets '
                                  'based on matching commit log entries and dates.')

    # Options that are ignored for compatibility with cvsps-2.1
    op.add_option('-A', dest='Ignore', action='store_true', help=SUPPRESS_HELP)
    op.add_option('--cvs-direct', dest='Ignore', action='store_true', help=SUPPRESS_HELP)
    op.add_option('-q', dest='Ignore', action='store_true', help=SUPPRESS_HELP)

    # Main options shared with cvsps-2.1
    op.add_option('-b', dest='Branches', action='append', default=[],
                  help='Only return changes on specified branches')
    op.add_option('-p', dest='Prefix', action='store', default='',
                  help='Prefix to remove from file names')
    op.add_option('-r', dest='Revisions', action='append', default=[],
                  help='Only return changes after or between specified tags')
    op.add_option('-u', dest='Cache', action='store_const', const='update',
                  help="Update cvs log cache")
    op.add_option('-v', dest='Verbose', action='count', default=0,
                  help='Be verbose')
    op.add_option('-x', dest='Cache', action='store_const', const='write',
                  help="Create new cvs log cache")
    op.add_option('-z', dest='Fuzz', action='store', type='int', default=60,
                  help='Set commit time fuzz', metavar='seconds')
    op.add_option('--root', dest='Root', action='store', default='',
                  help='Specify cvsroot', metavar='cvsroot')

    # Options specific to this version
    op.add_option('--parents', dest='Parents', action='store_true',
                  help='Show parent changesets')
    op.add_option('--ancestors', dest='Ancestors', action='store_true',
                  help='Show current changeset in ancestor branches')

    options, args = op.parse_args()

    # Create a ui object for printing progress messages
    class UI:
        def __init__(self, verbose):
            if verbose:
                self.status = self.message
            if verbose>1:
                self.note = self.message
            if verbose>2:
                self.debug = self.message
        def message(self, msg):
            sys.stderr.write(msg)
        def nomessage(self, msg):
            pass
        status = nomessage
        note = nomessage
        debug = nomessage
    ui = UI(options.Verbose)

    try:
        if args:
            log = []
            for d in args:
                log += createlog(ui, d, root=options.Root, cache=options.Cache)
        else:
            log = createlog(ui, root=options.Root, cache=options.Cache)
    except logerror, e:
        print e
        return

    changesets = createchangeset(ui, log, options.Fuzz)
    del log

    # Print changesets (optionally filtered)

    off = len(options.Revisions)
    branches = {}    # latest version number in each branch
    ancestors = {}   # parent branch
    for cs in changesets:

        if options.Ancestors:
            if cs.branch not in branches and cs.parents and cs.parents[0].id:
                ancestors[cs.branch] = changesets[cs.parents[0].id-1].branch, cs.parents[0].id
            branches[cs.branch] = cs.id

        # limit by branches
        if options.Branches and (cs.branch or 'HEAD') not in options.Branches:
            continue

        if not off:
            # Note: trailing spaces on several lines here are needed to have
            # bug-for-bug compatibility with cvsps.
            print '---------------------'
            print 'PatchSet %d ' % cs.id
            print 'Date: %s' % util.datestr(cs.date, '%Y/%m/%d %H:%M:%S %1%2')
            print 'Author: %s' % cs.author
            print 'Branch: %s' % (cs.branch or 'HEAD')
            print 'Tag%s: %s ' % (['', 's'][len(cs.tags)>1],
                                  ','.join(cs.tags) or '(none)')
            if options.Parents and cs.parents:
                if len(cs.parents)>1:
                    print 'Parents: %s' % (','.join([str(p.id) for p in cs.parents]))
                else:
                    print 'Parent: %d' % cs.parents[0].id

            if options.Ancestors:
                b = cs.branch
                r = []
                while b:
                    b, c = ancestors[b]
                    r.append('%s:%d:%d' % (b or "HEAD", c, branches[b]))
                if r:
                    print 'Ancestors: %s' % (','.join(r))

            print 'Log:'
            print cs.comment

            print 'Members: '
            for f in cs.entries:
                fn = f.file
                if fn.startswith(options.Prefix):
                    fn = fn[len(options.Prefix):]
                print '\t%s:%s->%s%s ' % (fn, '.'.join([str(x) for x in f.parent]) or 'INITIAL',
                                          '.'.join([str(x) for x in f.revision]), ['', '(DEAD)'][f.dead])


        # have we seen the start tag?
        if options.Revisions and off:
            if options.Revisions[0] == str(cs.id) or \
               options.Revisions[0] in cs.tags:
                off = False

        # see if we reached the end tag
        if len(options.Revisions)>1 and not off:
            if options.Revisions[1] == str(cs.id) or \
               options.Revisions[1] in cs.tags:
                break


if __name__ == '__main__':
    main()
@@ -0,0 +1,548 @@
#
# Mercurial built-in replacement for cvsps.
#
# Copyright 2008, Frank Kingswood <frank@kingswood-consulting.co.uk>
#
# This software may be used and distributed according to the terms
# of the GNU General Public License, incorporated herein by reference.

import os
import re
import sys
import cPickle as pickle
from mercurial import util
from mercurial.i18n import _

def listsort(list, key):
    "helper to sort by key in Python 2.3"
    try:
        list.sort(key=key)
    except TypeError:
        list.sort(lambda l, r: cmp(key(l), key(r)))

class logentry(object):
    '''Class logentry has the following attributes:
        .author   - author name as CVS knows it
        .branch   - name of branch this revision is on
        .branches - revision tuple of branches starting at this revision
        .comment  - commit message
        .date     - the commit date as a (time, tz) tuple
        .dead     - true if file revision is dead
        .file     - Name of file
        .lines    - a tuple (+lines, -lines) or None
        .parent   - Previous revision of this entry
        .rcs      - name of file as returned from CVS
        .revision - revision number as tuple
        .tags     - list of tags on the file
    '''
    def __init__(self, **entries):
        self.__dict__.update(entries)

class logerror(Exception):
    pass

def createlog(ui, directory=None, root="", rlog=True, cache=None):
    '''Collect the CVS rlog'''

    # Because we store many duplicate commit log messages, reusing strings
    # saves a lot of memory and pickle storage space.
    _scache = {}
    def scache(s):
        "return a shared version of a string"
        return _scache.setdefault(s, s)

    ui.status(_('collecting CVS rlog\n'))

    log = []      # list of logentry objects containing the CVS state

    # patterns to match in CVS (r)log output, by state of use
    re_00 = re.compile('RCS file: (.+)$')
    re_01 = re.compile('cvs \\[r?log aborted\\]: (.+)$')
    re_02 = re.compile('cvs (r?log|server): (.+)\n$')
    re_03 = re.compile("(Cannot access.+CVSROOT)|(can't create temporary directory.+)$")
    re_10 = re.compile('Working file: (.+)$')
    re_20 = re.compile('symbolic names:')
    re_30 = re.compile('\t(.+): ([\\d.]+)$')
    re_31 = re.compile('----------------------------$')
    re_32 = re.compile('=============================================================================$')
    re_50 = re.compile('revision ([\\d.]+)(\s+locked by:\s+.+;)?$')
    re_60 = re.compile(r'date:\s+(.+);\s+author:\s+(.+);\s+state:\s+(.+?);(\s+lines:\s+(\+\d+)?\s+(-\d+)?;)?')
    re_70 = re.compile('branches: (.+);$')

    prefix = ''   # leading path to strip of what we get from CVS

    if directory is None:
        # Current working directory

        # Get the real directory in the repository
        try:
            prefix = file(os.path.join('CVS','Repository')).read().strip()
            if prefix == ".":
                prefix = ""
            directory = prefix
        except IOError:
            raise logerror('Not a CVS sandbox')

        if prefix and not prefix.endswith('/'):
            prefix += '/'

        # Use the Root file in the sandbox, if it exists
        try:
            root = file(os.path.join('CVS','Root')).read().strip()
        except IOError:
            pass

    if not root:
        root = os.environ.get('CVSROOT', '')

    # read log cache if one exists
    oldlog = []
    date = None

    if cache:
        cachedir = os.path.expanduser('~/.hg.cvsps')
        if not os.path.exists(cachedir):
            os.mkdir(cachedir)

        # The cvsps cache pickle needs a uniquified name, based on the
        # repository location. The address may have all sort of nasties
        # in it, slashes, colons and such. So here we take just the
        # alphanumerics, concatenated in a way that does not mix up the
        # various components, so that
        #    :pserver:user@server:/path
        # and
        #    /pserver/user/server/path
        # are mapped to different cache file names.
        cachefile = root.split(":") + [directory, "cache"]
        cachefile = ['-'.join(re.findall(r'\w+', s)) for s in cachefile if s]
        cachefile = os.path.join(cachedir,
                                 '.'.join([s for s in cachefile if s]))

        if cache == 'update':
            try:
                ui.note(_('reading cvs log cache %s\n') % cachefile)
                oldlog = pickle.load(file(cachefile))
                ui.note(_('cache has %d log entries\n') % len(oldlog))
            except Exception, e:
                ui.note(_('error reading cache: %r\n') % e)

            if oldlog:
                date = oldlog[-1].date    # last commit date as a (time,tz) tuple
                date = util.datestr(date, '%Y/%m/%d %H:%M:%S %1%2')

    # build the CVS commandline
    cmd = ['cvs', '-q']
    if root:
        cmd.append('-d%s' % root)
        p = root.split(':')[-1]
        if not p.endswith('/'):
            p += '/'
        prefix = p + prefix
    cmd.append(['log', 'rlog'][rlog])
    if date:
        # no space between option and date string
        cmd.append('-d>%s' % date)
    cmd.append(directory)

    # state machine begins here
    tags = {}     # dictionary of revisions on current file with their tags
    state = 0
    store = False # set when a new record can be appended

    cmd = [util.shellquote(arg) for arg in cmd]
    ui.note("running %s\n" % (' '.join(cmd)))
    ui.debug("prefix=%r directory=%r root=%r\n" % (prefix, directory, root))

    for line in util.popen(' '.join(cmd)):
        if line.endswith('\n'):
            line = line[:-1]
        #ui.debug('state=%d line=%r\n' % (state, line))

        if state == 0:
            # initial state, consume input until we see 'RCS file'
            match = re_00.match(line)
            if match:
                rcs = match.group(1)
                tags = {}
                if rlog:
                    filename = rcs[:-2]
                    if filename.startswith(prefix):
                        filename = filename[len(prefix):]
                    if filename.startswith('/'):
                        filename = filename[1:]
                    if filename.startswith('Attic/'):
                        filename = filename[6:]
                    else:
                        filename = filename.replace('/Attic/', '/')
                    state = 2
                    continue
                state = 1
                continue
            match = re_01.match(line)
            if match:
                raise Exception(match.group(1))
            match = re_02.match(line)
            if match:
                raise Exception(match.group(2))
            if re_03.match(line):
                raise Exception(line)

        elif state == 1:
            # expect 'Working file' (only when using log instead of rlog)
            match = re_10.match(line)
            assert match, _('RCS file must be followed by working file')
            filename = match.group(1)
            state = 2

        elif state == 2:
            # expect 'symbolic names'
            if re_20.match(line):
                state = 3

        elif state == 3:
            # read the symbolic names and store as tags
            match = re_30.match(line)
            if match:
                rev = [int(x) for x in match.group(2).split('.')]

                # Convert magic branch number to an odd-numbered one
                revn = len(rev)
                if revn > 3 and (revn % 2) == 0 and rev[-2] == 0:
                    rev = rev[:-2] + rev[-1:]
                rev = tuple(rev)

                if rev not in tags:
                    tags[rev] = []
                tags[rev].append(match.group(1))

            elif re_31.match(line):
                state = 5
            elif re_32.match(line):
                state = 0

        elif state == 4:
            # expecting '------' separator before first revision
            if re_31.match(line):
                state = 5
            else:
                assert not re_32.match(line), _('Must have at least some revisions')

        elif state == 5:
            # expecting revision number and possibly (ignored) lock indication
            # we create the logentry here from values stored in states 0 to 4,
            # as this state is re-entered for subsequent revisions of a file.
            match = re_50.match(line)
            assert match, _('expected revision number')
            e = logentry(rcs=scache(rcs), file=scache(filename),
                         revision=tuple([int(x) for x in match.group(1).split('.')]),
                         branches=[], parent=None)
            state = 6

        elif state == 6:
            # expecting date, author, state, lines changed
            match = re_60.match(line)
            assert match, _('revision must be followed by date line')
            d = match.group(1)
            if d[2] == '/':
                # Y2K
                d = '19' + d

            if len(d.split()) != 3:
                # cvs log dates always in GMT
                d = d + ' UTC'
            e.date = util.parsedate(d, ['%y/%m/%d %H:%M:%S', '%Y/%m/%d %H:%M:%S', '%Y-%m-%d %H:%M:%S'])
            e.author = scache(match.group(2))
            e.dead = match.group(3).lower() == 'dead'

            if match.group(5):
                if match.group(6):
                    e.lines = (int(match.group(5)), int(match.group(6)))
                else:
                    e.lines = (int(match.group(5)), 0)
            elif match.group(6):
                e.lines = (0, int(match.group(6)))
            else:
                e.lines = None
            e.comment = []
            state = 7

        elif state == 7:
            # read the revision numbers of branches that start at this revision
            # or store the commit log message otherwise
            m = re_70.match(line)
            if m:
                e.branches = [tuple([int(y) for y in x.strip().split('.')])
                              for x in m.group(1).split(';')]
                state = 8
            elif re_31.match(line):
                state = 5
                store = True
            elif re_32.match(line):
                state = 0
                store = True
            else:
                e.comment.append(line)

        elif state == 8:
            # store commit log message
            if re_31.match(line):
                state = 5
                store = True
            elif re_32.match(line):
                state = 0
                store = True
            else:
                e.comment.append(line)

        if store:
            # clean up the results and save in the log.
            store = False
            e.tags = util.sort([scache(x) for x in tags.get(e.revision, [])])
            e.comment = scache('\n'.join(e.comment))

            revn = len(e.revision)
            if revn > 3 and (revn % 2) == 0:
                e.branch = tags.get(e.revision[:-1], [None])[0]
            else:
                e.branch = None

            log.append(e)

            if len(log) % 100 == 0:
                ui.status(util.ellipsis('%d %s' % (len(log), e.file), 80)+'\n')

    listsort(log, key=lambda x:(x.rcs, x.revision))

    # find parent revisions of individual files
    versions = {}
    for e in log:
        branch = e.revision[:-1]
        p = versions.get((e.rcs, branch), None)
        if p is None:
            p = e.revision[:-2]
        e.parent = p
        versions[(e.rcs, branch)] = e.revision

    # update the log cache
    if cache:
        if log:
            # join up the old and new logs
            listsort(log, key=lambda x:x.date)

            if oldlog and oldlog[-1].date >= log[0].date:
                raise logerror('Log cache overlaps with new log entries,'
                               ' re-run without cache.')

            log = oldlog + log

            # write the new cachefile
            ui.note(_('writing cvs log cache %s\n') % cachefile)
            pickle.dump(log, file(cachefile, 'w'))
        else:
            log = oldlog

    ui.status(_('%d log entries\n') % len(log))

    return log


class changeset(object):
    '''Class changeset has the following attributes:
        .author  - author name as CVS knows it
        .branch  - name of branch this changeset is on, or None
        .comment - commit message
        .date    - the commit date as a (time,tz) tuple
        .entries - list of logentry objects in this changeset
        .parents - list of one or two parent changesets
        .tags    - list of tags on this changeset
    '''
    def __init__(self, **entries):
        self.__dict__.update(entries)

def createchangeset(ui, log, fuzz=60, mergefrom=None, mergeto=None):
    '''Convert log into changesets.'''

    ui.status(_('creating changesets\n'))

    # Merge changesets

    listsort(log, key=lambda x:(x.comment, x.author, x.branch, x.date))

    changesets = []
    files = {}
    c = None
    for i, e in enumerate(log):

        # Check if log entry belongs to the current changeset or not.
        if not (c and
                e.comment == c.comment and
                e.author == c.author and
                e.branch == c.branch and
                ((c.date[0] + c.date[1]) <=
                 (e.date[0] + e.date[1]) <=
                 (c.date[0] + c.date[1]) + fuzz) and
                e.file not in files):
            c = changeset(comment=e.comment, author=e.author,
                          branch=e.branch, date=e.date, entries=[])
            changesets.append(c)
            files = {}
            if len(changesets) % 100 == 0:
                t = '%d %s' % (len(changesets), repr(e.comment)[1:-1])
                ui.status(util.ellipsis(t, 80) + '\n')

        c.entries.append(e)
        files[e.file] = True
        c.date = e.date       # changeset date is date of latest commit in it

    # Sort files in each changeset

    for c in changesets:
        def pathcompare(l, r):
            'Mimic cvsps sorting order'
            l = l.split('/')
            r = r.split('/')
            nl = len(l)
            nr = len(r)
            n = min(nl, nr)
            for i in range(n):
                if i + 1 == nl and nl < nr:
                    return -1
                elif i + 1 == nr and nl > nr:
                    return +1
                elif l[i] < r[i]:
                    return -1
                elif l[i] > r[i]:
                    return +1
            return 0
        def entitycompare(l, r):
            return pathcompare(l.file, r.file)

        c.entries.sort(entitycompare)

    # Sort changesets by date

    def cscmp(l, r):
        d = sum(l.date) - sum(r.date)
        if d:
            return d

        # detect vendor branches and initial commits on a branch
        le = {}
        for e in l.entries:
            le[e.rcs] = e.revision
        re = {}
        for e in r.entries:
            re[e.rcs] = e.revision

        d = 0
        for e in l.entries:
            if re.get(e.rcs, None) == e.parent:
                assert not d
                d = 1
                break

        for e in r.entries:
            if le.get(e.rcs, None) == e.parent:
                assert not d
                d = -1
                break

        return d

    changesets.sort(cscmp)

    # Collect tags

    globaltags = {}
    for c in changesets:
        tags = {}
        for e in c.entries:
            for tag in e.tags:
                # remember which is the latest changeset to have this tag
                globaltags[tag] = c

    for c in changesets:
        tags = {}
        for e in c.entries:
            for tag in e.tags:
                tags[tag] = True
        # remember tags only if this is the latest changeset to have it
        c.tags = util.sort([tag for tag in tags if globaltags[tag] is c])

    # Find parent changesets, handle {{mergetobranch BRANCHNAME}}
    # by inserting dummy changesets with two parents, and handle
    # {{mergefrombranch BRANCHNAME}} by setting two parents.

    if mergeto is None:
        mergeto = r'{{mergetobranch ([-\w]+)}}'
    if mergeto:
        mergeto = re.compile(mergeto)

    if mergefrom is None:
        mergefrom = r'{{mergefrombranch ([-\w]+)}}'
    if mergefrom:
        mergefrom = re.compile(mergefrom)

    versions = {}    # changeset index where we saw any particular file version
    branches = {}    # changeset index where we saw a branch
    n = len(changesets)
    i = 0
    while i<n:
        c = changesets[i]

        for f in c.entries:
            versions[(f.rcs, f.revision)] = i

        p = None
        if c.branch in branches:
            p = branches[c.branch]
        else:
            for f in c.entries:
                p = max(p, versions.get((f.rcs, f.parent), None))

        c.parents = []
        if p is not None:
            c.parents.append(changesets[p])

        if mergefrom:
            m = mergefrom.search(c.comment)
            if m:
                m = m.group(1)
                if m == 'HEAD':
                    m = None
                if m in branches and c.branch != m:
                    c.parents.append(changesets[branches[m]])

        if mergeto:
            m = mergeto.search(c.comment)
            if m:
                try:
                    m = m.group(1)
                    if m == 'HEAD':
                        m = None
                except:
                    m = None   # if no group found then merge to HEAD
                if m in branches and c.branch != m:
                    # insert empty changeset for merge
                    cc = changeset(author=c.author, branch=m, date=c.date,
                                   comment='convert-repo: CVS merge from branch %s' % c.branch,
                                   entries=[], tags=[], parents=[changesets[branches[m]], c])
                    changesets.insert(i + 1, cc)
                    branches[m] = i + 1

                    # adjust our loop counters now we have inserted a new entry
                    n += 1
                    i += 2
                    continue

        branches[c.branch] = i
        i += 1

    # Number changesets

    for i, c in enumerate(changesets):
        c.id = i + 1

    ui.status(_('%d changeset entries\n') % len(changesets))

    return changesets
@@ -0,0 +1,74 @@
# Revision graph generator for Mercurial
#
# Copyright 2008 Dirkjan Ochtman <dirkjan@ochtman.nl>
# Copyright 2007 Joel Rosdahl <joel@rosdahl.net>
#
# This software may be used and distributed according to the terms of
# the GNU General Public License, incorporated herein by reference.

from node import nullrev, short
import ui, hg, util, templatefilters

def graph(repo, start_rev, stop_rev):
    """incremental revision grapher

    This generator function walks through the revision history from
    revision start_rev to revision stop_rev (which must be less than
    or equal to start_rev) and for each revision emits tuples with the
    following elements:

      - Current node
      - Column and color for the current node
      - Edges; a list of (col, next_col, color) indicating the edges between
        the current node and its parents.
      - First line of the changeset description
      - The changeset author
      - The changeset date/time
    """

    assert start_rev >= stop_rev
    curr_rev = start_rev
    revs = []
    cl = repo.changelog
    colors = {}
    new_color = 1

    while curr_rev >= stop_rev:
        node = cl.node(curr_rev)

        # Compute revs and next_revs
        if curr_rev not in revs:
            revs.append(curr_rev) # new head
            colors[curr_rev] = new_color
            new_color += 1

        idx = revs.index(curr_rev)
        color = colors.pop(curr_rev)
        next = revs[:]

        # Add parents to next_revs
        parents = [x for x in cl.parentrevs(curr_rev) if x != nullrev]
        addparents = [p for p in parents if p not in next]
        next[idx:idx + 1] = addparents

        # Set colors for the parents
        for i, p in enumerate(addparents):
            if not i:
                colors[p] = color
            else:
                colors[p] = new_color
                new_color += 1

        # Add edges to the graph
        edges = []
        for col, r in enumerate(revs):
            if r in next:
                edges.append((col, next.index(r), colors[r]))
            elif r == curr_rev:
                for p in parents:
                    edges.append((col, next.index(p), colors[p]))

        # Yield and move on
        yield (repo[curr_rev], (idx, color), edges)
        revs = next
        curr_rev -= 1
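
The hunk above (apparently mercurial/graphmod.py) yields one tuple per revision: the changectx, its (column, color) slot, and the list of edges to its parents. A minimal consumer sketch, assuming a localrepository object and valid revision numbers; print_graph is a hypothetical helper, not part of this changeset:

    # illustrative only: print one row per revision walked by graph()
    from mercurial.node import short
    from mercurial import graphmod

    def print_graph(repo, start_rev, stop_rev):
        # stop_rev must be <= start_rev; graph() walks from start_rev downwards
        for ctx, (col, color), edges in graphmod.graph(repo, start_rev, stop_rev):
            print col, color, short(ctx.node()), edges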
@@ -0,0 +1,140 @@
# hgweb/webutil.py - utility library for the web interface.
#
# Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms
# of the GNU General Public License, incorporated herein by reference.

import os
from mercurial.node import hex, nullid
from mercurial.repo import RepoError
from mercurial import util

def up(p):
    if p[0] != "/":
        p = "/" + p
    if p[-1] == "/":
        p = p[:-1]
    up = os.path.dirname(p)
    if up == "/":
        return "/"
    return up + "/"

def revnavgen(pos, pagelen, limit, nodefunc):
    def seq(factor, limit=None):
        if limit:
            yield limit
            if limit >= 20 and limit <= 40:
                yield 50
        else:
            yield 1 * factor
            yield 3 * factor
        for f in seq(factor * 10):
            yield f

    def nav(**map):
        l = []
        last = 0
        for f in seq(1, pagelen):
            if f < pagelen or f <= last:
                continue
            if f > limit:
                break
            last = f
            if pos + f < limit:
                l.append(("+%d" % f, hex(nodefunc(pos + f).node())))
            if pos - f >= 0:
                l.insert(0, ("-%d" % f, hex(nodefunc(pos - f).node())))

        try:
            yield {"label": "(0)", "node": hex(nodefunc('0').node())}

            for label, node in l:
                yield {"label": label, "node": node}

            yield {"label": "tip", "node": "tip"}
        except RepoError:
            pass

    return nav

def siblings(siblings=[], hiderev=None, **args):
    siblings = [s for s in siblings if s.node() != nullid]
    if len(siblings) == 1 and siblings[0].rev() == hiderev:
        return
    for s in siblings:
        d = {'node': hex(s.node()), 'rev': s.rev()}
        if hasattr(s, 'path'):
            d['file'] = s.path()
        d.update(args)
        yield d

def renamelink(fctx):
    r = fctx.renamed()
    if r:
        return [dict(file=r[0], node=hex(r[1]))]
    return []

def nodetagsdict(repo, node):
    return [{"name": i} for i in repo.nodetags(node)]

def nodebranchdict(repo, ctx):
    branches = []
    branch = ctx.branch()
    # If this is an empty repo, ctx.node() == nullid,
    # ctx.branch() == 'default', but branchtags() is
    # an empty dict. Using dict.get avoids a traceback.
    if repo.branchtags().get(branch) == ctx.node():
        branches.append({"name": branch})
    return branches

def nodeinbranch(repo, ctx):
    branches = []
    branch = ctx.branch()
    if branch != 'default' and repo.branchtags().get(branch) != ctx.node():
        branches.append({"name": branch})
    return branches

def nodebranchnodefault(ctx):
    branches = []
    branch = ctx.branch()
    if branch != 'default':
        branches.append({"name": branch})
    return branches

def showtag(repo, tmpl, t1, node=nullid, **args):
    for t in repo.nodetags(node):
        yield tmpl(t1, tag=t, **args)

def cleanpath(repo, path):
    path = path.lstrip('/')
    return util.canonpath(repo.root, '', path)

def changectx(repo, req):
    changeid = "tip"
    if 'node' in req.form:
        changeid = req.form['node'][0]
    elif 'manifest' in req.form:
        changeid = req.form['manifest'][0]

    try:
        ctx = repo[changeid]
    except RepoError:
        man = repo.manifest
        ctx = repo[man.linkrev(man.lookup(changeid))]

    return ctx

def filectx(repo, req):
    path = cleanpath(repo, req.form['file'][0])
    if 'node' in req.form:
        changeid = req.form['node'][0]
    else:
        changeid = req.form['filenode'][0]
    try:
        fctx = repo[changeid][path]
    except RepoError:
        fctx = repo.filectx(path, fileid=changeid)

    return fctx
@@ -0,0 +1,47 @@
import util

class _match(object):
    def __init__(self, root, cwd, files, mf, ap):
        self._root = root
        self._cwd = cwd
        self._files = files
        self._fmap = dict.fromkeys(files)
        self.matchfn = mf
        self._anypats = ap
    def __call__(self, fn):
        return self.matchfn(fn)
    def __iter__(self):
        for f in self._files:
            yield f
    def bad(self, f, msg):
        return True
    def dir(self, f):
        pass
    def missing(self, f):
        pass
    def exact(self, f):
        return f in self._fmap
    def rel(self, f):
        return util.pathto(self._root, self._cwd, f)
    def files(self):
        return self._files
    def anypats(self):
        return self._anypats

class always(_match):
    def __init__(self, root, cwd):
        _match.__init__(self, root, cwd, [], lambda f: True, False)

class never(_match):
    def __init__(self, root, cwd):
        _match.__init__(self, root, cwd, [], lambda f: False, False)

class exact(_match):
    def __init__(self, root, cwd, files):
        _match.__init__(self, root, cwd, files, lambda f: f in files, False)

class match(_match):
    def __init__(self, root, cwd, patterns, include, exclude, default):
        f, mf, ap = util.matcher(root, cwd, patterns, include, exclude,
                                 None, default)
        _match.__init__(self, root, cwd, f, mf, ap)
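
A quick sketch of how these matcher classes behave, assuming the hunk above is mercurial/match.py; the repository root and file names below are placeholders, for illustration only:

    from mercurial import match as matchmod

    # exact() matches only the listed files; always() matches every path
    m = matchmod.exact('/repo', '', ['src/a.py', 'README'])
    print m('src/a.py')                              # True
    print m('src/b.py')                              # False
    print m.files()                                  # ['src/a.py', 'README']
    print matchmod.always('/repo', '')('anything')   # True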
@@ -0,0 +1,169 @@
/*
 parsers.c - efficient content parsing

 Copyright 2008 Matt Mackall <mpm@selenic.com> and others

 This software may be used and distributed according to the terms of
 the GNU General Public License, incorporated herein by reference.
*/

#include <Python.h>
#include <ctype.h>
#include <string.h>

static int hexdigit(char c)
{
	if (c >= '0' && c <= '9')
		return c - '0';

	if (c >= 'A' && c <= 'F')
		return c - 'A' + 10;

	if (c >= 'a' && c <= 'f')
		return c - 'a' + 10;

	return -1;
}

/*
 * Turn a hex-encoded string into binary.
 */
static PyObject *unhexlify(const char *str, int len)
{
	PyObject *ret = NULL;
	const char *c;
	char *d;

	if (len % 2) {
		PyErr_SetString(PyExc_ValueError,
				"input is not even in length");
		goto bail;
	}

	ret = PyString_FromStringAndSize(NULL, len / 2);
	if (!ret)
		goto bail;

	d = PyString_AsString(ret);
	if (!d)
		goto bail;

	for (c = str; c < str + len;) {
		int hi = hexdigit(*c++);
		int lo = hexdigit(*c++);

		if (hi == -1 || lo == -1) {
			PyErr_SetString(PyExc_ValueError,
					"input contains non-hex character");
			goto bail;
		}

		*d++ = (hi << 4) | lo;
	}

	goto done;

bail:
	Py_XDECREF(ret);
	ret = NULL;
done:
	return ret;
}

/*
 * This code assumes that a manifest is stitched together with newline
 * ('\n') characters.
 */
static PyObject *parse_manifest(PyObject *self, PyObject *args)
{
	PyObject *mfdict, *fdict;
	char *str, *cur, *start, *zero;
	int len;

	if (!PyArg_ParseTuple(args, "O!O!s#:parse_manifest",
			      &PyDict_Type, &mfdict,
			      &PyDict_Type, &fdict,
			      &str, &len))
		goto quit;

	for (start = cur = str, zero = NULL; cur < str + len; cur++) {
		PyObject *file = NULL, *node = NULL;
		PyObject *flags = NULL;
		int nlen;

		if (!*cur) {
			zero = cur;
			continue;
		}
		else if (*cur != '\n')
			continue;

		if (!zero) {
			PyErr_SetString(PyExc_ValueError,
					"manifest entry has no separator");
			goto quit;
		}

		file = PyString_FromStringAndSize(start, zero - start);
		if (!file)
			goto bail;

		nlen = cur - zero - 1;

		node = unhexlify(zero + 1, nlen > 40 ? 40 : nlen);
		if (!node)
			goto bail;

		if (nlen > 40) {
			PyObject *flags;

			flags = PyString_FromStringAndSize(zero + 41,
							   nlen - 40);
			if (!flags)
				goto bail;

			if (PyDict_SetItem(fdict, file, flags) == -1)
				goto bail;
		}

		if (PyDict_SetItem(mfdict, file, node) == -1)
			goto bail;

		start = cur + 1;
		zero = NULL;

		Py_XDECREF(flags);
		Py_XDECREF(node);
		Py_XDECREF(file);
		continue;
	bail:
		Py_XDECREF(flags);
		Py_XDECREF(node);
		Py_XDECREF(file);
		goto quit;
	}

	if (len > 0 && *(cur - 1) != '\n') {
		PyErr_SetString(PyExc_ValueError,
				"manifest contains trailing garbage");
		goto quit;
	}

	Py_INCREF(Py_None);
	return Py_None;

quit:
	return NULL;
}

static char parsers_doc[] = "Efficient content parsing.";

static PyMethodDef methods[] = {
	{"parse_manifest", parse_manifest, METH_VARARGS, "parse a manifest\n"},
	{NULL, NULL}
};

PyMODINIT_FUNC initparsers(void)
{
	Py_InitModule3("parsers", methods, parsers_doc);
}
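
The parser above expects manifest data as "<path>\0<40 hex node chars><optional flags>\n" records and fills two dicts: path to binary node, and path to flag string. A hedged sketch of calling it from Python, assuming the extension module is built into the mercurial package as parsers (illustrative only, not part of the diff):

    from mercurial import parsers

    # two entries: a plain file and one with an 'x' flag after the 40 hex digits
    data = "foo.txt\0" + "ab" * 20 + "\n" + "bin/tool\0" + "cd" * 20 + "x\n"
    mfdict, flagdict = {}, {}
    parsers.parse_manifest(mfdict, flagdict, data)
    print sorted(mfdict.keys())   # ['bin/tool', 'foo.txt'], mapped to 20-byte binary nodes
    print flagdict                # {'bin/tool': 'x'}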
@@ -0,0 +1,125 @@
# store.py - repository store handling for Mercurial
#
# Copyright 2008 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms
# of the GNU General Public License, incorporated herein by reference.

import os, stat, osutil, util

def _buildencodefun():
    e = '_'
    win_reserved = [ord(x) for x in '\\:*?"<>|']
    cmap = dict([ (chr(x), chr(x)) for x in xrange(127) ])
    for x in (range(32) + range(126, 256) + win_reserved):
        cmap[chr(x)] = "~%02x" % x
    for x in range(ord("A"), ord("Z")+1) + [ord(e)]:
        cmap[chr(x)] = e + chr(x).lower()
    dmap = {}
    for k, v in cmap.iteritems():
        dmap[v] = k
    def decode(s):
        i = 0
        while i < len(s):
            for l in xrange(1, 4):
                try:
                    yield dmap[s[i:i+l]]
                    i += l
                    break
                except KeyError:
                    pass
            else:
                raise KeyError
    return (lambda s: "".join([cmap[c] for c in s]),
            lambda s: "".join(list(decode(s))))

encodefilename, decodefilename = _buildencodefun()

def _dirwalk(path, recurse):
    '''yields (filename, size)'''
    for e, kind, st in osutil.listdir(path, stat=True):
        pe = os.path.join(path, e)
        if kind == stat.S_IFDIR:
            if recurse:
                for x in _dirwalk(pe, True):
                    yield x
        elif kind == stat.S_IFREG:
            yield pe, st.st_size

class _store:
    '''base class for local repository stores'''
    def __init__(self, path):
        self.path = path
        try:
            # files in .hg/ will be created using this mode
            mode = os.stat(self.path).st_mode
            # avoid some useless chmods
            if (0777 & ~util._umask) == (0777 & mode):
                mode = None
        except OSError:
            mode = None
        self.createmode = mode

    def join(self, f):
        return os.path.join(self.path, f)

    def _revlogfiles(self, relpath='', recurse=False):
        '''yields (filename, size)'''
        if relpath:
            path = os.path.join(self.path, relpath)
        else:
            path = self.path
        striplen = len(self.path) + len(os.sep)
        filetypes = ('.d', '.i')
        for f, size in _dirwalk(path, recurse):
            if (len(f) > 2) and f[-2:] in filetypes:
                yield util.pconvert(f[striplen:]), size

    def _datafiles(self):
        for x in self._revlogfiles('data', True):
            yield x

    def walk(self):
        '''yields (direncoded filename, size)'''
        # yield data files first
        for x in self._datafiles():
            yield x
        # yield manifest before changelog
        meta = util.sort(self._revlogfiles())
        meta.reverse()
        for x in meta:
            yield x

class directstore(_store):
    def __init__(self, path):
        _store.__init__(self, path)
        self.encodefn = lambda x: x
        self.opener = util.opener(self.path)
        self.opener.createmode = self.createmode

class encodedstore(_store):
    def __init__(self, path):
        _store.__init__(self, os.path.join(path, 'store'))
        self.encodefn = encodefilename
        op = util.opener(self.path)
        op.createmode = self.createmode
        self.opener = lambda f, *args, **kw: op(self.encodefn(f), *args, **kw)

    def _datafiles(self):
        for f, size in self._revlogfiles('data', True):
            yield decodefilename(f), size

    def join(self, f):
        return os.path.join(self.path, self.encodefn(f))

def encodefn(requirements):
    if 'store' not in requirements:
        return lambda x: x
    else:
        return encodefilename

def store(requirements, path):
    if 'store' not in requirements:
        return directstore(path)
    else:
        return encodedstore(path)
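
The _buildencodefun() table above escapes uppercase letters and '_' with an underscore prefix and turns control bytes and Windows-reserved characters into '~XX', so store filenames stay safe on case-insensitive and Windows filesystems. A small round-trip sketch, assuming the hunk is mercurial/store.py (illustrative only):

    from mercurial import store

    print store.encodefilename('data/Foo.TXT.i')   # 'data/_foo._t_x_t.i'
    print store.encodefilename('data/a?b.i')       # '?' is reserved on Windows -> 'data/a~3fb.i'
    name = 'data/Foo.TXT.i'
    assert store.decodefilename(store.encodefilename(name)) == name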
@@ -0,0 +1,72 @@
{header}
<title>{repo|escape}: {node|short}</title>
</head>
<body>
<div class="container">
<div class="menu">
<div class="logo">
<a href="http://www.selenic.com/mercurial/">
<img src="{staticurl}hglogo.png" width=75 height=90 border=0 alt="mercurial"></a>
</div>
<ul>
<li><a href="{url}shortlog/{node|short}{sessionvars%urlparameter}">log</a></li>
<li><a href="{url}graph/{node|short}{sessionvars%urlparameter}">graph</a></li>
<li><a href="{url}tags{sessionvars%urlparameter}">tags</a></li>
</ul>
<ul>
<li class="active">changeset</li>
<li><a href="{url}file/{node|short}{sessionvars%urlparameter}">browse</a></li>
</ul>
<ul>
{archives%archiveentry}</ul>
</ul>
</div>

<div class="main">

<h2>{repo|escape}</h2>
<h3>changeset {rev}:{node|short} {changesettag}</h3>

<form class="search" action="{url}log">
{sessionvars%hiddenformentry}
<p><input name="rev" id="search1" type="text" size="30"></p>
</form>

<div class="description">{desc|strip|escape|addbreaks}</div>

<table id="changesetEntry">
<tr>
 <th class="author">author</th>
 <td class="author">{author|obfuscate}</td>
</tr>
<tr>
 <th class="date">date</th>
 <td class="date">{date|date} ({date|age} ago)</td></tr>
<tr>
 <th class="author">parents</th>
 <td class="author">{parent%changesetparent}</td>
</tr>
<tr>
 <th class="author">children</th>
 <td class="author">{child%changesetchild}</td>
</tr>
<tr>
 <th class="files">files</th>
 <td class="files">{files}</td></tr>
</tr>
</table>
<tr>

<div class="overflow">
<table class="bigtable">
<tr>
 <th class="lineno">line</th>
 <th class="source">diff</th>
</tr>
</table>
{diff}
</div>
</div>
{footer}

@@ -0,0 +1,40 @@
{header}
<title>{repo|escape}: error</title>
</head>
<body>

<div class="content">
<div class="menu">
<div class="logo">
<a href="http://www.selenic.com/mercurial/">
<img src="{staticurl}hglogo.png" width=75 height=90 border=0 alt="mercurial"></a>
</div>
<ul>
<li><a href="{url}log{sessionvars%urlparameter}">log</a></li>
<li><a href="{url}graph{sessionvars%urlparameter}">graph</a></li>
<li><a href="{url}tags{sessionvars%urlparameter}">tags</a></li>
</ul>
</div>

<div class="main">

<h2>{repo|escape}</h2>
<h3>error</h3>

<form class="search" action="{url}log">
{sessionvars%hiddenformentry}
<p><input name="rev" id="search1" type="text" size="30"></p>
</form>

<div class="description">
<p>
An error occurred while processing your request:
</p>
<p>
{error|escape}
</p>
</div>
</div>
</div>

{footer}
@@ -0,0 +1,77 @@
{header}
<title>{repo|escape}: {file|escape} annotate</title>
</head>
<body>

<div class="container">
<div class="menu">
<div class="logo">
<a href="http://www.selenic.com/mercurial/">
<img src="{staticurl}hglogo.png" width=75 height=90 border=0 alt="mercurial"></a>
</div>
<ul>
<li><a href="{url}shortlog/{node|short}{sessionvars%urlparameter}">log</a></li>
<li><a href="{url}graph/{node|short}{sessionvars%urlparameter}">graph</a></li>
<li><a href="{url}tags{sessionvars%urlparameter}">tags</a></li>
</ul>

<ul>
<li><a href="{url}rev/{node|short}{sessionvars%urlparameter}">changeset</a></li>
<li><a href="{url}file/{node|short}{path|urlescape}{sessionvars%urlparameter}">browse</a></li>
</ul>
<ul>
<li><a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">file</a></li>
<li><a href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">diff</a></li>
<li class="active">annotate</li>
<li><a href="{url}log/{node|short}/{file|urlescape}{sessionvars%urlparameter}">file log</a></li>
<li><a href="{url}raw-annotate/{node|short}/{file|urlescape}">raw</a></li>
</ul>
</div>

<div class="main">
<h2>{repo|escape}</h2>
<h3>annotate {file|escape} @ {rev}:{node|short}</h2>

<form class="search" action="{url}log">
{sessionvars%hiddenformentry}
<p><input name="rev" id="search1" type="text" size="30"></p>
</form>

<div class="description">{desc|strip|escape|addbreaks}</div>

<table id="changesetEntry">
<tr>
 <th class="author">author</th>
 <td class="author">{author|obfuscate}</td>
</tr>
<tr>
 <th class="date">date</th>
 <td class="date">{date|date} ({date|age} ago)</td>
</tr>
<tr>
 <th class="author">parents</th>
 <td class="author">{parent%filerevparent}</td>
</tr>
<tr>
 <th class="author">children</th>
 <td class="author">{child%filerevchild}</td>
</tr>
{changesettag}
</table>

<br/>

<div class="overflow">
<table class="bigtable">
<tr>
 <th class="annotate">rev</th>
 <th class="lineno">line</th>
 <th class="line">source</th>
</tr>
{annotate%annotateline}
</table>
</div>
</div>
</div>

{footer}
@@ -0,0 +1,75 | |||||
|
1 | {header} | |||
|
2 | <title>{repo|escape}: {file|escape} diff</title> | |||
|
3 | </head> | |||
|
4 | <body> | |||
|
5 | ||||
|
6 | <div class="container"> | |||
|
7 | <div class="menu"> | |||
|
8 | <div class="logo"> | |||
|
9 | <a href="http://www.selenic.com/mercurial/"> | |||
|
10 | <img src="{staticurl}hglogo.png" width=75 height=90 border=0 alt="mercurial"></a> | |||
|
11 | </div> | |||
|
12 | <ul> | |||
|
13 | <li><a href="{url}shortlog/{node|short}{sessionvars%urlparameter}">log</a></li> | |||
|
14 | <li><a href="{url}graph/{node|short}{sessionvars%urlparameter}">graph</a></li> | |||
|
15 | <li><a href="{url}tags{sessionvars%urlparameter}">tags</a></li> | |||
|
16 | </ul> | |||
|
17 | <ul> | |||
|
18 | <li><a href="{url}rev/{node|short}{sessionvars%urlparameter}">changeset</a></li> | |||
|
19 | <li><a href="{url}file/{node|short}{path|urlescape}{sessionvars%urlparameter}">browse</a></li> | |||
|
20 | </ul> | |||
|
21 | <ul> | |||
|
22 | <li><a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">file</a></li> | |||
|
23 | <li class="active">diff</li> | |||
|
24 | <li><a href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}">annotate</a></li> | |||
|
25 | <li><a href="{url}log/{node|short}/{file|urlescape}{sessionvars%urlparameter}">file log</a></li> | |||
|
26 | <li><a href="{url}raw-file/{node|short}/{file|urlescape}">raw</a></li> | |||
|
27 | </ul> | |||
|
28 | </div> | |||
|
29 | ||||
|
30 | <div class="main"> | |||
|
31 | <h2>{repo|escape}</h2> | |||
|
32 | <h3>diff {file|escape} @ {rev}:{node|short}</h3> | |||
|
33 | ||||
|
34 | <form class="search" action="{url}log"> | |||
|
35 | {sessionvars%hiddenformentry} | |||
|
36 | <p><input name="rev" id="search1" type="text" size="30"></p> | |||
|
37 | </form> | |||
|
38 | ||||
|
39 | <div class="description">{desc|strip|escape|addbreaks}</div> | |||
|
40 | ||||
|
41 | <table id="changesetEntry"> | |||
|
42 | <tr> | |||
|
43 | <th>author</th> | |||
|
44 | <td>{author|obfuscate}</td> | |||
|
45 | </tr> | |||
|
46 | <tr> | |||
|
47 | <th>date</th> | |||
|
48 | <td>{date|date} ({date|age} ago)</td> | |||
|
49 | </tr> | |||
|
50 | <tr> | |||
|
51 | <th>parents</th> | |||
|
52 | <td>{parent%filerevparent}</td> | |||
|
53 | </tr> | |||
|
54 | <tr> | |||
|
55 | <th>children</th> | |||
|
56 | <td>{child%filerevchild}</td> | |||
|
57 | </tr> | |||
|
58 | {changesettag} | |||
|
59 | </table> | |||
|
60 | ||||
|
61 | <div class="overflow"> | |||
|
62 | <table class="bigtable"> | |||
|
63 | <tr> | |||
|
64 | <th class="lineno">line</th> | |||
|
65 | <th class="source">diff</th> | |||
|
66 | </tr> | |||
|
67 | </table> | |||
|
68 | {diff} | |||
|
69 | </div> | |||
|
70 | </div> | |||
|
71 | </div> | |||
|
72 | ||||
|
73 | {footer} | |||
|
74 | ||||
|
75 |
@@ -0,0 +1,59 | |||||
|
1 | {header} | |||
|
2 | <title>{repo|escape}: {file|escape} history</title> | |||
|
3 | <link rel="alternate" type="application/atom+xml" | |||
|
4 | href="{url}atom-log/tip/{file|urlescape}" title="Atom feed for {repo|escape}:{file}"> | |||
|
5 | <link rel="alternate" type="application/rss+xml" | |||
|
6 | href="{url}rss-log/tip/{file|urlescape}" title="RSS feed for {repo|escape}:{file}"> | |||
|
7 | </head> | |||
|
9 | <body> | |||
|
10 | ||||
|
11 | <div class="container"> | |||
|
12 | <div class="menu"> | |||
|
13 | <div class="logo"> | |||
|
14 | <a href="http://www.selenic.com/mercurial/"> | |||
|
15 | <img src="{staticurl}hglogo.png" width=75 height=90 border=0 alt="mercurial"></a> | |||
|
16 | </div> | |||
|
17 | <ul> | |||
|
18 | <li><a href="{url}shortlog/{node|short}{sessionvars%urlparameter}">log</a></li> | |||
|
19 | <li><a href="{url}graph/{node|short}{sessionvars%urlparameter}">graph</a></li> | |||
|
20 | <li><a href="{url}tags{sessionvars%urlparameter}">tags</a></li> | |||
|
21 | </ul> | |||
|
22 | <ul> | |||
|
23 | <li><a href="{url}rev/{node|short}{sessionvars%urlparameter}">changeset</a></li> | |||
|
24 | <li><a href="{url}file/{node|short}{path|urlescape}{sessionvars%urlparameter}">browse</a></li> | |||
|
25 | </ul> | |||
|
26 | <ul> | |||
|
27 | <li><a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">file</a></li> | |||
|
28 | <li><a href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">diff</a></li> | |||
|
29 | <li><a href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}">annotate</a></li> | |||
|
30 | <li class="active">file log</li> | |||
|
31 | <li><a href="{url}raw-file/{node|short}/{file|urlescape}">raw</a></li> | |||
|
32 | </ul> | |||
|
33 | </div> | |||
|
34 | ||||
|
35 | <div class="main"> | |||
|
36 | ||||
|
37 | <h2>{repo|escape}</h2> | |||
|
38 | <h3>log {file|escape}</h3> | |||
|
39 | ||||
|
40 | <form class="search" action="{url}log"> | |||
|
41 | {sessionvars%hiddenformentry} | |||
|
42 | <p><input name="rev" id="search1" type="text" size="30"></p> | |||
|
43 | </form> | |||
|
44 | ||||
|
45 | <div class="navigate">{nav%filenaventry}</div> | |||
|
46 | ||||
|
47 | <table class="bigtable"> | |||
|
48 | <tr> | |||
|
49 | <th class="age">age</th> | |||
|
50 | <th class="author">author</th> | |||
|
51 | <th class="description">description</th> | |||
|
52 | </tr> | |||
|
53 | {entries%filelogentry} | |||
|
54 | </table> | |||
|
55 | ||||
|
56 | </div> | |||
|
57 | </div> | |||
|
58 | ||||
|
59 | {footer} |
@@ -0,0 +1,5 | |||||
|
1 | <tr class="parity{parity}"> | |||
|
2 | <td class="age">{date|age}</td> | |||
|
3 | <td class="author">{author|person}</td> | |||
|
4 | <td class="description"><a href="{url}rev/{node|short}{sessionvars%urlparameter}">{desc|strip|firstline|escape}</a></td> | |||
|
5 | </tr> |
@@ -0,0 +1,74 | |||||
|
1 | {header} | |||
|
2 | <title>{repo|escape}: {node|short} {file|escape}</title> | |||
|
3 | </head> | |||
|
4 | <body> | |||
|
5 | ||||
|
6 | <div class="container"> | |||
|
7 | <div class="menu"> | |||
|
8 | <div class="logo"> | |||
|
9 | <a href="http://www.selenic.com/mercurial/"> | |||
|
10 | <img src="{staticurl}hglogo.png" width=75 height=90 border=0 alt="mercurial"></a> | |||
|
11 | </div> | |||
|
12 | <ul> | |||
|
13 | <li><a href="{url}shortlog/{node|short}{sessionvars%urlparameter}">log</a></li> | |||
|
14 | <li><a href="{url}graph/{node|short}{sessionvars%urlparameter}">graph</a></li> | |||
|
15 | <li><a href="{url}tags{sessionvars%urlparameter}">tags</a></li> | |||
|
16 | </ul> | |||
|
17 | <ul> | |||
|
18 | <li><a href="{url}rev/{node|short}{sessionvars%urlparameter}">changeset</a></li> | |||
|
19 | <li><a href="{url}file/{node|short}{path|urlescape}{sessionvars%urlparameter}">browse</a></li> | |||
|
20 | </ul> | |||
|
21 | <ul> | |||
|
22 | <li class="active">file</li> | |||
|
23 | <li><a href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">diff</a></li> | |||
|
24 | <li><a href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}">annotate</a></li> | |||
|
25 | <li><a href="{url}log/{node|short}/{file|urlescape}{sessionvars%urlparameter}">file log</a></li> | |||
|
26 | <li><a href="{url}raw-file/{node|short}/{file|urlescape}">raw</a></li> | |||
|
27 | </ul> | |||
|
28 | </div> | |||
|
29 | ||||
|
30 | <div class="main"> | |||
|
31 | ||||
|
32 | <h2>{repo|escape}</h2> | |||
|
33 | <h3>view {file|escape} @ {rev}:{node|short}</h3> | |||
|
34 | ||||
|
35 | <form class="search" action="{url}log"> | |||
|
36 | {sessionvars%hiddenformentry} | |||
|
37 | <p><input name="rev" id="search1" type="text" size="30"></p> | |||
|
38 | </form> | |||
|
39 | ||||
|
40 | <div class="description">{desc|strip|escape|addbreaks}</div> | |||
|
41 | ||||
|
42 | <table id="changesetEntry"> | |||
|
43 | <tr> | |||
|
44 | <th class="author">author</th> | |||
|
45 | <td class="author">{author|obfuscate}</td> | |||
|
46 | </tr> | |||
|
47 | <tr> | |||
|
48 | <th class="date">date</th> | |||
|
49 | <td class="date">{date|date} ({date|age} ago)</td> | |||
|
50 | </tr> | |||
|
51 | <tr> | |||
|
52 | <th class="author">parents</th> | |||
|
53 | <td class="author">{parent%filerevparent}</td> | |||
|
54 | </tr> | |||
|
55 | <tr> | |||
|
56 | <th class="author">children</th> | |||
|
57 | <td class="author">{child%filerevchild}</td> | |||
|
58 | </tr> | |||
|
59 | {changesettag} | |||
|
60 | </table> | |||
|
61 | ||||
|
62 | <div class="overflow"> | |||
|
63 | <table class="bigtable"> | |||
|
64 | <tr> | |||
|
65 | <th class="lineno">line</th> | |||
|
66 | <th class="source">source</th> | |||
|
67 | </tr> | |||
|
68 | {text%fileline} | |||
|
69 | </table> | |||
|
70 | </div> | |||
|
71 | </div> | |||
|
72 | </div> | |||
|
73 | ||||
|
74 | {footer} |
@@ -0,0 +1,113 | |||||
|
1 | {header} | |||
|
2 | <title>{repo|escape}: revision graph</title> | |||
|
3 | <link rel="alternate" type="application/atom+xml" | |||
|
4 | href="{url}atom-log" title="Atom feed for {repo|escape}: log"> | |||
|
5 | <link rel="alternate" type="application/rss+xml" | |||
|
6 | href="{url}rss-log" title="RSS feed for {repo|escape}: log"> | |||
|
7 | <!--[if IE]><script type="text/javascript" src="{staticurl}excanvas.js"></script><![endif]--> | |||
|
8 | </head> | |||
|
9 | <body> | |||
|
10 | ||||
|
11 | <div class="container"> | |||
|
12 | <div class="menu"> | |||
|
13 | <div class="logo"> | |||
|
14 | <a href="http://www.selenic.com/mercurial/"> | |||
|
15 | <img src="{staticurl}hglogo.png" width=75 height=90 border=0 alt="mercurial"></a> | |||
|
16 | </div> | |||
|
17 | <ul> | |||
|
18 | <li><a href="{url}shortlog/{node|short}{sessionvars%urlparameter}">log</a></li> | |||
|
19 | <li class="active">graph</li> | |||
|
20 | <li><a href="{url}tags{sessionvars%urlparameter}">tags</a></li> | |||
|
21 | </ul> | |||
|
22 | <ul> | |||
|
23 | <li><a href="{url}rev/{node|short}{sessionvars%urlparameter}">changeset</a></li> | |||
|
24 | <li><a href="{url}file/{node|short}{path|urlescape}{sessionvars%urlparameter}">browse</a></li> | |||
|
25 | </ul> | |||
|
26 | </div> | |||
|
27 | ||||
|
28 | <div class="main"> | |||
|
29 | <h2>{repo|escape}</h2> | |||
|
30 | <h3>graph</h3> | |||
|
31 | ||||
|
32 | <form class="search" action="{url}log"> | |||
|
33 | {sessionvars%hiddenformentry} | |||
|
34 | <p><input name="rev" id="search1" type="text" size="30"></p> | |||
|
35 | </form> | |||
|
36 | ||||
|
37 | <div class="navigate"> | |||
|
38 | <a href="{url}graph/{uprev}{sessionvars%urlparameter}?revcount={revcountless}">less</a> | |||
|
39 | <a href="{url}graph/{uprev}{sessionvars%urlparameter}?revcount={revcountmore}">more</a> | |||
|
40 | | {changenav%navgraphentry} | |||
|
41 | </div> | |||
|
42 | ||||
|
43 | <div id="noscript">The revision graph only works with JavaScript-enabled browsers.</div> | |||
|
44 | ||||
|
45 | <div id="wrapper"> | |||
|
46 | <ul id="nodebgs"></ul> | |||
|
47 | <canvas id="graph" width="224" height="{canvasheight}"></canvas> | |||
|
48 | <ul id="graphnodes"></ul> | |||
|
49 | </div> | |||
|
50 | ||||
|
51 | <script type="text/javascript" src="{staticurl}graph.js"></script> | |||
|
52 | <script> | |||
|
53 | <!-- hide script content | |||
|
54 | ||||
|
55 | document.getElementById('noscript').style.display = 'none'; | |||
|
56 | ||||
|
57 | var data = {jsdata|json}; | |||
|
58 | var graph = new Graph(); | |||
|
59 | graph.scale({bg_height}); | |||
|
60 | ||||
|
61 | graph.edge = function(x0, y0, x1, y1, color) { | |||
|
62 | ||||
|
63 | this.setColor(color, 0.0, 0.65); | |||
|
64 | this.ctx.beginPath(); | |||
|
65 | this.ctx.moveTo(x0, y0); | |||
|
66 | this.ctx.lineTo(x1, y1); | |||
|
67 | this.ctx.stroke(); | |||
|
68 | ||||
|
69 | } | |||
|
70 | ||||
|
71 | var revlink = '<li style="_STYLE"><span class="desc">'; | |||
|
72 | revlink += '<a href="{url}rev/_NODEID{sessionvars%urlparameter}" title="_NODEID">_DESC</a>'; | |||
|
73 | revlink += '</span><span class="tag">_TAGS</span>'; | |||
|
74 | revlink += '<span class="info">_DATE ago, by _USER</span></li>'; | |||
|
75 | ||||
|
76 | graph.vertex = function(x, y, color, parity, cur) { | |||
|
77 | ||||
|
78 | this.ctx.beginPath(); | |||
|
79 | color = this.setColor(color, 0.25, 0.75); | |||
|
80 | this.ctx.arc(x, y, radius, 0, Math.PI * 2, true); | |||
|
81 | this.ctx.fill(); | |||
|
82 | ||||
|
83 | var bg = '<li class="bg parity' + parity + '"></li>'; | |||
|
84 | var left = (this.columns + 1) * this.bg_height; | |||
|
85 | var nstyle = 'padding-left: ' + left + 'px;'; | |||
|
86 | var item = revlink.replace(/_STYLE/, nstyle); | |||
|
87 | item = item.replace(/_PARITY/, 'parity' + parity); | |||
|
88 | item = item.replace(/_NODEID/, cur[0]); | |||
|
89 | item = item.replace(/_NODEID/, cur[0]); | |||
|
90 | item = item.replace(/_DESC/, cur[3]); | |||
|
91 | item = item.replace(/_USER/, cur[4]); | |||
|
92 | item = item.replace(/_DATE/, cur[5]); | |||
|
93 | item = item.replace(/_TAGS/, cur[7].join(' ')); | |||
|
94 | ||||
|
95 | return [bg, item]; | |||
|
96 | ||||
|
97 | } | |||
|
98 | ||||
|
99 | graph.render(data); | |||
|
100 | ||||
|
101 | // stop hiding script --> | |||
|
102 | </script> | |||
|
103 | ||||
|
104 | <div class="navigate"> | |||
|
105 | <a href="{url}graph/{uprev}{sessionvars%urlparameter}?revcount={revcountless}">less</a> | |||
|
106 | <a href="{url}graph/{uprev}{sessionvars%urlparameter}?revcount={revcountmore}">more</a> | |||
|
107 | | {changenav%navgraphentry} | |||
|
108 | </div> | |||
|
109 | ||||
|
110 | </div> | |||
|
111 | </div> | |||
|
112 | ||||
|
113 | {footer} |
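
The inline script above fills the revlink markup by indexing each row of {jsdata|json} positionally: cur[0] is the short node id, cur[3] the description, cur[4] the user, cur[5] the age string and cur[7] the tag list (the gitweb-style graph template later in this change also reads branch information from cur[6]). The exact row layout is produced by hgweb's graph handler, which is not part of this diff; as a hedged sketch only, one row could look roughly like the following, where the unused slots and concrete values are assumptions:

    # Hypothetical shape of one {jsdata|json} row, inferred from the cur[...]
    # indices used by graph.vertex above; cur[1] and cur[2] are assumed to be
    # the vertex/edge data consumed by graph.render in graph.js.
    row = ["a1b2c3d4e5f6",          # cur[0]: short node id for the rev link
           (1, 0),                  # cur[1]: vertex column/colour (assumption)
           [],                      # cur[2]: edges to the next row (assumption)
           "fix template escaping", # cur[3]: first line of the description
           "joe",                   # cur[4]: username
           "3 days",                # cur[5]: age, rendered as "_DATE ago"
           ("default", False),      # cur[6]: branch info (assumption)
           ["tip"]]                 # cur[7]: tags, joined with spaces
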
@@ -0,0 +1,7 | |||||
|
1 | <!-- quirksmode --> | |||
|
2 | <!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN"> | |||
|
3 | <html> | |||
|
4 | <head> | |||
|
5 | <link rel="icon" href="{staticurl}hgicon.png" type="image/png"> | |||
|
6 | <meta name="robots" content="index, nofollow" /> | |||
|
7 | <link rel="stylesheet" href="{staticurl}style-coal.css" type="text/css" /> |
@@ -0,0 +1,26 | |||||
|
1 | {header} | |||
|
2 | <title>Mercurial repositories index</title> | |||
|
3 | </head> | |||
|
4 | <body> | |||
|
5 | ||||
|
6 | <div class="container"> | |||
|
7 | <div class="menu"> | |||
|
8 | <a href="http://www.selenic.com/mercurial/"> | |||
|
9 | <img src="{staticurl}hglogo.png" width=75 height=90 border=0 alt="mercurial"></a> | |||
|
10 | </div> | |||
|
11 | <div class="main"> | |||
|
12 | <h2>Mercurial Repositories</h2> | |||
|
13 | ||||
|
14 | <table class="bigtable"> | |||
|
15 | <tr> | |||
|
16 | <th><a href="?sort={sort_name}">Name</a></th> | |||
|
17 | <th><a href="?sort={sort_description}">Description</a></th> | |||
|
18 | <th><a href="?sort={sort_contact}">Contact</a></th> | |||
|
19 | <th><a href="?sort={sort_lastchange}">Last change</a></th> | |||
|
20 | <th> </th> | |||
|
21 | </tr> | |||
|
22 | {entries%indexentry} | |||
|
23 | </table> | |||
|
24 | </div> | |||
|
25 | </div> | |||
|
26 | {footer} |
@@ -0,0 +1,52 | |||||
|
1 | {header} | |||
|
2 | <title>{repo|escape}: {node|short} {path|escape}</title> | |||
|
3 | </head> | |||
|
4 | <body> | |||
|
5 | ||||
|
6 | <div class="container"> | |||
|
7 | <div class="menu"> | |||
|
8 | <div class="logo"> | |||
|
9 | <a href="http://www.selenic.com/mercurial/"> | |||
|
10 | <img src="{staticurl}hglogo.png" width=75 height=90 border=0 alt="mercurial"></a> | |||
|
11 | </div> | |||
|
12 | <ul> | |||
|
13 | <li><a href="{url}shortlog/{node|short}{sessionvars%urlparameter}">log</a></li> | |||
|
14 | <li><a href="{url}graph/{node|short}{sessionvars%urlparameter}">graph</a></li> | |||
|
15 | <li><a href="{url}tags{sessionvars%urlparameter}">tags</a></li> | |||
|
16 | </ul> | |||
|
17 | <ul> | |||
|
18 | <li><a href="{url}rev/{node|short}{sessionvars%urlparameter}">changeset</a></li> | |||
|
19 | <li class="active">browse</li> | |||
|
20 | </ul> | |||
|
21 | <ul> | |||
|
22 | {archives%archiveentry} | |||
|
23 | </ul> | |||
|
24 | </div> | |||
|
25 | ||||
|
26 | <div class="main"> | |||
|
27 | ||||
|
28 | <h2>{repo|escape}</h2> | |||
|
29 | <h3>directory {path|escape} @ {rev}:{node|short} {tags%changelogtag}</h3> | |||
|
30 | ||||
|
31 | <form class="search" action="{url}log"> | |||
|
32 | {sessionvars%hiddenformentry} | |||
|
33 | <p><input name="rev" id="search1" type="text" size="30"></p> | |||
|
34 | </form> | |||
|
35 | ||||
|
36 | <table class="bigtable"> | |||
|
37 | <tr> | |||
|
38 | <th class="name">name</th> | |||
|
39 | <th class="size">size</th> | |||
|
40 | <th class="permissions">permissions</th> | |||
|
41 | </tr> | |||
|
42 | <tr class="fileline parity{upparity}"> | |||
|
43 | <td class="name"><a href="{url}file/{node|short}{up|urlescape}{sessionvars%urlparameter}">[up]</a></td> | |||
|
44 | <td class="size"></td> | |||
|
45 | <td class="permissions">drwxr-xr-x</td> | |||
|
46 | </tr> | |||
|
47 | {dentries%direntry} | |||
|
48 | {fentries%fileentry} | |||
|
49 | </table> | |||
|
50 | </div> | |||
|
51 | </div> | |||
|
52 | {footer} |
@@ -0,0 +1,71 | |||||
|
1 | default = 'shortlog' | |||
|
2 | ||||
|
3 | mimetype = 'text/html; charset={encoding}' | |||
|
4 | header = header.tmpl | |||
|
5 | footer = footer.tmpl | |||
|
6 | search = search.tmpl | |||
|
7 | ||||
|
8 | changelog = shortlog.tmpl | |||
|
9 | shortlog = shortlog.tmpl | |||
|
10 | shortlogentry = shortlogentry.tmpl | |||
|
11 | graph = graph.tmpl | |||
|
12 | ||||
|
13 | naventry = '<a href="{url}log/{node|short}{sessionvars%urlparameter}">{label|escape}</a> ' | |||
|
14 | navshortentry = '<a href="{url}shortlog/{node|short}{sessionvars%urlparameter}">{label|escape}</a> ' | |||
|
15 | navgraphentry = '<a href="{url}graph/{node|short}{sessionvars%urlparameter}">{label|escape}</a> ' | |||
|
16 | filenaventry = '<a href="{url}log/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{label|escape}</a> ' | |||
|
17 | filedifflink = '<a href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{file|escape}</a> ' | |||
|
18 | filenodelink = '<a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{file|escape}</a> ' | |||
|
19 | fileellipses = '...' | |||
|
20 | changelogentry = shortlogentry.tmpl | |||
|
21 | searchentry = shortlogentry.tmpl | |||
|
22 | changeset = changeset.tmpl | |||
|
23 | manifest = manifest.tmpl | |||
|
24 | ||||
|
25 | direntry = '<tr class="fileline parity{parity}"><td class="name"><a href="{url}file/{node|short}{path|urlescape}{sessionvars%urlparameter}"><img src="{staticurl}coal-folder.png"> {basename|escape}/</a><td class="size"></td><td class="permissions">drwxr-xr-x</td></tr>' | |||
|
26 | fileentry = '<tr class="fileline parity{parity}"><td class="filename"><a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}#l1"><img src="{staticurl}coal-file.png"> {basename|escape}</a></td><td class="size">{size}</td><td class="permissions">{permissions|permissions}</td></tr>' | |||
|
27 | ||||
|
28 | filerevision = filerevision.tmpl | |||
|
29 | fileannotate = fileannotate.tmpl | |||
|
30 | filediff = filediff.tmpl | |||
|
31 | filelog = filelog.tmpl | |||
|
32 | fileline = '<tr class="parity{parity}"><td class="lineno"><a href="#{lineid}" id="{lineid}">{linenumber}</a></td><td class="source">{line|escape}</td></tr>' | |||
|
33 | filelogentry = filelogentry.tmpl | |||
|
34 | ||||
|
35 | annotateline = '<tr class="parity{parity}"><td class="annotate"><a href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}#{targetline}" title="{node|short}: {desc|escape|firstline}">{author|user}@{rev}</a></td><td class="lineno"><a href="#{lineid}" id="{lineid}">{linenumber}</a></td><td class="source">{line|escape}</td></tr>' | |||
|
36 | ||||
|
37 | diffblock = '<table class="bigtable parity{parity}">{lines}</table>' | |||
|
38 | difflineplus = '<tr><td class="lineno"><a href="#{lineid}" id="{lineid}">{linenumber}</a></td><td class="source plusline">{line|escape}</td></tr>' | |||
|
39 | difflineminus = '<tr><td class="lineno"><a href="#{lineid}" id="{lineid}">{linenumber}</a></td><td class="source minusline">{line|escape}</td></tr>' | |||
|
40 | difflineat = '<tr><td class="lineno"><a href="#{lineid}" id="{lineid}">{linenumber}</a></td><td class="source atline">{line|escape}</td></tr>' | |||
|
41 | diffline = '<tr><td class="lineno"><a href="#{lineid}" id="{lineid}">{linenumber}</a></td><td class="source">{line|escape}</td></tr>' | |||
|
42 | ||||
|
43 | changelogparent = '<tr><th class="parent">parent {rev}:</th><td class="parent"><a href="{url}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a></td></tr>' | |||
|
44 | ||||
|
45 | changesetparent = '<a href="{url}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a> ' | |||
|
46 | ||||
|
47 | filerevparent = '<a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{rename%filerename}{node|short}</a> ' | |||
|
48 | filerevchild = '<a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{node|short}</a> ' | |||
|
49 | ||||
|
50 | filerename = '{file|escape}@' | |||
|
51 | filelogrename = '<tr><th>base:</th><td><a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{file|escape}@{node|short}</a></td></tr>' | |||
|
52 | fileannotateparent = '<tr><td class="metatag">parent:</td><td><a href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{rename%filerename}{node|short}</a></td></tr>' | |||
|
53 | changesetchild = '<a href="{url}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a>' | |||
|
54 | changelogchild = '<tr><th class="child">child</th><td class="child"><a href="{url}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a></td></tr>' | |||
|
55 | fileannotatechild = '<tr><td class="metatag">child:</td><td><a href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{node|short}</a></td></tr>' | |||
|
56 | tags = tags.tmpl | |||
|
57 | tagentry = '<tr class="tagEntry parity{parity}"><td><a href="{url}rev/{node|short}{sessionvars%urlparameter}">{tag|escape}</a></td><td class="node">{node|short}</td></tr>' | |||
|
58 | changelogtag = '<span class="tag">{name|escape}</span> ' | |||
|
59 | changesettag = '<span class="tag">{tag|escape}</span> ' | |||
|
60 | filediffparent = '<tr><th class="parent">parent {rev}:</th><td class="parent"><a href="{url}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a></td></tr>' | |||
|
61 | filelogparent = '<tr><th>parent {rev}:</th><td><a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{node|short}</a></td></tr>' | |||
|
62 | filediffchild = '<tr><th class="child">child {rev}:</th><td class="child"><a href="{url}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a></td></tr>' | |||
|
63 | filelogchild = '<tr><th>child {rev}:</th><td><a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{node|short}</a></td></tr>' | |||
|
64 | indexentry = '<tr class="parity{parity}"><td><a href="{url}{sessionvars%urlparameter}">{name|escape}</a></td><td>{description}</td><td>{contact|obfuscate}</td><td class="age">{lastchange|age} ago</td><td class="indexlinks">{archives%indexarchiveentry}</td></tr>\n' | |||
|
65 | indexarchiveentry = '<a href="{url}archive/{node|short}{extension|urlescape}"> ↓{type|escape}</a>' | |||
|
66 | index = index.tmpl | |||
|
67 | archiveentry = '<li><a href="{url}archive/{node|short}{extension|urlescape}">{type|escape}</a></li>' | |||
|
68 | notfound = notfound.tmpl | |||
|
69 | error = error.tmpl | |||
|
70 | urlparameter = '{separator}{name}={value|urlescape}' | |||
|
71 | hiddenformentry = '<input type="hidden" name="{name}" value="{value|escape}" />' |
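
The map file above binds hgweb view names either to .tmpl files or to inline fragments. Fragments substitute {keyword}, pipe values through filters with {keyword|filter} (escape, urlescape, age, ...), and expand list values element-by-element with {list%entry} (for example {rename%filerename} in the filerevparent entry above). The following is a minimal sketch of that substitution model only, not Mercurial's actual templater (which lives in mercurial/templater.py and is far more general); the sample values are invented:

    # Toy expansion of an inline map fragment (illustration only).
    import cgi, urllib

    filters = {'escape': cgi.escape, 'urlescape': urllib.quote}

    def expand(fragment, values):
        out = fragment
        for key, val in values.items():
            for name, fn in filters.items():
                out = out.replace('{%s|%s}' % (key, name), fn(val))
            out = out.replace('{%s}' % key, val)
        return out

    fileline = ('<tr class="parity{parity}"><td class="lineno">'
                '<a href="#{lineid}" id="{lineid}">{linenumber}</a></td>'
                '<td class="source">{line|escape}</td></tr>')

    print expand(fileline, {'parity': '0', 'lineid': 'l1',
                            'linenumber': '1', 'line': 'if a < b:'})
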
@@ -0,0 +1,12 | |||||
|
1 | {header} | |||
|
2 | <title>Mercurial repository not found</title> | |||
|
3 | </head> | |||
|
4 | <body> | |||
|
5 | ||||
|
6 | <h2>Mercurial repository not found</h2> | |||
|
7 | ||||
|
8 | The specified repository "{repo|escape}" is unknown, sorry. | |||
|
9 | ||||
|
10 | Please go back to the main repository list page. | |||
|
11 | ||||
|
12 | {footer} |
@@ -0,0 +1,41 | |||||
|
1 | {header} | |||
|
2 | <title>{repo|escape}: searching for {query|escape}</title> | |||
|
3 | </head> | |||
|
4 | <body> | |||
|
5 | ||||
|
6 | <div class="container"> | |||
|
7 | <div class="menu"> | |||
|
8 | <div class="logo"> | |||
|
9 | <a href="http://www.selenic.com/mercurial/"> | |||
|
10 | <img src="{staticurl}hglogo.png" width=75 height=90 border=0 alt="mercurial"></a> | |||
|
11 | </div> | |||
|
12 | <ul> | |||
|
13 | <li><a href="{url}shortlog{sessionvars%urlparameter}">log</a></li> | |||
|
14 | <li><a href="{url}graph{sessionvars%urlparameter}">graph</a></li> | |||
|
15 | <li><a href="{url}tags{sessionvars%urlparameter}">tags</a></li> | |||
|
16 | </ul> | |||
|
17 | </div> | |||
|
18 | ||||
|
19 | <div class="main"> | |||
|
20 | ||||
|
21 | <h2>{repo|escape}</h2> | |||
|
22 | <h3>searching for '{query|escape}'</h3> | |||
|
23 | ||||
|
24 | <form class="search" action="{url}log"> | |||
|
25 | {sessionvars%hiddenformentry} | |||
|
26 | <p><input name="rev" id="search1" type="text" size="30"></p> | |||
|
27 | </form> | |||
|
28 | ||||
|
29 | <table class="bigtable"> | |||
|
30 | <tr> | |||
|
31 | <th class="age">age</th> | |||
|
32 | <th class="author">author</th> | |||
|
33 | <th class="description">description</th> | |||
|
34 | </tr> | |||
|
35 | {entries} | |||
|
36 | </table> | |||
|
37 | ||||
|
38 | </div> | |||
|
39 | </div> | |||
|
40 | ||||
|
41 | {footer} |
@@ -0,0 +1,55 | |||||
|
1 | {header} | |||
|
2 | <title>{repo|escape}: log</title> | |||
|
3 | <link rel="alternate" type="application/atom+xml" | |||
|
4 | href="{url}atom-log" title="Atom feed for {repo|escape}"> | |||
|
5 | <link rel="alternate" type="application/rss+xml" | |||
|
6 | href="{url}rss-log" title="RSS feed for {repo|escape}"> | |||
|
7 | </head> | |||
|
8 | <body> | |||
|
9 | ||||
|
10 | <div class="container"> | |||
|
11 | <div class="menu"> | |||
|
12 | <div class="logo"> | |||
|
13 | <a href="http://www.selenic.com/mercurial/"> | |||
|
14 | <img src="{staticurl}hglogo.png" width=75 height=90 border=0 alt="mercurial"></a> | |||
|
15 | </div> | |||
|
16 | <ul> | |||
|
17 | <li class="active">log</li> | |||
|
18 | <li><a href="{url}graph/{node|short}{sessionvars%urlparameter}">graph</a></li> | |||
|
19 | <li><a href="{url}tags{sessionvars%urlparameter}">tags</a></li> | |||
|
20 | </ul> | |||
|
21 | <ul> | |||
|
22 | <li><a href="{url}rev/{node|short}{sessionvars%urlparameter}">changeset</a></li> | |||
|
23 | <li><a href="{url}file/{node|short}{path|urlescape}{sessionvars%urlparameter}">browse</a></li> | |||
|
24 | </ul> | |||
|
25 | <ul> | |||
|
26 | {archives%archiveentry} | |||
|
27 | </ul> | |||
|
28 | </div> | |||
|
29 | ||||
|
30 | <div class="main"> | |||
|
31 | ||||
|
32 | <h2>{repo|escape}</h2> | |||
|
33 | <h3>log</h3> | |||
|
34 | ||||
|
35 | <form class="search" action="{url}log"> | |||
|
36 | {sessionvars%hiddenformentry} | |||
|
37 | <p><input name="rev" id="search1" type="text" size="30"></p> | |||
|
38 | </form> | |||
|
39 | ||||
|
40 | <div class="navigate">rev {rev}: {changenav%navshortentry}</div> | |||
|
41 | ||||
|
42 | <table class="bigtable"> | |||
|
43 | <tr> | |||
|
44 | <th class="age">age</th> | |||
|
45 | <th class="author">author</th> | |||
|
46 | <th class="description">description</th> | |||
|
47 | </tr> | |||
|
48 | {entries%shortlogentry} | |||
|
49 | </table> | |||
|
50 | ||||
|
51 | <div class="navigate">rev {rev}: {changenav%navshortentry}</div> | |||
|
52 | </div> | |||
|
53 | </div> | |||
|
54 | ||||
|
55 | {footer} |
@@ -0,0 +1,5 | |||||
|
1 | <tr class="parity{parity}"> | |||
|
2 | <td class="age">{date|age}</td> | |||
|
3 | <td class="author">{author|person}</td> | |||
|
4 | <td class="description"><a href="{url}rev/{node|short}{sessionvars%urlparameter}">{desc|strip|firstline|escape}</a>{tags%changelogtag}</td> | |||
|
5 | </tr> |
@@ -0,0 +1,42 | |||||
|
1 | {header} | |||
|
2 | <title>{repo|escape}: tags</title> | |||
|
3 | <link rel="alternate" type="application/atom+xml" | |||
|
4 | href="{url}atom-tags" title="Atom feed for {repo|escape}: tags"> | |||
|
5 | <link rel="alternate" type="application/rss+xml" | |||
|
6 | href="{url}rss-tags" title="RSS feed for {repo|escape}: tags"> | |||
|
7 | </head> | |||
|
8 | <body> | |||
|
9 | ||||
|
10 | <div class="container"> | |||
|
11 | <div class="menu"> | |||
|
12 | <div class="logo"> | |||
|
13 | <a href="http://www.selenic.com/mercurial/"> | |||
|
14 | <img src="{staticurl}hglogo.png" width=75 height=90 border=0 alt="mercurial"></a> | |||
|
15 | </div> | |||
|
16 | <ul> | |||
|
17 | <li><a href="{url}shortlog{sessionvars%urlparameter}">log</a></li> | |||
|
18 | <li><a href="{url}graph{sessionvars%urlparameter}">graph</a></li> | |||
|
19 | <li class="active">tags</li> | |||
|
20 | </ul> | |||
|
21 | </div> | |||
|
22 | ||||
|
23 | <div class="main"> | |||
|
24 | <h2>{repo|escape}</h2> | |||
|
25 | <h3>tags</h3> | |||
|
26 | ||||
|
27 | <form class="search" action="{url}log"> | |||
|
28 | {sessionvars%hiddenformentry} | |||
|
29 | <p><input name="rev" id="search1" type="text" size="30"></p> | |||
|
30 | </form> | |||
|
31 | ||||
|
32 | <table class="bigtable"> | |||
|
33 | <tr> | |||
|
34 | <th>tag</th> | |||
|
35 | <th>node</th> | |||
|
36 | </tr> | |||
|
37 | {entries%tagentry} | |||
|
38 | </table> | |||
|
39 | </div> | |||
|
40 | </div> | |||
|
41 | ||||
|
42 | {footer} |
@@ -0,0 +1,121 | |||||
|
1 | #header# | |||
|
2 | <title>#repo|escape#: Graph</title> | |||
|
3 | <link rel="alternate" type="application/atom+xml" | |||
|
4 | href="{url}atom-log" title="Atom feed for #repo|escape#"/> | |||
|
5 | <link rel="alternate" type="application/rss+xml" | |||
|
6 | href="{url}rss-log" title="RSS feed for #repo|escape#"/> | |||
|
7 | </head> | |||
|
8 | <body> | |||
|
9 | ||||
|
10 | <div class="page_header"> | |||
|
11 | <a href="http://www.selenic.com/mercurial/" title="Mercurial" style="float: right;">Mercurial</a><a href="{url}summary{sessionvars%urlparameter}">#repo|escape#</a> / graph | |||
|
12 | </div> | |||
|
13 | ||||
|
14 | <form action="{url}log"> | |||
|
15 | {sessionvars%hiddenformentry} | |||
|
16 | <div class="search"> | |||
|
17 | <input type="text" name="rev" /> | |||
|
18 | </div> | |||
|
19 | </form> | |||
|
20 | <div class="page_nav"> | |||
|
21 | <a href="{url}summary{sessionvars%urlparameter}">summary</a> | | |||
|
22 | <a href="{url}shortlog{sessionvars%urlparameter}">shortlog</a> | | |||
|
23 | <a href="{url}log/#rev#{sessionvars%urlparameter}">changelog</a> | | |||
|
24 | graph | | |||
|
25 | <a href="{url}tags{sessionvars%urlparameter}">tags</a> | | |||
|
26 | <a href="{url}file/#node|short#{sessionvars%urlparameter}">files</a> | |||
|
27 | <br/> | |||
|
28 | <a href="{url}graph/{uprev}{sessionvars%urlparameter}?revcount={revcountless}">less</a> | |||
|
29 | <a href="{url}graph/{uprev}{sessionvars%urlparameter}?revcount={revcountmore}">more</a> | |||
|
30 | | #changenav%navgraphentry#<br/> | |||
|
31 | </div> | |||
|
32 | ||||
|
33 | <div class="title"> </div> | |||
|
34 | ||||
|
35 | <div id="noscript">The revision graph only works with JavaScript-enabled browsers.</div> | |||
|
36 | ||||
|
37 | <div id="wrapper"> | |||
|
38 | <ul id="nodebgs"></ul> | |||
|
39 | <canvas id="graph" width="224" height="#canvasheight#"></canvas> | |||
|
40 | <ul id="graphnodes"></ul> | |||
|
41 | </div> | |||
|
42 | ||||
|
43 | <script type="text/javascript" src="#staticurl#graph.js"></script> | |||
|
44 | <script> | |||
|
45 | <!-- hide script content | |||
|
46 | ||||
|
47 | document.getElementById('noscript').style.display = 'none'; | |||
|
48 | ||||
|
49 | var data = {jsdata|json}; | |||
|
50 | var graph = new Graph(); | |||
|
51 | graph.scale({bg_height}); | |||
|
52 | ||||
|
53 | graph.edge = function(x0, y0, x1, y1, color) { | |||
|
54 | ||||
|
55 | this.setColor(color, 0.0, 0.65); | |||
|
56 | this.ctx.beginPath(); | |||
|
57 | this.ctx.moveTo(x0, y0); | |||
|
58 | this.ctx.lineTo(x1, y1); | |||
|
59 | this.ctx.stroke(); | |||
|
60 | ||||
|
61 | } | |||
|
62 | ||||
|
63 | var revlink = '<li style="_STYLE"><span class="desc">'; | |||
|
64 | revlink += '<a class="list" href="{url}rev/_NODEID{sessionvars%urlparameter}" title="_NODEID"><b>_DESC</b></a>'; | |||
|
65 | revlink += '</span> _TAGS'; | |||
|
66 | revlink += '<span class="info">_DATE ago, by _USER</span></li>'; | |||
|
67 | ||||
|
68 | graph.vertex = function(x, y, color, parity, cur) { | |||
|
69 | ||||
|
70 | this.ctx.beginPath(); | |||
|
71 | color = this.setColor(color, 0.25, 0.75); | |||
|
72 | this.ctx.arc(x, y, radius, 0, Math.PI * 2, true); | |||
|
73 | this.ctx.fill(); | |||
|
74 | ||||
|
75 | var bg = '<li class="bg parity' + parity + '"></li>'; | |||
|
76 | var left = (this.columns + 1) * this.bg_height; | |||
|
77 | var nstyle = 'padding-left: ' + left + 'px;'; | |||
|
78 | var item = revlink.replace(/_STYLE/, nstyle); | |||
|
79 | item = item.replace(/_PARITY/, 'parity' + parity); | |||
|
80 | item = item.replace(/_NODEID/, cur[0]); | |||
|
81 | item = item.replace(/_NODEID/, cur[0]); | |||
|
82 | item = item.replace(/_DESC/, cur[3]); | |||
|
83 | item = item.replace(/_USER/, cur[4]); | |||
|
84 | item = item.replace(/_DATE/, cur[5]); | |||
|
85 | ||||
|
86 | var tagspan = ''; | |||
|
87 | if (cur[7].length || (cur[6][0] != 'default' || cur[6][1])) { | |||
|
88 | tagspan = '<span class="logtags">'; | |||
|
89 | if (cur[6][1]) { | |||
|
90 | tagspan += '<span class="branchtag" title="' + cur[6][0] + '">'; | |||
|
91 | tagspan += cur[6][0] + '</span> '; | |||
|
92 | } else if (!cur[6][1] && cur[6][0] != 'default') { | |||
|
93 | tagspan += '<span class="inbranchtag" title="' + cur[6][0] + '">'; | |||
|
94 | tagspan += cur[6][0] + '</span> '; | |||
|
95 | } | |||
|
96 | if (cur[7].length) { | |||
|
97 | for (var t in cur[7]) { | |||
|
98 | var tag = cur[7][t]; | |||
|
99 | tagspan += '<span class="tagtag">' + tag + '</span> '; | |||
|
100 | } | |||
|
101 | } | |||
|
102 | tagspan += '</span>'; | |||
|
103 | } | |||
|
104 | ||||
|
105 | item = item.replace(/_TAGS/, tagspan); | |||
|
106 | return [bg, item]; | |||
|
107 | ||||
|
108 | } | |||
|
109 | ||||
|
110 | graph.render(data); | |||
|
111 | ||||
|
112 | // stop hiding script --> | |||
|
113 | </script> | |||
|
114 | ||||
|
115 | <div class="page_nav"> | |||
|
116 | <a href="{url}graph/{uprev}{sessionvars%urlparameter}?revcount={revcountless}">less</a> | |||
|
117 | <a href="{url}graph/{uprev}{sessionvars%urlparameter}?revcount={revcountmore}">more</a> | |||
|
118 | | {changenav%navgraphentry} | |||
|
119 | </div> | |||
|
120 | ||||
|
121 | #footer# |
@@ -0,0 +1,97 | |||||
|
1 | #header# | |||
|
2 | <title>#repo|escape#: graph</title> | |||
|
3 | <link rel="alternate" type="application/atom+xml" | |||
|
4 | href="#url#atom-tags" title="Atom feed for #repo|escape#: tags"> | |||
|
5 | <link rel="alternate" type="application/rss+xml" | |||
|
6 | href="#url#rss-tags" title="RSS feed for #repo|escape#: tags"> | |||
|
7 | <!--[if IE]><script type="text/javascript" src="#staticurl#excanvas.js"></script><![endif]--> | |||
|
8 | </head> | |||
|
9 | <body> | |||
|
10 | ||||
|
11 | <div class="buttons"> | |||
|
12 | <a href="#url#log{sessionvars%urlparameter}">changelog</a> | |||
|
13 | <a href="#url#shortlog{sessionvars%urlparameter}">shortlog</a> | |||
|
14 | <a href="#url#tags{sessionvars%urlparameter}">tags</a> | |||
|
15 | <a href="#url#file/#node|short#/{sessionvars%urlparameter}">files</a> | |||
|
16 | </div> | |||
|
17 | ||||
|
18 | <h2>graph</h2> | |||
|
19 | ||||
|
20 | <form action="#url#log"> | |||
|
21 | {sessionvars%hiddenformentry} | |||
|
22 | <p> | |||
|
23 | <label for="search1">search:</label> | |||
|
24 | <input name="rev" id="search1" type="text" size="30"> | |||
|
25 | navigate: <small class="navigate">#changenav%navgraphentry#</small> | |||
|
26 | </p> | |||
|
27 | </form> | |||
|
28 | ||||
|
29 | <div id="noscript">The revision graph only works with JavaScript-enabled browsers.</div> | |||
|
30 | ||||
|
31 | <div id="wrapper"> | |||
|
32 | <ul id="nodebgs"></ul> | |||
|
33 | <canvas id="graph" width="224" height="#canvasheight#"></canvas> | |||
|
34 | <ul id="graphnodes"></ul> | |||
|
35 | </div> | |||
|
36 | ||||
|
37 | <script type="text/javascript" src="#staticurl#graph.js"></script> | |||
|
38 | <script> | |||
|
39 | <!-- hide script content | |||
|
40 | ||||
|
41 | document.getElementById('noscript').style.display = 'none'; | |||
|
42 | ||||
|
43 | var data = {jsdata|json}; | |||
|
44 | var graph = new Graph(); | |||
|
45 | graph.scale({bg_height}); | |||
|
46 | ||||
|
47 | graph.edge = function(x0, y0, x1, y1, color) { | |||
|
48 | ||||
|
49 | this.setColor(color, 0.0, 0.65); | |||
|
50 | this.ctx.beginPath(); | |||
|
51 | this.ctx.moveTo(x0, y0); | |||
|
52 | this.ctx.lineTo(x1, y1); | |||
|
53 | this.ctx.stroke(); | |||
|
54 | ||||
|
55 | } | |||
|
56 | ||||
|
57 | var revlink = '<li style="_STYLE"><span class="desc">'; | |||
|
58 | revlink += '<a href="{url}rev/_NODEID{sessionvars%urlparameter}" title="_NODEID">_DESC</a>'; | |||
|
59 | revlink += '</span><span class="info">_DATE ago, by _USER</span></li>'; | |||
|
60 | ||||
|
61 | graph.vertex = function(x, y, color, parity, cur) { | |||
|
62 | ||||
|
63 | this.ctx.beginPath(); | |||
|
64 | color = this.setColor(color, 0.25, 0.75); | |||
|
65 | this.ctx.arc(x, y, radius, 0, Math.PI * 2, true); | |||
|
66 | this.ctx.fill(); | |||
|
67 | ||||
|
68 | var bg = '<li class="bg parity' + parity + '"></li>'; | |||
|
69 | var left = (this.columns + 1) * this.bg_height; | |||
|
70 | var nstyle = 'padding-left: ' + left + 'px;'; | |||
|
71 | var item = revlink.replace(/_STYLE/, nstyle); | |||
|
72 | item = item.replace(/_PARITY/, 'parity' + parity); | |||
|
73 | item = item.replace(/_NODEID/, cur[0]); | |||
|
74 | item = item.replace(/_NODEID/, cur[0]); | |||
|
75 | item = item.replace(/_DESC/, cur[3]); | |||
|
76 | item = item.replace(/_USER/, cur[4]); | |||
|
77 | item = item.replace(/_DATE/, cur[5]); | |||
|
78 | ||||
|
79 | return [bg, item]; | |||
|
80 | ||||
|
81 | } | |||
|
82 | ||||
|
83 | graph.render(data); | |||
|
84 | ||||
|
85 | // stop hiding script --> | |||
|
86 | </script> | |||
|
87 | ||||
|
88 | <form action="#url#log"> | |||
|
89 | {sessionvars%hiddenformentry} | |||
|
90 | <p> | |||
|
91 | <label for="search1">search:</label> | |||
|
92 | <input name="rev" id="search1" type="text" size="30"> | |||
|
93 | navigate: <small class="navigate">#changenav%navgraphentry#</small> | |||
|
94 | </p> | |||
|
95 | </form> | |||
|
96 | ||||
|
97 | #footer# |
@@ -0,0 +1,7 | |||||
|
1 | <!-- quirksmode --> | |||
|
2 | <!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN"> | |||
|
3 | <html> | |||
|
4 | <head> | |||
|
5 | <link rel="icon" href="{staticurl}hgicon.png" type="image/png"> | |||
|
6 | <meta name="robots" content="index, nofollow" /> | |||
|
7 | <link rel="stylesheet" href="{staticurl}style-paper.css" type="text/css" /> |
@@ -0,0 +1,72 | |||||
|
1 | default = 'shortlog' | |||
|
2 | ||||
|
3 | mimetype = 'text/html; charset={encoding}' | |||
|
4 | header = header.tmpl | |||
|
5 | footer = ../coal/footer.tmpl | |||
|
6 | search = ../coal/search.tmpl | |||
|
7 | ||||
|
8 | changelog = ../coal/shortlog.tmpl | |||
|
9 | shortlog = ../coal/shortlog.tmpl | |||
|
10 | shortlogentry = ../coal/shortlogentry.tmpl | |||
|
11 | graph = ../coal/graph.tmpl | |||
|
12 | ||||
|
13 | naventry = '<a href="{url}log/{node|short}{sessionvars%urlparameter}">{label|escape}</a> ' | |||
|
14 | navshortentry = '<a href="{url}shortlog/{node|short}{sessionvars%urlparameter}">{label|escape}</a> ' | |||
|
15 | navgraphentry = '<a href="{url}graph/{node|short}{sessionvars%urlparameter}">{label|escape}</a> ' | |||
|
16 | filenaventry = '<a href="{url}log/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{label|escape}</a> ' | |||
|
17 | filedifflink = '<a href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{file|escape}</a> ' | |||
|
18 | filenodelink = '<a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{file|escape}</a> ' | |||
|
19 | fileellipses = '...' | |||
|
20 | changelogentry = ../coal/shortlogentry.tmpl | |||
|
21 | searchentry = ../coal/shortlogentry.tmpl | |||
|
22 | changeset = ../coal/changeset.tmpl | |||
|
23 | manifest = ../coal/manifest.tmpl | |||
|
24 | ||||
|
25 | direntry = '<tr class="fileline parity{parity}"><td class="name"><a href="{url}file/{node|short}{path|urlescape}{sessionvars%urlparameter}"><img src="{staticurl}coal-folder.png"> {basename|escape}/</a><td class="size"></td><td class="permissions">drwxr-xr-x</td></tr>' | |||
|
26 | fileentry = '<tr class="fileline parity{parity}"><td class="filename"><a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}#l1"><img src="{staticurl}coal-file.png"> {basename|escape}</a></td><td class="size">{size}</td><td class="permissions">{permissions|permissions}</td></tr>' | |||
|
27 | ||||
|
28 | filerevision = ../coal/filerevision.tmpl | |||
|
29 | fileannotate = ../coal/fileannotate.tmpl | |||
|
30 | filediff = ../coal/filediff.tmpl | |||
|
31 | filelog = ../coal/filelog.tmpl | |||
|
32 | fileline = '<tr class="parity{parity}"><td class="lineno"><a href="#{lineid}" id="{lineid}">{linenumber}</a></td><td class="source">{line|escape}</td></tr>' | |||
|
33 | filelogentry = ../coal/filelogentry.tmpl | |||
|
34 | ||||
|
35 | annotateline = '<tr class="parity{parity}"><td class="annotate"><a href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}#{targetline}" title="{node|short}: {desc|escape|firstline}">{author|user}@{rev}</a></td><td class="lineno"><a href="#{lineid}" id="{lineid}">{linenumber}</a></td><td class="source">{line|escape}</td></tr>' | |||
|
36 | ||||
|
37 | diffblock = '<table class="bigtable parity{parity}">{lines}</table>' | |||
|
38 | difflineplus = '<tr><td class="lineno"><a href="#{lineid}" id="{lineid}">{linenumber}</a></td><td class="source plusline">{line|escape}</td></tr>' | |||
|
39 | difflineminus = '<tr><td class="lineno"><a href="#{lineid}" id="{lineid}">{linenumber}</a></td><td class="source minusline">{line|escape}</td></tr>' | |||
|
40 | difflineat = '<tr><td class="lineno"><a href="#{lineid}" id="{lineid}">{linenumber}</a></td><td class="source atline">{line|escape}</td></tr>' | |||
|
41 | diffline = '<tr><td class="lineno"><a href="#{lineid}" id="{lineid}">{linenumber}</a></td><td class="source">{line|escape}</td></tr>' | |||
|
42 | ||||
|
43 | changelogparent = '<tr><th class="parent">parent {rev}:</th><td class="parent"><a href="{url}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a></td></tr>' | |||
|
44 | ||||
|
45 | changesetparent = '<a href="{url}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a> ' | |||
|
46 | ||||
|
47 | filerevparent = '<a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{rename%filerename}{node|short}</a> ' | |||
|
48 | filerevchild = '<a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{node|short}</a> ' | |||
|
49 | ||||
|
50 | filerename = '{file|escape}@' | |||
|
51 | filelogrename = '<tr><th>base:</th><td><a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{file|escape}@{node|short}</a></td></tr>' | |||
|
52 | fileannotateparent = '<tr><td class="metatag">parent:</td><td><a href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{rename%filerename}{node|short}</a></td></tr>' | |||
|
53 | changesetchild = '<a href="{url}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a>' | |||
|
54 | changelogchild = '<tr><th class="child">child</th><td class="child"><a href="{url}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a></td></tr>' | |||
|
55 | fileannotatechild = '<tr><td class="metatag">child:</td><td><a href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{node|short}</a></td></tr>' | |||
|
56 | tags = ../coal/tags.tmpl | |||
|
57 | tagentry = '<tr class="tagEntry parity{parity}"><td><a href="{url}rev/{node|short}{sessionvars%urlparameter}">{tag|escape}</a></td><td class="node">{node|short}</td></tr>' | |||
|
59 | changelogtag = '<span class="tag">{name|escape}</span> ' | |||
|
60 | changesettag = '<span class="tag">{tag|escape}</span> ' | |||
|
61 | filediffparent = '<tr><th class="parent">parent {rev}:</th><td class="parent"><a href="{url}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a></td></tr>' | |||
|
62 | filelogparent = '<tr><th>parent {rev}:</th><td><a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{node|short}</a></td></tr>' | |||
|
63 | filediffchild = '<tr><th class="child">child {rev}:</th><td class="child"><a href="{url}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a></td></tr>' | |||
|
64 | filelogchild = '<tr><th>child {rev}:</th><td><a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{node|short}</a></td></tr>' | |||
|
65 | indexentry = '<tr class="parity{parity}"><td><a href="{url}{sessionvars%urlparameter}">{name|escape}</a></td><td>{description}</td><td>{contact|obfuscate}</td><td class="age">{lastchange|age} ago</td><td class="indexlinks">{archives%indexarchiveentry}</td></tr>\n' | |||
|
66 | indexarchiveentry = '<a href="{url}archive/{node|short}{extension|urlescape}"> ↓{type|escape}</a>' | |||
|
67 | index = ../coal/index.tmpl | |||
|
68 | archiveentry = '<li><a href="{url}archive/{node|short}{extension|urlescape}">{type|escape}</a></li>' | |||
|
69 | notfound = ../coal/notfound.tmpl | |||
|
70 | error = ../coal/error.tmpl | |||
|
71 | urlparameter = '{separator}{name}={value|urlescape}' | |||
|
72 | hiddenformentry = '<input type="hidden" name="{name}" value="{value|escape}" />' |
NO CONTENT: additional new files (binary diffs hidden; oversized content truncated)
@@ -7,6 +7,7 syntax: glob | |||||
7 | *.mergebackup |
|
7 | *.mergebackup | |
8 | *.o |
|
8 | *.o | |
9 | *.so |
|
9 | *.so | |
|
10 | *.pyd | |||
10 | *.pyc |
|
11 | *.pyc | |
11 | *.swp |
|
12 | *.swp | |
12 | *.prof |
|
13 | *.prof |
@@ -35,8 +35,10 | |||||
35 | ;; This code has been developed under XEmacs 21.5, and may not work as |
|
35 | ;; This code has been developed under XEmacs 21.5, and may not work as | |
36 | ;; well under GNU Emacs (albeit tested under 21.4). Patches to |
|
36 | ;; well under GNU Emacs (albeit tested under 21.4). Patches to | |
37 | ;; enhance the portability of this code, fix bugs, and add features |
|
37 | ;; enhance the portability of this code, fix bugs, and add features | |
38 | ;; are most welcome. You can clone a Mercurial repository for this |
|
38 | ;; are most welcome. | |
39 | ;; package from http://www.serpentine.com/hg/hg-emacs |
|
39 | ||
|
40 | ;; As of version 22.3, GNU Emacs's VC mode has direct support for | |||
|
41 | ;; Mercurial, so this package may not prove as useful there. | |||
40 |
|
42 | |||
41 | ;; Please send problem reports and suggestions to bos@serpentine.com. |
|
43 | ;; Please send problem reports and suggestions to bos@serpentine.com. | |
42 |
|
44 |
@@ -205,8 +205,7 typeset -A _hg_cmd_globals | |||||
205 |
|
205 | |||
206 | _hg_config() { |
|
206 | _hg_config() { | |
207 | typeset -a items |
|
207 | typeset -a items | |
208 | local line |
|
208 | items=(${${(%f)"$(_call_program hg hg showconfig)"}%%\=*}) | |
209 | items=(${${(%f)"$(_hg_cmd showconfig)"}%%\=*}) |
|
|||
210 | (( $#items )) && _describe -t config 'config item' items |
|
209 | (( $#items )) && _describe -t config 'config item' items | |
211 | } |
|
210 | } | |
212 |
|
211 | |||
@@ -291,10 +290,14 typeset -A _hg_cmd_globals | |||||
291 | '--cwd[change working directory]:new working directory:_files -/' |
|
290 | '--cwd[change working directory]:new working directory:_files -/' | |
292 | '(--noninteractive -y)'{-y,--noninteractive}'[do not prompt, assume yes for any required answers]' |
|
291 | '(--noninteractive -y)'{-y,--noninteractive}'[do not prompt, assume yes for any required answers]' | |
293 | '(--verbose -v)'{-v,--verbose}'[enable additional output]' |
|
292 | '(--verbose -v)'{-v,--verbose}'[enable additional output]' | |
|
293 | '*--config[set/override config option]:defined config items:_hg_config' | |||
294 | '(--quiet -q)'{-q,--quiet}'[suppress output]' |
|
294 | '(--quiet -q)'{-q,--quiet}'[suppress output]' | |
295 | '(--help -h)'{-h,--help}'[display help and exit]' |
|
295 | '(--help -h)'{-h,--help}'[display help and exit]' | |
296 | '--debug[debug mode]' |
|
296 | '--debug[debug mode]' | |
297 | '--debugger[start debugger]' |
|
297 | '--debugger[start debugger]' | |
|
298 | '--encoding[set the charset encoding (default: UTF8)]' | |||
|
299 | '--encodingmode[set the charset encoding mode (default: strict)]' | |||
|
300 | '--lsprof[print improved command execution profile]' | |||
298 | '--traceback[print traceback on exception]' |
|
301 | '--traceback[print traceback on exception]' | |
299 | '--time[time how long the command takes]' |
|
302 | '--time[time how long the command takes]' | |
300 | '--profile[profile]' |
|
303 | '--profile[profile]' |
@@ -69,6 +69,7 def show_doc(ui): | |||||
69 | if f.startswith("debug"): continue |
|
69 | if f.startswith("debug"): continue | |
70 | d = get_cmd(h[f]) |
|
70 | d = get_cmd(h[f]) | |
71 | # synopsis |
|
71 | # synopsis | |
|
72 | ui.write("[[%s]]\n" % d['cmd']) | |||
72 | ui.write("%s::\n" % d['synopsis'].replace("hg ","", 1)) |
|
73 | ui.write("%s::\n" % d['synopsis'].replace("hg ","", 1)) | |
73 | # description |
|
74 | # description | |
74 | ui.write("%s\n\n" % d['desc'][1]) |
|
75 | ui.write("%s\n\n" % d['desc'][1]) | |
@@ -91,11 +92,10 def show_doc(ui): | |||||
91 | ui.write(_(" aliases: %s\n\n") % " ".join(d['aliases'])) |
|
92 | ui.write(_(" aliases: %s\n\n") % " ".join(d['aliases'])) | |
92 |
|
93 | |||
93 | # print topics |
|
94 | # print topics | |
94 | for t in helptable: |
|
95 | for t, doc in helptable: | |
95 | l = t.split("|") |
|
96 | l = t.split("|") | |
96 | section = l[-1] |
|
97 | section = l[-1] | |
97 | underlined(_(section).upper()) |
|
98 | underlined(_(section).upper()) | |
98 | doc = helptable[t] |
|
|||
99 | if callable(doc): |
|
99 | if callable(doc): | |
100 | doc = doc() |
|
100 | doc = doc() | |
101 | ui.write(_(doc)) |
|
101 | ui.write(_(doc)) |
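
The rewritten loop above (for t, doc in helptable: rather than looking up helptable[t]) reflects helptable changing from a mapping into a sequence of (topic, doc) pairs, where the topic string carries '|'-separated aliases and doc may be a callable that produces the text lazily. A hedged illustration of the shape this code now expects follows; the real table is defined in Mercurial's help module, and these entries are invented for the example:

    # Invented entries, only to show the (topic, doc) pair structure
    # that the rewritten show_doc loop iterates over.
    def environment_doc():
        return "HG::\n    Path to the 'hg' executable ...\n"

    helptable = (
        ("environment|env", environment_doc),   # doc may be a callable
        ("patterns", "Mercurial accepts several notations ..."),
    )

    for t, doc in helptable:
        section = t.split("|")[-1]
        if callable(doc):
            doc = doc()
        print section.upper()
        print doc
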
@@ -37,58 +37,6 repository path:: | |||||
37 |
|
37 | |||
38 | include::hg.1.gendoc.txt[] |
|
38 | include::hg.1.gendoc.txt[] | |
39 |
|
39 | |||
40 | SPECIFYING SINGLE REVISIONS |
|
|||
41 | --------------------------- |
|
|||
42 |
|
||||
43 | Mercurial accepts several notations for identifying individual |
|
|||
44 | revisions. |
|
|||
45 |
|
||||
46 | A plain integer is treated as a revision number. Negative |
|
|||
47 | integers are treated as offsets from the tip, with -1 denoting the |
|
|||
48 | tip. |
|
|||
49 |
|
||||
50 | A 40-digit hexadecimal string is treated as a unique revision |
|
|||
51 | identifier. |
|
|||
52 |
|
||||
53 | A hexadecimal string less than 40 characters long is treated as a |
|
|||
54 | unique revision identifier, and referred to as a short-form |
|
|||
55 | identifier. A short-form identifier is only valid if it is the |
|
|||
56 | prefix of one full-length identifier. |
|
|||
57 |
|
||||
58 | Any other string is treated as a tag name, which is a symbolic |
|
|||
59 | name associated with a revision identifier. Tag names may not |
|
|||
60 | contain the ":" character. |
|
|||
61 |
|
||||
62 | The reserved name "tip" is a special tag that always identifies |
|
|||
63 | the most recent revision. |
|
|||
64 |
|
||||
65 | The reserved name "null" indicates the null revision. This is the |
|
|||
66 | revision of an empty repository, and the parent of revision 0. |
|
|||
67 |
|
||||
68 | The reserved name "." indicates the working directory parent. If |
|
|||
69 | no working directory is checked out, it is equivalent to null. |
|
|||
70 | If an uncommitted merge is in progress, "." is the revision of |
|
|||
71 | the first parent. |
|
|||
72 |
|
||||
73 | SPECIFYING MULTIPLE REVISIONS |
|
|||
74 | ----------------------------- |
|
|||
75 |
|
||||
76 | When Mercurial accepts more than one revision, they may be |
|
|||
77 | specified individually, or provided as a continuous range, |
|
|||
78 | separated by the ":" character. |
|
|||
79 |
|
||||
80 | The syntax of range notation is [BEGIN]:[END], where BEGIN and END |
|
|||
81 | are revision identifiers. Both BEGIN and END are optional. If |
|
|||
82 | BEGIN is not specified, it defaults to revision number 0. If END |
|
|||
83 | is not specified, it defaults to the tip. The range ":" thus |
|
|||
84 | means "all revisions". |
|
|||
85 |
|
||||
86 | If BEGIN is greater than END, revisions are treated in reverse |
|
|||
87 | order. |
|
|||
88 |
|
||||
89 | A range acts as a closed interval. This means that a range of 3:5 |
|
|||
90 | gives 3, 4 and 5. Similarly, a range of 4:2 gives 4, 3, and 2. |
|
|||
91 |
|
||||
92 | FILES |
|
40 | FILES | |
93 | ----- |
|
41 | ----- | |
94 | .hgignore:: |
|
42 | .hgignore:: |
@@ -100,6 +100,7 This section describes the different sec | |||||
100 | Mercurial "hgrc" file, the purpose of each section, its possible |
|
100 | Mercurial "hgrc" file, the purpose of each section, its possible | |
101 | keys, and their possible values. |
|
101 | keys, and their possible values. | |
102 |
|
102 | |||
|
103 | [[decode]] | |||
103 | decode/encode:: |
|
104 | decode/encode:: | |
104 | Filters for transforming files on checkout/checkin. This would |
|
105 | Filters for transforming files on checkout/checkin. This would | |
105 | typically be used for newline processing or other |
|
106 | typically be used for newline processing or other | |
@@ -158,6 +159,7 decode/encode:: | |||||
158 | [decode] |
|
159 | [decode] | |
159 | **.txt = dumbdecode: |
|
160 | **.txt = dumbdecode: | |
160 |
|
161 | |||
|
162 | [[defaults]] | |||
161 | defaults:: |
|
163 | defaults:: | |
162 | Use the [defaults] section to define command defaults, i.e. the |
|
164 | Use the [defaults] section to define command defaults, i.e. the | |
163 | default options/arguments to pass to the specified commands. |
|
165 | default options/arguments to pass to the specified commands. | |
@@ -173,6 +175,7 defaults:: | |||||
173 | defining command defaults. The command defaults will also be |
|
175 | defining command defaults. The command defaults will also be | |
174 | applied to the aliases of the commands defined. |
|
176 | applied to the aliases of the commands defined. | |
175 |
|
177 | |||
|
178 | [[diff]] | |||
176 | diff:: |
|
179 | diff:: | |
177 | Settings used when displaying diffs. They are all boolean and |
|
180 | Settings used when displaying diffs. They are all boolean and | |
178 | defaults to False. |
|
181 | defaults to False. | |
@@ -189,6 +192,7 diff:: | |||||
189 | ignoreblanklines;; |
|
192 | ignoreblanklines;; | |
190 | Ignore changes whose lines are all blank. |
|
193 | Ignore changes whose lines are all blank. | |
191 |
|
194 | |||
|
195 | [[email]] | |||
192 | email:: |
|
196 | email:: | |
193 | Settings for extensions that send email messages. |
|
197 | Settings for extensions that send email messages. | |
194 | from;; |
|
198 | from;; | |
@@ -217,6 +221,7 email:: | |||||
217 | from = Joseph User <joe.user@example.com> |
|
221 | from = Joseph User <joe.user@example.com> | |
218 | method = /usr/sbin/sendmail |
|
222 | method = /usr/sbin/sendmail | |
219 |
|
223 | |||
|
224 | [[extensions]] | |||
220 | extensions:: |
|
225 | extensions:: | |
221 | Mercurial has an extension mechanism for adding new features. To |
|
226 | Mercurial has an extension mechanism for adding new features. To | |
222 | enable an extension, create an entry for it in this section. |
|
227 | enable an extension, create an entry for it in this section. | |
@@ -241,6 +246,7 extensions:: | |||||
241 | # (this extension will get loaded from the file specified) |
|
246 | # (this extension will get loaded from the file specified) | |
242 | myfeature = ~/.hgext/myfeature.py |
|
247 | myfeature = ~/.hgext/myfeature.py | |
243 |
|
248 | |||
|
249 | [[format]] | |||
244 | format:: |
|
250 | format:: | |
245 |
|
251 | |||
246 | usestore;; |
|
252 | usestore;; | |
@@ -250,6 +256,7 format:: | |||||
250 | you to store longer filenames in some situations at the expense of |
|
256 | you to store longer filenames in some situations at the expense of | |
251 | compatibility. |
|
257 | compatibility. | |
252 |
|
258 | |||
|
259 | [[merge-patterns]] | |||
253 | merge-patterns:: |
|
260 | merge-patterns:: | |
254 | This section specifies merge tools to associate with particular file |
|
261 | This section specifies merge tools to associate with particular file | |
255 | patterns. Tools matched here will take precedence over the default |
|
262 | patterns. Tools matched here will take precedence over the default | |
@@ -261,6 +268,7 merge-patterns:: | |||||
261 | **.c = kdiff3 |
|
268 | **.c = kdiff3 | |
262 | **.jpg = myimgmerge |
|
269 | **.jpg = myimgmerge | |
263 |
|
270 | |||
|
271 | [[merge-tools]] | |||
264 | merge-tools:: |
|
272 | merge-tools:: | |
265 | This section configures external merge tools to use for file-level |
|
273 | This section configures external merge tools to use for file-level | |
266 | merges. |
|
274 | merges. | |
@@ -281,6 +289,7 merge-tools:: | |||||
281 | myHtmlTool.priority = 1 |
|
289 | myHtmlTool.priority = 1 | |
282 |
|
290 | |||
283 | Supported arguments: |
|
291 | Supported arguments: | |
|
292 | ||||
284 | priority;; |
|
293 | priority;; | |
285 | The priority in which to evaluate this tool. |
|
294 | The priority in which to evaluate this tool. | |
286 | Default: 0. |
|
295 | Default: 0. | |
@@ -313,7 +322,7 merge-tools:: | |||||
313 | fixeol;; |
|
322 | fixeol;; | |
314 | Attempt to fix up EOL changes caused by the merge tool. |
|
323 | Attempt to fix up EOL changes caused by the merge tool. | |
315 | Default: False |
|
324 | Default: False | |
316 | gui |
|
325 | gui;; | |
317 | This tool requires a graphical interface to run. Default: False |
|
326 | This tool requires a graphical interface to run. Default: False | |
318 | regkey;; |
|
327 | regkey;; | |
319 | Windows registry key which describes install location of this tool. |
|
328 | Windows registry key which describes install location of this tool. | |
@@ -326,6 +335,7 merge-tools:: | |||||
326 | String to append to the value read from the registry, typically the |
|
335 | String to append to the value read from the registry, typically the | |
327 | executable name of the tool. |
|
336 | executable name of the tool. Default: None | |
328 |
|
337 | |||
|
338 | [[hooks]] | |||
329 | hooks:: |
|
339 | hooks:: | |
330 | Commands or Python functions that get automatically executed by |
|
340 | Commands or Python functions that get automatically executed by | |
331 | various actions such as starting or finishing a commit. Multiple |
|
341 | various actions such as starting or finishing a commit. Multiple | |
@@ -448,6 +458,7 hooks:: | |||||
448 | If a Python hook returns a "true" value or raises an exception, this |
|
458 | If a Python hook returns a "true" value or raises an exception, this | |
449 | is treated as failure of the hook. |
|
459 | is treated as failure of the hook. | |
450 |
|
460 | |||
|
461 | [[http_proxy]] | |||
451 | http_proxy:: |
|
462 | http_proxy:: | |
452 | Used to access web-based Mercurial repositories through a HTTP |
|
463 | Used to access web-based Mercurial repositories through a HTTP | |
453 | proxy. |
|
464 | proxy. | |
@@ -462,6 +473,7 http_proxy:: | |||||
462 | user;; |
|
473 | user;; | |
463 | Optional. |
|
474 | Optional. User name to authenticate with at the proxy server. | |
464 |
|
475 | |||
|
476 | [[smtp]] | |||
465 | smtp:: |
|
477 | smtp:: | |
466 | Configuration for extensions that need to send email messages. |
|
478 | Configuration for extensions that need to send email messages. | |
467 | host;; |
|
479 | host;; | |
@@ -483,6 +495,7 smtp:: | |||||
483 | Optional. |
|
495 | Optional. It's the hostname that the sender can use to identify itself | |
484 | to the MTA. |
|
496 | to the MTA. | |
485 |
|
497 | |||
|
498 | [[paths]] | |||
486 | paths:: |
|
499 | paths:: | |
487 | Assigns symbolic names to repositories. |
|
500 | Assigns symbolic names to repositories. The left side is the | |
488 | symbolic name, and the right gives the directory or URL that is the |
|
501 | symbolic name, and the right gives the directory or URL that is the | |
@@ -496,6 +509,7 paths:: | |||||
496 | Optional. |
|
509 | Optional. Directory or URL to use when pushing if no destination | |
497 | is specified. |
|
510 | is specified. | |
498 |
|
511 | |||
|
512 | [[server]] | |||
499 | server:: |
|
513 | server:: | |
500 | Controls generic server settings. |
|
514 | Controls generic server settings. | |
501 | uncompressed;; |
|
515 | uncompressed;; | |
@@ -508,6 +522,7 server:: | |||||
508 | 6Mbps), uncompressed streaming is slower, because of the extra |
|
522 | 6Mbps), uncompressed streaming is slower, because of the extra | |
509 | data transfer overhead. |
|
523 | data transfer overhead. Default is False. | |
510 |
|
524 | |||
|
525 | [[trusted]] | |||
511 | trusted:: |
|
526 | trusted:: | |
512 | For security reasons, Mercurial will not use the settings in |
|
527 | For security reasons, Mercurial will not use the settings in | |
513 | the .hg/hgrc file from a repository if it doesn't belong to a |
|
528 | the .hg/hgrc file from a repository if it doesn't belong to a | |
@@ -524,6 +539,7 trusted:: | |||||
524 | groups;; |
|
539 | groups;; | |
525 | Comma-separated list of trusted groups. |
|
540 | Comma-separated list of trusted groups. | |
526 |
|
541 | |||
|
542 | [[ui]] | |||
527 | ui:: |
|
543 | ui:: | |
528 | User interface controls. |
|
544 | User interface controls. | |
529 | archivemeta;; |
|
545 | archivemeta;; | |
@@ -531,6 +547,12 ui:: | |||||
531 | (hashes for the repository base and for tip) in archives created by |
|
547 | (hashes for the repository base and for tip) in archives created by | |
532 | the hg archive command or downloaded via hgweb. |
|
548 | the hg archive command or downloaded via hgweb. | |
533 | Default is true. |
|
549 | Default is true. | |
|
550 | askusername;; | |||
|
551 | Whether to prompt for a username when committing. If True, and | |||
|
552 | neither $HGUSER nor $EMAIL has been specified, then the user will | |||
|
553 | be prompted to enter a username. If no username is entered, the | |||
|
554 | default USER@HOST is used instead. | |||
|
555 | Default is False. | |||
534 | debug;; |
|
556 | debug;; | |
535 | Print debugging information. |
|
557 | Print debugging information. True or False. Default is False. | |
536 | editor;; |
|
558 | editor;; | |
@@ -563,6 +585,7 ui:: | |||||
563 | fail to merge |
|
585 | fail to merge | |
564 |
|
586 | |||
565 | See the merge-tools section for more information on configuring tools. |
|
587 | See the merge-tools section for more information on configuring tools. | |
|
588 | ||||
566 | patch;; |
|
589 | patch;; | |
567 | command to use to apply patches. Look for 'gpatch' or 'patch' in PATH if |
|
590 | command to use to apply patches. Look for 'gpatch' or 'patch' in PATH if | |
568 | unset. |
|
591 | unset. | |
@@ -599,6 +622,7 ui:: | |||||
599 | Increase the amount of output printed. |
|
622 | Increase the amount of output printed. True or False. Default is False. | |
600 |
|
623 | |||
601 |
|
624 | |||
|
625 | [[web]] | |||
602 | web:: |
|
626 | web:: | |
603 | Web interface configuration. |
|
627 | Web interface configuration. | |
604 | accesslog;; |
|
628 | accesslog;; |
@@ -46,79 +46,45 | |||||
46 | # ** = user6 |
|
46 | # ** = user6 | |
47 |
|
47 | |||
48 | from mercurial.i18n import _ |
|
48 | from mercurial.i18n import _ | |
49 | from mercurial.node import bin, short |
|
|||
50 | from mercurial import util |
|
49 | from mercurial import util | |
51 | import getpass |
|
50 | import getpass | |
52 |
|
51 | |||
53 | class checker(object): |
|
52 | def buildmatch(ui, repo, user, key): | |
54 | '''acl checker.''' |
|
|||
55 |
|
||||
56 | def buildmatch(self, key): |
|
|||
57 |
|
|
53 | '''return tuple of (match function, list enabled).''' | |
58 |
|
|
54 | if not ui.has_section(key): | |
59 |
|
|
55 | ui.debug(_('acl: %s not enabled\n') % key) | |
60 |
|
|
56 | return None | |
61 |
|
||||
62 | thisuser = self.getuser() |
|
|||
63 | pats = [pat for pat, users in self.ui.configitems(key) |
|
|||
64 | if thisuser in users.replace(',', ' ').split()] |
|
|||
65 | self.ui.debug(_('acl: %s enabled, %d entries for user %s\n') % |
|
|||
66 | (key, len(pats), thisuser)) |
|
|||
67 | if pats: |
|
|||
68 | match = util.matcher(self.repo.root, names=pats)[1] |
|
|||
69 | else: |
|
|||
70 | match = util.never |
|
|||
71 | return match, True |
|
|||
72 |
|
||||
73 | def getuser(self): |
|
|||
74 | '''return name of authenticated user.''' |
|
|||
75 | return self.user |
|
|||
76 |
|
57 | |||
77 | def __init__(self, ui, repo): |
|
58 | pats = [pat for pat, users in ui.configitems(key) | |
78 | self.ui = ui |
|
59 | if user in users.replace(',', ' ').split()] | |
79 | self.repo = repo |
|
60 | ui.debug(_('acl: %s enabled, %d entries for user %s\n') % | |
80 | self.user = getpass.getuser() |
|
61 | (key, len(pats), user)) | |
81 | cfg = self.ui.config('acl', 'config') |
|
62 | if pats: | |
82 | if cfg: |
|
63 | return util.matcher(repo.root, names=pats)[1] | |
83 | self.ui.readsections(cfg, 'acl.allow', 'acl.deny') |
|
64 | return util.never | |
84 | self.allow, self.allowable = self.buildmatch('acl.allow') |
|
|||
85 | self.deny, self.deniable = self.buildmatch('acl.deny') |
|
|||
86 |
|
||||
87 | def skipsource(self, source): |
|
|||
88 | '''true if incoming changes from this source should be skipped.''' |
|
|||
89 | ok_sources = self.ui.config('acl', 'sources', 'serve').split() |
|
|||
90 | return source not in ok_sources |
|
|||
91 |
|
||||
92 | def check(self, node): |
|
|||
93 | '''return if access allowed, raise exception if not.''' |
|
|||
94 | files = self.repo.changectx(node).files() |
|
|||
95 | if self.deniable: |
|
|||
96 | for f in files: |
|
|||
97 | if self.deny(f): |
|
|||
98 | self.ui.debug(_('acl: user %s denied on %s\n') % |
|
|||
99 | (self.getuser(), f)) |
|
|||
100 | raise util.Abort(_('acl: access denied for changeset %s') % |
|
|||
101 | short(node)) |
|
|||
102 | if self.allowable: |
|
|||
103 | for f in files: |
|
|||
104 | if not self.allow(f): |
|
|||
105 | self.ui.debug(_('acl: user %s not allowed on %s\n') % |
|
|||
106 | (self.getuser(), f)) |
|
|||
107 | raise util.Abort(_('acl: access denied for changeset %s') % |
|
|||
108 | short(node)) |
|
|||
109 | self.ui.debug(_('acl: allowing changeset %s\n') % short(node)) |
|
|||
110 |
|
65 | |||
111 | def hook(ui, repo, hooktype, node=None, source=None, **kwargs): |
|
66 | def hook(ui, repo, hooktype, node=None, source=None, **kwargs): | |
112 | if hooktype != 'pretxnchangegroup': |
|
67 | if hooktype != 'pretxnchangegroup': | |
113 | raise util.Abort(_('config error - hook type "%s" cannot stop ' |
|
68 | raise util.Abort(_('config error - hook type "%s" cannot stop ' | |
114 | 'incoming changesets') % hooktype) |
|
69 | 'incoming changesets') % hooktype) | |
115 |
|
70 | if source not in ui.config('acl', 'sources', 'serve').split(): | ||
116 | c = checker(ui, repo) |
|
|||
117 | if c.skipsource(source): |
|
|||
118 | ui.debug(_('acl: changes have source "%s" - skipping\n') % source) |
|
71 | ui.debug(_('acl: changes have source "%s" - skipping\n') % source) | |
119 | return |
|
72 | return | |
120 |
|
73 | |||
121 | start = repo.changelog.rev(bin(node)) |
|
74 | user = getpass.getuser() | |
122 | end = repo.changelog.count() |
|
75 | cfg = ui.config('acl', 'config') | |
123 | for rev in xrange(start, end): |
|
76 | if cfg: | |
124 | c.check(repo.changelog.node(rev)) |
|
77 | ui.readsections(cfg, 'acl.allow', 'acl.deny') | |
|
78 | allow = buildmatch(ui, repo, user, 'acl.allow') | |||
|
79 | deny = buildmatch(ui, repo, user, 'acl.deny') | |||
|
80 | ||||
|
81 | for rev in xrange(repo[node], len(repo)): | |||
|
82 | ctx = repo[rev] | |||
|
83 | for f in ctx.files(): | |||
|
84 | if deny and deny(f): | |||
|
85 | ui.debug(_('acl: user %s denied on %s\n') % (user, f)) | |||
|
86 | raise util.Abort(_('acl: access denied for changeset %s') % ctx) | |||
|
87 | if allow and not allow(f): | |||
|
88 | ui.debug(_('acl: user %s not allowed on %s\n') % (user, f)) | |||
|
89 | raise util.Abort(_('acl: access denied for changeset %s') % ctx) | |||
|
90 | ui.debug(_('acl: allowing changeset %s\n') % ctx) |
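For orientation, here is how the rewritten hook is normally wired up in hgrc. The section names ([acl], [acl.allow], [acl.deny]), the pretxnchangegroup hook type and the `sources = serve` default all appear in the code above, and deny entries are checked before allow entries; the glob patterns and user name below are only illustrative examples.

    [hooks]
    pretxnchangegroup.acl = python:hgext.acl.hook

    [acl]
    # incoming changesets are only checked for these sources
    sources = serve

    [acl.allow]
    # glob pattern = comma-separated list of users
    ** = user6

    [acl.deny]
    # hypothetical pattern: user6 may not touch anything under docs/
    docs/** = user6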
@@ -55,7 +55,7 | |||||
55 | from mercurial.i18n import _ |
|
55 | from mercurial.i18n import _ | |
56 | from mercurial.node import short |
|
56 | from mercurial.node import short | |
57 | from mercurial import cmdutil, templater, util |
|
57 | from mercurial import cmdutil, templater, util | |
58 | import |
|
58 | import re, time | |
59 |
|
59 | |||
60 | MySQLdb = None |
|
60 | MySQLdb = None | |
61 |
|
61 | |||
@@ -99,9 +99,7 class bugzilla_2_16(object): | |||||
99 | def filter_real_bug_ids(self, ids): |
|
99 | def filter_real_bug_ids(self, ids): | |
100 | '''filter not-existing bug ids from list.''' |
|
100 | '''filter not-existing bug ids from list.''' | |
101 | self.run('select bug_id from bugs where bug_id in %s' % buglist(ids)) |
|
101 | self.run('select bug_id from bugs where bug_id in %s' % buglist(ids)) | |
102 |
|
|
102 | return util.sort([c[0] for c in self.cursor.fetchall()]) | |
103 | ids.sort() |
|
|||
104 | return ids |
|
|||
105 |
|
103 | |||
106 | def filter_unknown_bug_ids(self, node, ids): |
|
104 | def filter_unknown_bug_ids(self, node, ids): | |
107 | '''filter bug ids from list that already refer to this changeset.''' |
|
105 | '''filter bug ids from list that already refer to this changeset.''' | |
@@ -114,9 +112,7 class bugzilla_2_16(object): | |||||
114 | self.ui.status(_('bug %d already knows about changeset %s\n') % |
|
112 | self.ui.status(_('bug %d already knows about changeset %s\n') % | |
115 | (id, short(node))) |
|
113 | (id, short(node))) | |
116 | unknown.pop(id, None) |
|
114 | unknown.pop(id, None) | |
117 |
|
|
115 | return util.sort(unknown.keys()) | |
118 | ids.sort() |
|
|||
119 | return ids |
|
|||
120 |
|
116 | |||
121 | def notify(self, ids): |
|
117 | def notify(self, ids): | |
122 | '''tell bugzilla to send mail.''' |
|
118 | '''tell bugzilla to send mail.''' | |
@@ -127,7 +123,7 class bugzilla_2_16(object): | |||||
127 | cmd = self.ui.config('bugzilla', 'notify', |
|
123 | cmd = self.ui.config('bugzilla', 'notify', | |
128 | 'cd /var/www/html/bugzilla && ' |
|
124 | 'cd /var/www/html/bugzilla && ' | |
129 | './processmail %s nobody@nowhere.com') % id |
|
125 | './processmail %s nobody@nowhere.com') % id | |
130 | fp = |
|
126 | fp = util.popen('(%s) 2>&1' % cmd) | |
131 | out = fp.read() |
|
127 | out = fp.read() | |
132 | ret = fp.close() |
|
128 | ret = fp.close() | |
133 | if ret: |
|
129 | if ret: | |
@@ -300,7 +296,7 def hook(ui, repo, hooktype, node=None, | |||||
300 | hooktype) |
|
296 | hooktype) | |
301 | try: |
|
297 | try: | |
302 | bz = bugzilla(ui, repo) |
|
298 | bz = bugzilla(ui, repo) | |
303 | ctx = repo |
|
299 | ctx = repo[node] | |
304 | ids = bz.find_bug_ids(ctx) |
|
300 | ids = bz.find_bug_ids(ctx) | |
305 | if ids: |
|
301 | if ids: | |
306 | for id in ids: |
|
302 | for id in ids: |
@@ -25,7 +25,7 def children(ui, repo, file_=None, **opt | |||||
25 | if file_: |
|
25 | if file_: | |
26 | ctx = repo.filectx(file_, changeid=rev) |
|
26 | ctx = repo.filectx(file_, changeid=rev) | |
27 | else: |
|
27 | else: | |
28 | ctx = repo |
|
28 | ctx = repo[rev] | |
29 |
|
29 | |||
30 | displayer = cmdutil.show_changeset(ui, repo, opts) |
|
30 | displayer = cmdutil.show_changeset(ui, repo, opts) | |
31 | for node in [cp.node() for cp in ctx.children()]: |
|
31 | for node in [cp.node() for cp in ctx.children()]: |
@@ -4,15 +4,10 | |||||
4 | # |
|
4 | # | |
5 | # This software may be used and distributed according to the terms |
|
5 | # This software may be used and distributed according to the terms | |
6 | # of the GNU General Public License, incorporated herein by reference. |
|
6 | # of the GNU General Public License, incorporated herein by reference. | |
7 | # |
|
7 | '''allow graphing the number of lines changed per contributor''' | |
8 | # |
|
|||
9 | # Aliases map file format is simple one alias per line in the following |
|
|||
10 | # format: |
|
|||
11 | # |
|
|||
12 | # <alias email> <actual email> |
|
|||
13 |
|
8 | |||
14 | from mercurial.i18n import gettext as _ |
|
9 | from mercurial.i18n import gettext as _ | |
15 | from mercurial import |
|
10 | from mercurial import patch, cmdutil, util, node | |
16 | import os, sys |
|
11 | import os, sys | |
17 |
|
12 | |||
18 | def get_tty_width(): |
|
13 | def get_tty_width(): | |
@@ -36,98 +31,41 def get_tty_width(): | |||||
36 | pass |
|
31 | pass | |
37 | return 80 |
|
32 | return 80 | |
38 |
|
33 | |||
39 | def __gather(ui, repo, node1, node2): |
|
34 | def countrevs(ui, repo, amap, revs, progress=False): | |
40 | def dirtywork(f, mmap1, mmap2): |
|
|||
41 | lines = 0 |
|
|||
42 |
|
||||
43 | to = mmap1 and repo.file(f).read(mmap1[f]) or None |
|
|||
44 | tn = mmap2 and repo.file(f).read(mmap2[f]) or None |
|
|||
45 |
|
||||
46 | diff = mdiff.unidiff(to, "", tn, "", f, f).split("\n") |
|
|||
47 |
|
||||
48 | for line in diff: |
|
|||
49 | if not line: |
|
|||
50 | continue # skip EOF |
|
|||
51 | if line.startswith(" "): |
|
|||
52 | continue # context line |
|
|||
53 | if line.startswith("--- ") or line.startswith("+++ "): |
|
|||
54 | continue # begining of diff |
|
|||
55 | if line.startswith("@@ "): |
|
|||
56 | continue # info line |
|
|||
57 |
|
||||
58 | # changed lines |
|
|||
59 | lines += 1 |
|
|||
60 |
|
||||
61 | return lines |
|
|||
62 |
|
||||
63 | ## |
|
|||
64 |
|
||||
65 | lines = 0 |
|
|||
66 |
|
||||
67 | changes = repo.status(node1, node2, None, util.always)[:5] |
|
|||
68 |
|
||||
69 | modified, added, removed, deleted, unknown = changes |
|
|||
70 |
|
||||
71 | who = repo.changelog.read(node2)[1] |
|
|||
72 | who = util.email(who) # get the email of the person |
|
|||
73 |
|
||||
74 | mmap1 = repo.manifest.read(repo.changelog.read(node1)[0]) |
|
|||
75 | mmap2 = repo.manifest.read(repo.changelog.read(node2)[0]) |
|
|||
76 | for f in modified: |
|
|||
77 | lines += dirtywork(f, mmap1, mmap2) |
|
|||
78 |
|
||||
79 | for f in added: |
|
|||
80 | lines += dirtywork(f, None, mmap2) |
|
|||
81 |
|
||||
82 | for f in removed: |
|
|||
83 | lines += dirtywork(f, mmap1, None) |
|
|||
84 |
|
||||
85 | for f in deleted: |
|
|||
86 | lines += dirtywork(f, mmap1, mmap2) |
|
|||
87 |
|
||||
88 | for f in unknown: |
|
|||
89 | lines += dirtywork(f, mmap1, mmap2) |
|
|||
90 |
|
||||
91 | return (who, lines) |
|
|||
92 |
|
||||
93 | def gather_stats(ui, repo, amap, revs=None, progress=False): |
|
|||
94 | stats = {} |
|
35 | stats = {} | |
95 |
|
36 | count = pct = 0 | ||
96 | cl = repo.changelog |
|
|||
97 |
|
||||
98 | if not revs: |
|
37 | if not revs: | |
99 | revs = range( |
|
38 | revs = range(len(repo)) | |
100 |
|
||||
101 | nr_revs = len(revs) |
|
|||
102 | cur_rev = 0 |
|
|||
103 |
|
39 | |||
104 | for rev in revs: |
|
40 | for rev in revs: | |
105 | cur_rev += 1 # next revision |
|
41 | ctx2 = repo[rev] | |
106 |
|
42 | parents = ctx2.parents() | ||
107 | node2 = cl.node(rev) |
|
43 | if len(parents) > 1: | |
108 | node1 = cl.parents(node2)[0] |
|
|||
109 |
|
||||
110 | if cl.parents(node2)[1] != node.nullid: |
|
|||
111 | ui.note(_('Revision %d is a merge, ignoring...\n') % (rev,)) |
|
44 | ui.note(_('Revision %d is a merge, ignoring...\n') % (rev,)) | |
112 | continue |
|
45 | continue | |
113 |
|
46 | |||
114 | who, lines = __gather(ui, repo, node1, node2) |
|
47 | ctx1 = parents[0] | |
|
48 | lines = 0 | |||
|
49 | ui.pushbuffer() | |||
|
50 | patch.diff(repo, ctx1.node(), ctx2.node()) | |||
|
51 | diff = ui.popbuffer() | |||
115 |
|
52 | |||
116 | # remap the owner if possible |
|
53 | for l in diff.split('\n'): | |
117 | if who in amap: |
|
54 | if (l.startswith("+") and not l.startswith("+++ ") or | |
118 | ui.note("using '%s' alias for '%s'\n" % (amap[who], who)) |
|
55 | l.startswith("-") and not l.startswith("--- ")): | |
119 | who = amap[who] |
|
56 | lines += 1 | |
120 |
|
57 | |||
121 | if not who in stats: |
|
58 | user = util.email(ctx2.user()) | |
122 | stats[who] = 0 |
|
59 | user = amap.get(user, user) # remap | |
123 | stats[ |
|
60 | stats[user] = stats.get(user, 0) + lines | |
124 |
|
61 | ui.debug("rev %d: %d lines by %s\n" % (rev, lines, user)) | ||
125 | ui.note("rev %d: %d lines by %s\n" % (rev, lines, who)) |
|
|||
126 |
|
62 | |||
127 | if progress: |
|
63 | if progress: | |
128 | nr_revs = max(nr_revs, 1) |
|
64 | count += 1 | |
129 | if int(100.0*(cur_rev - 1)/nr_revs) < int(100.0*cur_rev/nr_revs): |
|
65 | newpct = int(100.0 * count / max(len(revs), 1)) | |
130 | ui.write("\rGenerating stats: %d%%" % (int(100.0*cur_rev/nr_revs),)) |
|
66 | if pct < newpct: | |
|
67 | pct = newpct | |||
|
68 | ui.write("\rGenerating stats: %d%%" % pct) | |||
131 | sys.stdout.flush() |
|
69 | sys.stdout.flush() | |
132 |
|
70 | |||
133 | if progress: |
|
71 | if progress: | |
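The new counting loop above replaces the old mdiff-based __gather() helper: it diffs a revision against its first parent and counts every added or removed line while skipping the "+++ " / "--- " file headers. A standalone sketch of just that counting step, for clarity (function name is ours, not from the patch):

    def countlines(difftext):
        '''Count added and removed lines in unified diff text,
        ignoring the "+++ " / "--- " file headers.'''
        lines = 0
        for l in difftext.split('\n'):
            if (l.startswith("+") and not l.startswith("+++ ") or
                    l.startswith("-") and not l.startswith("--- ")):
                lines += 1
        return lines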
@@ -137,64 +75,39 def gather_stats(ui, repo, amap, revs=No | |||||
137 | return stats |
|
75 | return stats | |
138 |
|
76 | |||
139 | def churn(ui, repo, **opts): |
|
77 | def churn(ui, repo, **opts): | |
140 |
|
|
78 | '''graphs the number of lines changed | |
|
79 | ||||
|
80 | The map file format used to specify aliases is fairly simple: | |||
|
81 | ||||
|
82 | <alias email> <actual email>''' | |||
141 |
|
83 | |||
142 | def pad(s, l): |
|
84 | def pad(s, l): | |
143 | if len(s) < l: |
|
85 | return (s + " " * l)[:l] | |
144 | return s + " " * (l-len(s)) |
|
|||
145 | return s[0:l] |
|
|||
146 |
|
||||
147 | def graph(n, maximum, width, char): |
|
|||
148 | maximum = max(1, maximum) |
|
|||
149 | n = int(n * width / float(maximum)) |
|
|||
150 |
|
||||
151 | return char * (n) |
|
|||
152 |
|
||||
153 | def get_aliases(f): |
|
|||
154 | aliases = {} |
|
|||
155 |
|
||||
156 | for l in f.readlines(): |
|
|||
157 | l = l.strip() |
|
|||
158 | alias, actual = l.split() |
|
|||
159 | aliases[alias] = actual |
|
|||
160 |
|
||||
161 | return aliases |
|
|||
162 |
|
86 | |||
163 | amap = {} |
|
87 | amap = {} | |
164 | aliases = opts.get('aliases') |
|
88 | aliases = opts.get('aliases') | |
165 | if aliases: |
|
89 | if aliases: | |
166 | try: |
|
90 | for l in open(aliases, "r"): | |
167 |
|
|
91 | l = l.strip() | |
168 | except OSError, e: |
|
92 | alias, actual = l.split() | |
169 | print "Error: " + e |
|
93 | amap[alias] = actual | |
|
94 | ||||
|
95 | revs = util.sort([int(r) for r in cmdutil.revrange(repo, opts['rev'])]) | |||
|
96 | stats = countrevs(ui, repo, amap, revs, opts.get('progress')) | |||
|
97 | if not stats: | |||
170 |
|
|
98 | return | |
171 |
|
99 | |||
172 | amap = get_aliases(f) |
|
100 | stats = util.sort([(-l, u, l) for u,l in stats.items()]) | |
173 | f.close() |
|
101 | maxchurn = float(max(1, stats[0][2])) | |
174 |
|
102 | maxuser = max([len(u) for k, u, l in stats]) | ||
175 | revs = [int(r) for r in cmdutil.revrange(repo, opts['rev'])] |
|
|||
176 | revs.sort() |
|
|||
177 | stats = gather_stats(ui, repo, amap, revs, opts.get('progress')) |
|
|||
178 |
|
103 | |||
179 | # make a list of tuples (name, lines) and sort it in descending order |
|
104 | ttywidth = get_tty_width() | |
180 | ordered = stats.items() |
|
105 | ui.debug(_("assuming %i character terminal\n") % ttywidth) | |
181 | if not ordered: |
|
106 | width = ttywidth - maxuser - 2 - 6 - 2 - 2 | |
182 | return |
|
|||
183 | ordered.sort(lambda x, y: cmp(y[1], x[1])) |
|
|||
184 | max_churn = ordered[0][1] |
|
|||
185 |
|
107 | |||
186 | tty_width = get_tty_width() |
|
108 | for k, user, churn in stats: | |
187 | ui.note(_("assuming %i character terminal\n") % tty_width) |
|
109 | print "%s %6d %s" % (pad(user, maxuser), churn, | |
188 | tty_width -= 1 |
|
110 | "*" * int(churn * width / maxchurn)) | |
189 |
|
||||
190 | max_user_width = max([len(user) for user, churn in ordered]) |
|
|||
191 |
|
||||
192 | graph_width = tty_width - max_user_width - 1 - 6 - 2 - 2 |
|
|||
193 |
|
||||
194 | for user, churn in ordered: |
|
|||
195 | print "%s %6d %s" % (pad(user, max_user_width), |
|
|||
196 | churn, |
|
|||
197 | graph(churn, max_churn, graph_width, '*')) |
|
|||
198 |
|
111 | |||
199 | cmdtable = { |
|
112 | cmdtable = { | |
200 | "churn": |
|
113 | "churn": |
@@ -4,6 +4,7 | |||||
4 | # |
|
4 | # | |
5 | # This software may be used and distributed according to the terms |
|
5 | # This software may be used and distributed according to the terms | |
6 | # of the GNU General Public License, incorporated herein by reference. |
|
6 | # of the GNU General Public License, incorporated herein by reference. | |
|
7 | '''converting foreign VCS repositories to Mercurial''' | |||
7 |
|
8 | |||
8 | import convcmd |
|
9 | import convcmd | |
9 | from mercurial import commands |
|
10 | from mercurial import commands | |
@@ -85,6 +86,50 def convert(ui, src, dest=None, revmapfi | |||||
85 | --config convert.hg.saverev=True (boolean) |
|
86 | --config convert.hg.saverev=True (boolean) | |
86 | allow target to preserve source revision ID |
|
87 | allow target to preserve source revision ID | |
87 |
|
88 | |||
|
89 | CVS Source | |||
|
90 | ---------- | |||
|
91 | ||||
|
92 | CVS source will use a sandbox (i.e. a checked-out copy) from CVS | |||
|
93 | to indicate the starting point of what will be converted. Direct | |||
|
94 | access to the repository files is not needed, unless of course | |||
|
95 | the repository is :local:. The conversion uses the top level | |||
|
96 | directory in the sandbox to find the CVS repository, and then uses | |||
|
97 | CVS rlog commands to find files to convert. This means that unless | |||
|
98 | a filemap is given, all files under the starting directory will be | |||
|
99 | converted, and that any directory reorganisation in the CVS | |||
|
100 | sandbox is ignored. | |||
|
101 | ||||
|
102 | Because CVS does not have changesets, it is necessary to collect | |||
|
103 | individual commits to CVS and merge them into changesets. CVS source | |||
|
104 | can use the external 'cvsps' program (this is a legacy option and may | |||
|
105 | be removed in future) or use its internal changeset merging code. | |||
|
106 | External cvsps is default, and options may be passed to it by setting | |||
|
107 | --config convert.cvsps='cvsps -A -u --cvs-direct -q' | |||
|
108 | The options shown are the defaults. | |||
|
109 | ||||
|
110 | Internal cvsps is selected by setting | |||
|
111 | --config convert.cvsps=builtin | |||
|
112 | and has a few more configurable options: | |||
|
113 | --config convert.cvsps.fuzz=60 (integer) | |||
|
114 | Specify the maximum time (in seconds) that is allowed between | |||
|
115 | commits with identical user and log message in a single | |||
|
116 | changeset. When very large files were checked in as part | |||
|
117 | of a changeset then the default may not be long enough. | |||
|
118 | --config convert.cvsps.mergeto='{{mergetobranch ([-\w]+)}}' | |||
|
119 | Specify a regular expression to which commit log messages are | |||
|
120 | matched. If a match occurs, then the conversion process will | |||
|
121 | insert a dummy revision merging the branch on which this log | |||
|
122 | message occurs to the branch indicated in the regex. | |||
|
123 | --config convert.cvsps.mergefrom='{{mergefrombranch ([-\w]+)}}' | |||
|
124 | Specify a regular expression to which commit log messages are | |||
|
125 | matched. If a match occurs, then the conversion process will | |||
|
126 | add the most recent revision on the branch indicated in the | |||
|
127 | regex as the second parent of the changeset. | |||
|
128 | ||||
|
129 | The hgext/convert/cvsps wrapper script allows the builtin changeset | |||
|
130 | merging code to be run without doing a conversion. Its parameters and | |||
|
131 | output are similar to that of cvsps 2.1. | |||
|
132 | ||||
88 | Subversion Source |
|
133 | Subversion Source | |
89 | ----------------- |
|
134 | ----------------- | |
90 |
|
135 |
@@ -153,26 +153,18 class converter_sink(object): | |||||
153 | mapping equivalent authors identifiers for each system.""" |
|
153 | mapping equivalent authors identifiers for each system.""" | |
154 | return None |
|
154 | return None | |
155 |
|
155 | |||
156 | def putfile(self, f, e, data): |
|
156 | def putcommit(self, files, copies, parents, commit, source): | |
157 | """Put file for next putcommit(). |
|
|||
158 | f: path to file |
|
|||
159 | e: '', 'x', or 'l' (regular file, executable, or symlink) |
|
|||
160 | data: file contents""" |
|
|||
161 | raise NotImplementedError() |
|
|||
162 |
|
||||
163 | def delfile(self, f): |
|
|||
164 | """Delete file for next putcommit(). |
|
|||
165 | f: path to file""" |
|
|||
166 | raise NotImplementedError() |
|
|||
167 |
|
||||
168 | def putcommit(self, files, parents, commit): |
|
|||
169 | """Create a revision with all changed files listed in 'files' |
|
157 | """Create a revision with all changed files listed in 'files' | |
170 | and having listed parents. 'commit' is a commit object containing |
|
158 | and having listed parents. 'commit' is a commit object containing | |
171 | at a minimum the author, date, and message for this changeset. |
|
159 | at a minimum the author, date, and message for this changeset. | |
172 | Called after putfile() and delfile() calls. Note that the sink |
|
160 | 'files' is a list of (path, version) tuples, 'copies'is a dictionary | |
173 | repository is not told to update itself to a particular revision |
|
161 | mapping destinations to sources, and 'source' is the source repository. | |
174 | (or even what that revision would be) before it receives the |
|
162 | Only getfile() and getmode() should be called on 'source'. | |
175 | file data.""" |
|
163 | ||
|
164 | Note that the sink repository is not told to update itself to | |||
|
165 | a particular revision (or even what that revision would be) | |||
|
166 | before it receives the file data. | |||
|
167 | """ | |||
176 | raise NotImplementedError() |
|
168 | raise NotImplementedError() | |
177 |
|
169 | |||
178 | def puttags(self, tags): |
|
170 | def puttags(self, tags): | |
@@ -181,7 +173,7 class converter_sink(object): | |||||
181 | raise NotImplementedError() |
|
173 | raise NotImplementedError() | |
182 |
|
174 | |||
183 | def setbranch(self, branch, pbranches): |
|
175 | def setbranch(self, branch, pbranches): | |
184 | """Set the current branch name. Called before the first put |
|
176 | """Set the current branch name. Called before the first putcommit | |
185 | on the branch. |
|
177 | on the branch. | |
186 | branch: branch name for subsequent commits |
|
178 | branch: branch name for subsequent commits | |
187 | pbranches: (converted parent revision, parent branch) tuples""" |
|
179 | pbranches: (converted parent revision, parent branch) tuples""" |
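A minimal, hypothetical sink sketch showing how the new putcommit() signature is meant to be driven; the IOError-means-deleted convention matches the svn sink further below, and the class name and return value are illustrative only:

    class noop_sink(converter_sink):
        '''Discards everything; only illustrates the putcommit() contract.'''
        def putcommit(self, files, copies, parents, commit, source):
            for f, v in files:
                try:
                    data = source.getfile(f, v)   # file contents at version v
                    mode = source.getmode(f, v)   # '', 'x' or 'l'
                except IOError:
                    pass                          # file was deleted in this revision
                # copies.get(f) would give the copy source for f, if any
            # a real sink creates a changeset from commit.author/date/desc
            # here and returns the new revision identifier
            return parents and parents[0] or None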
@@ -221,8 +221,6 class converter(object): | |||||
221 |
|
221 | |||
222 | def copy(self, rev): |
|
222 | def copy(self, rev): | |
223 | commit = self.commitcache[rev] |
|
223 | commit = self.commitcache[rev] | |
224 | do_copies = hasattr(self.dest, 'copyfile') |
|
|||
225 | filenames = [] |
|
|||
226 |
|
224 | |||
227 | changes = self.source.getchanges(rev) |
|
225 | changes = self.source.getchanges(rev) | |
228 | if isinstance(changes, basestring): |
|
226 | if isinstance(changes, basestring): | |
@@ -241,21 +239,6 class converter(object): | |||||
241 | pbranches.append((self.map[prev], |
|
239 | pbranches.append((self.map[prev], | |
242 | self.commitcache[prev].branch)) |
|
240 | self.commitcache[prev].branch)) | |
243 | self.dest.setbranch(commit.branch, pbranches) |
|
241 | self.dest.setbranch(commit.branch, pbranches) | |
244 | for f, v in files: |
|
|||
245 | filenames.append(f) |
|
|||
246 | try: |
|
|||
247 | data = self.source.getfile(f, v) |
|
|||
248 | except IOError, inst: |
|
|||
249 | self.dest.delfile(f) |
|
|||
250 | else: |
|
|||
251 | e = self.source.getmode(f, v) |
|
|||
252 | self.dest.putfile(f, e, data) |
|
|||
253 | if do_copies: |
|
|||
254 | if f in copies: |
|
|||
255 | copyf = copies[f] |
|
|||
256 | # Merely marks that a copy happened. |
|
|||
257 | self.dest.copyfile(copyf, f) |
|
|||
258 |
|
||||
259 | try: |
|
242 | try: | |
260 | parents = self.splicemap[rev].replace(',', ' ').split() |
|
243 | parents = self.splicemap[rev].replace(',', ' ').split() | |
261 | self.ui.status('spliced in %s as parents of %s\n' % |
|
244 | self.ui.status('spliced in %s as parents of %s\n' % | |
@@ -263,7 +246,7 class converter(object): | |||||
263 | parents = [self.map.get(p, p) for p in parents] |
|
246 | parents = [self.map.get(p, p) for p in parents] | |
264 | except KeyError: |
|
247 | except KeyError: | |
265 | parents = [b[0] for b in pbranches] |
|
248 | parents = [b[0] for b in pbranches] | |
266 | newnode = self.dest.putcommit(file |
|
249 | newnode = self.dest.putcommit(files, copies, parents, commit, self.source) | |
267 | self.source.converted(rev, newnode) |
|
250 | self.source.converted(rev, newnode) | |
268 | self.map[rev] = newnode |
|
251 | self.map[rev] = newnode | |
269 |
|
252 |
@@ -3,8 +3,10 | |||||
3 | import os, locale, re, socket |
|
3 | import os, locale, re, socket | |
4 | from cStringIO import StringIO |
|
4 | from cStringIO import StringIO | |
5 | from mercurial import util |
|
5 | from mercurial import util | |
|
6 | from mercurial.i18n import _ | |||
6 |
|
7 | |||
7 | from common import NoRepo, commit, converter_source, checktool |
|
8 | from common import NoRepo, commit, converter_source, checktool | |
|
9 | import cvsps | |||
8 |
|
10 | |||
9 | class convert_cvs(converter_source): |
|
11 | class convert_cvs(converter_source): | |
10 | def __init__(self, ui, path, rev=None): |
|
12 | def __init__(self, ui, path, rev=None): | |
@@ -14,10 +16,13 class convert_cvs(converter_source): | |||||
14 | if not os.path.exists(cvs): |
|
16 | if not os.path.exists(cvs): | |
15 | raise NoRepo("%s does not look like a CVS checkout" % path) |
|
17 | raise NoRepo("%s does not look like a CVS checkout" % path) | |
16 |
|
18 | |||
|
19 | checktool('cvs') | |||
17 | self.cmd = ui.config('convert', 'cvsps', 'cvsps -A -u --cvs-direct -q') |
|
20 | self.cmd = ui.config('convert', 'cvsps', 'cvsps -A -u --cvs-direct -q') | |
18 | cvspsexe = self.cmd.split(None, 1)[0] |
|
21 | cvspsexe = self.cmd.split(None, 1)[0] | |
19 | for tool in (cvspsexe, 'cvs'): |
|
22 | self.builtin = cvspsexe == 'builtin' | |
20 | checktool(tool) |
|
23 | ||
|
24 | if not self.builtin: | |||
|
25 | checktool(cvspsexe) | |||
21 |
|
26 | |||
22 | self.changeset = {} |
|
27 | self.changeset = {} | |
23 | self.files = {} |
|
28 | self.files = {} | |
@@ -28,10 +33,11 class convert_cvs(converter_source): | |||||
28 | self.cvsroot = file(os.path.join(cvs, "Root")).read()[:-1] |
|
33 | self.cvsroot = file(os.path.join(cvs, "Root")).read()[:-1] | |
29 | self.cvsrepo = file(os.path.join(cvs, "Repository")).read()[:-1] |
|
34 | self.cvsrepo = file(os.path.join(cvs, "Repository")).read()[:-1] | |
30 | self.encoding = locale.getpreferredencoding() |
|
35 | self.encoding = locale.getpreferredencoding() | |
31 | self._parse() |
|
36 | ||
|
37 | self._parse(ui) | |||
32 | self._connect() |
|
38 | self._connect() | |
33 |
|
39 | |||
34 | def _parse(self): |
|
40 | def _parse(self, ui): | |
35 | if self.changeset: |
|
41 | if self.changeset: | |
36 | return |
|
42 | return | |
37 |
|
43 | |||
@@ -56,6 +62,40 class convert_cvs(converter_source): | |||||
56 | id = None |
|
62 | id = None | |
57 | state = 0 |
|
63 | state = 0 | |
58 | filerevids = {} |
|
64 | filerevids = {} | |
|
65 | ||||
|
66 | if self.builtin: | |||
|
67 | # builtin cvsps code | |||
|
68 | ui.status(_('using builtin cvsps\n')) | |||
|
69 | ||||
|
70 | db = cvsps.createlog(ui, cache='update') | |||
|
71 | db = cvsps.createchangeset(ui, db, | |||
|
72 | fuzz=int(ui.config('convert', 'cvsps.fuzz', 60)), | |||
|
73 | mergeto=ui.config('convert', 'cvsps.mergeto', None), | |||
|
74 | mergefrom=ui.config('convert', 'cvsps.mergefrom', None)) | |||
|
75 | ||||
|
76 | for cs in db: | |||
|
77 | if maxrev and cs.id>maxrev: | |||
|
78 | break | |||
|
79 | id = str(cs.id) | |||
|
80 | cs.author = self.recode(cs.author) | |||
|
81 | self.lastbranch[cs.branch] = id | |||
|
82 | cs.comment = self.recode(cs.comment) | |||
|
83 | date = util.datestr(cs.date) | |||
|
84 | self.tags.update(dict.fromkeys(cs.tags, id)) | |||
|
85 | ||||
|
86 | files = {} | |||
|
87 | for f in cs.entries: | |||
|
88 | files[f.file] = "%s%s" % ('.'.join([str(x) for x in f.revision]), | |||
|
89 | ['', '(DEAD)'][f.dead]) | |||
|
90 | ||||
|
91 | # add current commit to set | |||
|
92 | c = commit(author=cs.author, date=date, | |||
|
93 | parents=[str(p.id) for p in cs.parents], | |||
|
94 | desc=cs.comment, branch=cs.branch or '') | |||
|
95 | self.changeset[id] = c | |||
|
96 | self.files[id] = files | |||
|
97 | else: | |||
|
98 | # external cvsps | |||
59 | for l in util.popen(cmd): |
|
99 | for l in util.popen(cmd): | |
60 | if state == 0: # header |
|
100 | if state == 0: # header | |
61 | if l.startswith("PatchSet"): |
|
101 | if l.startswith("PatchSet"): | |
@@ -297,10 +337,7 class convert_cvs(converter_source): | |||||
297 |
|
337 | |||
298 | def getchanges(self, rev): |
|
338 | def getchanges(self, rev): | |
299 | self.modecache = {} |
|
339 | self.modecache = {} | |
300 |
|
|
340 | return util.sort(self.files[rev].items()), {} | |
301 | cl = files.items() |
|
|||
302 | cl.sort() |
|
|||
303 | return (cl, {}) |
|
|||
304 |
|
341 | |||
305 | def getcommit(self, rev): |
|
342 | def getcommit(self, rev): | |
306 | return self.changeset[rev] |
|
343 | return self.changeset[rev] | |
@@ -309,7 +346,4 class convert_cvs(converter_source): | |||||
309 | return self.tags |
|
346 | return self.tags | |
310 |
|
347 | |||
311 | def getchangedfiles(self, rev, i): |
|
348 | def getchangedfiles(self, rev, i): | |
312 |
|
|
349 | return util.sort(self.files[rev].keys()) | |
313 | files.sort() |
|
|||
314 | return files |
|
|||
315 |
|
@@ -110,9 +110,8 class darcs_source(converter_source, com | |||||
110 | copies[elt.get('from')] = elt.get('to') |
|
110 | copies[elt.get('from')] = elt.get('to') | |
111 | else: |
|
111 | else: | |
112 | changes.append((elt.text.strip(), rev)) |
|
112 | changes.append((elt.text.strip(), rev)) | |
113 | changes.sort() |
|
|||
114 | self.lastrev = rev |
|
113 | self.lastrev = rev | |
115 | return changes, copies |
|
114 | return util.sort(changes), copies | |
116 |
|
115 | |||
117 | def getfile(self, name, rev): |
|
116 | def getfile(self, name, rev): | |
118 | if rev != self.lastrev: |
|
117 | if rev != self.lastrev: |
@@ -130,10 +130,8 class gnuarch_source(converter_source, c | |||||
130 | for c in cps: |
|
130 | for c in cps: | |
131 | copies[c] = cps[c] |
|
131 | copies[c] = cps[c] | |
132 |
|
132 | |||
133 | changes.sort() |
|
|||
134 | self.lastrev = rev |
|
133 | self.lastrev = rev | |
135 |
|
134 | return util.sort(changes), copies | ||
136 | return changes, copies |
|
|||
137 |
|
135 | |||
138 | def getcommit(self, rev): |
|
136 | def getcommit(self, rev): | |
139 | changes = self.changes[rev] |
|
137 | changes = self.changes[rev] |
@@ -17,7 +17,7 import os, time | |||||
17 | from mercurial.i18n import _ |
|
17 | from mercurial.i18n import _ | |
18 | from mercurial.repo import RepoError |
|
18 | from mercurial.repo import RepoError | |
19 | from mercurial.node import bin, hex, nullid |
|
19 | from mercurial.node import bin, hex, nullid | |
20 | from mercurial import hg, revlog, util |
|
20 | from mercurial import hg, revlog, util, context | |
21 |
|
21 | |||
22 | from common import NoRepo, commit, converter_source, converter_sink |
|
22 | from common import NoRepo, commit, converter_source, converter_sink | |
23 |
|
23 | |||
@@ -54,11 +54,9 class mercurial_sink(converter_sink): | |||||
54 | self.ui.debug(_('run hg sink pre-conversion action\n')) |
|
54 | self.ui.debug(_('run hg sink pre-conversion action\n')) | |
55 | self.wlock = self.repo.wlock() |
|
55 | self.wlock = self.repo.wlock() | |
56 | self.lock = self.repo.lock() |
|
56 | self.lock = self.repo.lock() | |
57 | self.repo.dirstate.clear() |
|
|||
58 |
|
57 | |||
59 | def after(self): |
|
58 | def after(self): | |
60 | self.ui.debug(_('run hg sink post-conversion action\n')) |
|
59 | self.ui.debug(_('run hg sink post-conversion action\n')) | |
61 | self.repo.dirstate.invalidate() |
|
|||
62 | self.lock = None |
|
60 | self.lock = None | |
63 | self.wlock = None |
|
61 | self.wlock = None | |
64 |
|
62 | |||
@@ -72,21 +70,6 class mercurial_sink(converter_sink): | |||||
72 | h = self.repo.changelog.heads() |
|
70 | h = self.repo.changelog.heads() | |
73 | return [ hex(x) for x in h ] |
|
71 | return [ hex(x) for x in h ] | |
74 |
|
72 | |||
75 | def putfile(self, f, e, data): |
|
|||
76 | self.repo.wwrite(f, data, e) |
|
|||
77 | if f not in self.repo.dirstate: |
|
|||
78 | self.repo.dirstate.normallookup(f) |
|
|||
79 |
|
||||
80 | def copyfile(self, source, dest): |
|
|||
81 | self.repo.copy(source, dest) |
|
|||
82 |
|
||||
83 | def delfile(self, f): |
|
|||
84 | try: |
|
|||
85 | util.unlink(self.repo.wjoin(f)) |
|
|||
86 | #self.repo.remove([f]) |
|
|||
87 | except OSError: |
|
|||
88 | pass |
|
|||
89 |
|
||||
90 | def setbranch(self, branch, pbranches): |
|
73 | def setbranch(self, branch, pbranches): | |
91 | if not self.clonebranches: |
|
74 | if not self.clonebranches: | |
92 | return |
|
75 | return | |
@@ -125,13 +108,19 class mercurial_sink(converter_sink): | |||||
125 | self.repo.pull(prepo, [prepo.lookup(h) for h in heads]) |
|
108 | self.repo.pull(prepo, [prepo.lookup(h) for h in heads]) | |
126 | self.before() |
|
109 | self.before() | |
127 |
|
110 | |||
128 | def putcommit(self, files, parents, commit): |
|
111 | def putcommit(self, files, copies, parents, commit, source): | |
129 | seen = {} |
|
112 | ||
|
113 | files = dict(files) | |||
|
114 | def getfilectx(repo, memctx, f): | |||
|
115 | v = files[f] | |||
|
116 | data = source.getfile(f, v) | |||
|
117 | e = source.getmode(f, v) | |||
|
118 | return context.memfilectx(f, data, 'l' in e, 'x' in e, copies.get(f)) | |||
|
119 | ||||
130 | pl = [] |
|
120 | pl = [] | |
131 | for p in parents: |
|
121 | for p in parents: | |
132 | if p not in |
|
122 | if p not in pl: | |
133 | pl.append(p) |
|
123 | pl.append(p) | |
134 | seen[p] = 1 |
|
|||
135 | parents = pl |
|
124 | parents = pl | |
136 | nparents = len(parents) |
|
125 | nparents = len(parents) | |
137 | if self.filemapmode and nparents == 1: |
|
126 | if self.filemapmode and nparents == 1: | |
@@ -152,9 +141,9 class mercurial_sink(converter_sink): | |||||
152 | while parents: |
|
141 | while parents: | |
153 | p1 = p2 |
|
142 | p1 = p2 | |
154 | p2 = parents.pop(0) |
|
143 | p2 = parents.pop(0) | |
155 | a = self.repo.rawcommit(files, text, commit.author, commit.date, |
|
144 | ctx = context.memctx(self.repo, (p1, p2), text, files.keys(), getfilectx, | |
156 |
|
|
145 | commit.author, commit.date, extra) | |
157 | self.repo. |
|
146 | a = self.repo.commitctx(ctx) | |
158 | text = "(octopus merge fixup)\n" |
|
147 | text = "(octopus merge fixup)\n" | |
159 | p2 = hex(self.repo.changelog.tip()) |
|
148 | p2 = hex(self.repo.changelog.tip()) | |
160 |
|
149 | |||
@@ -163,42 +152,38 class mercurial_sink(converter_sink): | |||||
163 | mnode = self.repo.changelog.read(bin(p2))[0] |
|
152 | mnode = self.repo.changelog.read(bin(p2))[0] | |
164 | if not man.cmp(m1node, man.revision(mnode)): |
|
153 | if not man.cmp(m1node, man.revision(mnode)): | |
165 | self.repo.rollback() |
|
154 | self.repo.rollback() | |
166 | self.repo.dirstate.clear() |
|
|||
167 | return parent |
|
155 | return parent | |
168 | return p2 |
|
156 | return p2 | |
169 |
|
157 | |||
170 | def puttags(self, tags): |
|
158 | def puttags(self, tags): | |
171 | try: |
|
159 | try: | |
172 |
|
|
160 | parentctx = self.repo[self.tagsbranch] | |
173 | oldlines = old.splitlines(1) |
|
161 | tagparent = parentctx.node() | |
174 | oldlines.sort() |
|
162 | except RepoError, inst: | |
|
163 | parentctx = None | |||
|
164 | tagparent = nullid | |||
|
165 | ||||
|
166 | try: | |||
|
167 | oldlines = util.sort(parentctx['.hgtags'].data().splitlines(1)) | |||
175 | except: |
|
168 | except: | |
176 | oldlines = [] |
|
169 | oldlines = [] | |
177 |
|
170 | |||
178 | k = tags.keys() |
|
171 | newlines = util.sort([("%s %s\n" % (tags[tag], tag)) for tag in tags]) | |
179 | k.sort() |
|
|||
180 | newlines = [] |
|
|||
181 | for tag in k: |
|
|||
182 | newlines.append("%s %s\n" % (tags[tag], tag)) |
|
|||
183 |
|
172 | |||
184 | newlines |
|
173 | if newlines == oldlines: | |
|
174 | return None | |||
|
175 | data = "".join(newlines) | |||
185 |
|
176 | |||
186 | if newlines != oldlines: |
|
177 | def getfilectx(repo, memctx, f): | |
|
178 | return context.memfilectx(f, data, False, False, None) | |||
|
179 | ||||
187 |
|
|
180 | self.ui.status("updating tags\n") | |
188 | f = self.repo.wfile(".hgtags", "w") |
|
|||
189 | f.write("".join(newlines)) |
|
|||
190 | f.close() |
|
|||
191 | if not oldlines: self.repo.add([".hgtags"]) |
|
|||
192 |
|
|
181 | date = "%s 0" % int(time.mktime(time.gmtime())) | |
193 | extra = {} |
|
182 | extra = {'branch': self.tagsbranch} | |
194 | if self.tagsbranch != 'default': |
|
183 | ctx = context.memctx(self.repo, (tagparent, None), "update tags", | |
195 | extra['branch'] = self.tagsbranch |
|
184 | [".hgtags"], getfilectx, "convert-repo", date, | |
196 | try: |
|
185 | extra) | |
197 | tagparent = self.repo.changectx(self.tagsbranch).node() |
|
186 | self.repo.commitctx(ctx) | |
198 | except RepoError, inst: |
|
|||
199 | tagparent = nullid |
|
|||
200 | self.repo.rawcommit([".hgtags"], "update tags", "convert-repo", |
|
|||
201 | date, tagparent, nullid, extra=extra) |
|
|||
202 |
|
|
187 | return hex(self.repo.changelog.tip()) | |
203 |
|
188 | |||
204 | def setfilemapmode(self, active): |
|
189 | def setfilemapmode(self, active): | |
@@ -224,25 +209,24 class mercurial_source(converter_source) | |||||
224 |
|
209 | |||
225 | def changectx(self, rev): |
|
210 | def changectx(self, rev): | |
226 | if self.lastrev != rev: |
|
211 | if self.lastrev != rev: | |
227 | self.lastctx = self.repo |
|
212 | self.lastctx = self.repo[rev] | |
228 | self.lastrev = rev |
|
213 | self.lastrev = rev | |
229 | return self.lastctx |
|
214 | return self.lastctx | |
230 |
|
215 | |||
231 | def getheads(self): |
|
216 | def getheads(self): | |
232 | if self.rev: |
|
217 | if self.rev: | |
233 | return [hex(self.repo |
|
218 | return [hex(self.repo[self.rev].node())] | |
234 | else: |
|
219 | else: | |
235 | return [hex(node) for node in self.repo.heads()] |
|
220 | return [hex(node) for node in self.repo.heads()] | |
236 |
|
221 | |||
237 | def getfile(self, name, rev): |
|
222 | def getfile(self, name, rev): | |
238 | try: |
|
223 | try: | |
239 | return self.changectx(rev) |
|
224 | return self.changectx(rev)[name].data() | |
240 | except revlog.LookupError, err: |
|
225 | except revlog.LookupError, err: | |
241 | raise IOError(err) |
|
226 | raise IOError(err) | |
242 |
|
227 | |||
243 | def getmode(self, name, rev): |
|
228 | def getmode(self, name, rev): | |
244 |
|
|
229 | return self.changectx(rev).manifest().flags(name) | |
245 | return (m.execf(name) and 'x' or '') + (m.linkf(name) and 'l' or '') |
|
|||
246 |
|
230 | |||
247 | def getchanges(self, rev): |
|
231 | def getchanges(self, rev): | |
248 | ctx = self.changectx(rev) |
|
232 | ctx = self.changectx(rev) | |
@@ -251,8 +235,7 class mercurial_source(converter_source) | |||||
251 | else: |
|
235 | else: | |
252 | m, a, r = self.repo.status(ctx.parents()[0].node(), ctx.node())[:3] |
|
236 | m, a, r = self.repo.status(ctx.parents()[0].node(), ctx.node())[:3] | |
253 | changes = [(name, rev) for name in m + a + r] |
|
237 | changes = [(name, rev) for name in m + a + r] | |
254 | changes.sort() |
|
238 | return util.sort(changes), self.getcopies(ctx, m + a) | |
255 | return (changes, self.getcopies(ctx, m + a)) |
|
|||
256 |
|
239 | |||
257 | def getcopies(self, ctx, files): |
|
240 | def getcopies(self, ctx, files): | |
258 | copies = {} |
|
241 | copies = {} |
@@ -655,8 +655,7 class svn_source(converter_source): | |||||
655 | # This will fail if a directory was copied |
|
655 | # This will fail if a directory was copied | |
656 | # from another branch and then some of its files |
|
656 | # from another branch and then some of its files | |
657 | # were deleted in the same transaction. |
|
657 | # were deleted in the same transaction. | |
658 | children = self._find_children(path, revnum) |
|
658 | children = util.sort(self._find_children(path, revnum)) | |
659 | children.sort() |
|
|||
660 | for child in children: |
|
659 | for child in children: | |
661 | # Can we move a child directory and its |
|
660 | # Can we move a child directory and its | |
662 | # parent in the same commit? (probably can). Could |
|
661 | # parent in the same commit? (probably can). Could | |
@@ -729,8 +728,7 class svn_source(converter_source): | |||||
729 | parents = [] |
|
728 | parents = [] | |
730 | # check whether this revision is the start of a branch or part |
|
729 | # check whether this revision is the start of a branch or part | |
731 | # of a branch renaming |
|
730 | # of a branch renaming | |
732 | orig_paths = orig_paths.items() |
|
731 | orig_paths = util.sort(orig_paths.items()) | |
733 | orig_paths.sort() |
|
|||
734 | root_paths = [(p,e) for p,e in orig_paths if self.module.startswith(p)] |
|
732 | root_paths = [(p,e) for p,e in orig_paths if self.module.startswith(p)] | |
735 | if root_paths: |
|
733 | if root_paths: | |
736 | path, ent = root_paths[-1] |
|
734 | path, ent = root_paths[-1] | |
@@ -1034,12 +1032,6 class svn_sink(converter_sink, commandli | |||||
1034 | if 'x' in flags: |
|
1032 | if 'x' in flags: | |
1035 | self.setexec.append(filename) |
|
1033 | self.setexec.append(filename) | |
1036 |
|
1034 | |||
1037 | def delfile(self, name): |
|
|||
1038 | self.delete.append(name) |
|
|||
1039 |
|
||||
1040 | def copyfile(self, source, dest): |
|
|||
1041 | self.copies.append([source, dest]) |
|
|||
1042 |
|
||||
1043 | def _copyfile(self, source, dest): |
|
1035 | def _copyfile(self, source, dest): | |
1044 | # SVN's copy command pukes if the destination file exists, but |
|
1036 | # SVN's copy command pukes if the destination file exists, but | |
1045 | # our copyfile method expects to record a copy that has |
|
1037 | # our copyfile method expects to record a copy that has | |
@@ -1072,10 +1064,9 class svn_sink(converter_sink, commandli | |||||
1072 | return dirs |
|
1064 | return dirs | |
1073 |
|
1065 | |||
1074 | def add_dirs(self, files): |
|
1066 | def add_dirs(self, files): | |
1075 | add_dirs = [d for d in self.dirs_of(files) |
|
1067 | add_dirs = [d for d in util.sort(self.dirs_of(files)) | |
1076 | if not os.path.exists(self.wjoin(d, '.svn', 'entries'))] |
|
1068 | if not os.path.exists(self.wjoin(d, '.svn', 'entries'))] | |
1077 | if add_dirs: |
|
1069 | if add_dirs: | |
1078 | add_dirs.sort() |
|
|||
1079 | self.xargs(add_dirs, 'add', non_recursive=True, quiet=True) |
|
1070 | self.xargs(add_dirs, 'add', non_recursive=True, quiet=True) | |
1080 | return add_dirs |
|
1071 | return add_dirs | |
1081 |
|
1072 | |||
@@ -1085,8 +1076,7 class svn_sink(converter_sink, commandli | |||||
1085 | return files |
|
1076 | return files | |
1086 |
|
1077 | |||
1087 | def tidy_dirs(self, names): |
|
1078 | def tidy_dirs(self, names): | |
1088 | dirs = |
|
1079 | dirs = util.sort(self.dirs_of(names)) | |
1089 | dirs.sort() |
|
|||
1090 | dirs.reverse() |
|
1080 | dirs.reverse() | |
1091 | deleted = [] |
|
1081 | deleted = [] | |
1092 | for d in dirs: |
|
1082 | for d in dirs: | |
@@ -1102,7 +1092,20 class svn_sink(converter_sink, commandli | |||||
1102 | def revid(self, rev): |
|
1092 | def revid(self, rev): | |
1103 | return u"svn:%s@%s" % (self.uuid, rev) |
|
1093 | return u"svn:%s@%s" % (self.uuid, rev) | |
1104 |
|
1094 | |||
1105 | def putcommit(self, files, parents, commit): |
|
1095 | def putcommit(self, files, copies, parents, commit, source): | |
|
1096 | # Apply changes to working copy | |||
|
1097 | for f, v in files: | |||
|
1098 | try: | |||
|
1099 | data = source.getfile(f, v) | |||
|
1100 | except IOError, inst: | |||
|
1101 | self.delete.append(f) | |||
|
1102 | else: | |||
|
1103 | e = source.getmode(f, v) | |||
|
1104 | self.putfile(f, e, data) | |||
|
1105 | if f in copies: | |||
|
1106 | self.copies.append([copies[f], f]) | |||
|
1107 | files = [f[0] for f in files] | |||
|
1108 | ||||
1106 | for parent in parents: |
|
1109 | for parent in parents: | |
1107 | try: |
|
1110 | try: | |
1108 | return self.revid(self.childmap[parent]) |
|
1111 | return self.revid(self.childmap[parent]) |
@@ -52,7 +52,6 @@ import os, shlex, shutil, tempfile
 
 def snapshot_node(ui, repo, files, node, tmproot):
     '''snapshot files as of some revision'''
-    mf = repo.changectx(node).manifest()
     dirname = os.path.basename(repo.root)
     if dirname == "":
         dirname = "root"
@@ -61,17 +60,18 @@ def snapshot_node(ui, repo, files, node,
     os.mkdir(base)
     ui.note(_('making snapshot of %d files from rev %s\n') %
             (len(files), short(node)))
+    ctx = repo[node]
     for fn in files:
-        if not fn in mf:
+        wfn = util.pconvert(fn)
+        if not wfn in ctx:
             # skipping new file after a merge ?
             continue
-        wfn = util.pconvert(fn)
         ui.note(' %s\n' % wfn)
         dest = os.path.join(base, wfn)
         destdir = os.path.dirname(dest)
         if not os.path.isdir(destdir):
             os.makedirs(destdir)
-        data = repo.wwritedata(wfn,
+        data = repo.wwritedata(wfn, ctx[wfn].data())
         open(dest, 'wb').write(data)
     return dirname
 
@@ -121,9 +121,8 @@ def dodiff(ui, repo, diffcmd, diffopts,
     - just invoke the diff for a single file in the working dir
     '''
     node1, node2 = cmdutil.revpair(repo, opts['rev'])
-    modified, added, removed
-        node1, node2, files, match=matchfn)[:5]
+    matcher = cmdutil.match(repo, pats, opts)
+    modified, added, removed = repo.status(node1, node2, matcher)[:3]
     if not (modified or added or removed):
         return 0
 
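The same pattern recurs throughout this changeset: build a matcher once with cmdutil.match() and hand it to repo.status(), instead of juggling separate file lists and match functions. A rough sketch of the new calling convention (the helper name is illustrative):

    from mercurial import cmdutil

    def changed_files(repo, pats, opts, node1=None, node2=None):
        # one matcher object replaces the old (files, matchfn, anypats) triple
        m = cmdutil.match(repo, pats, opts)
        # the first three lists returned by status() are modified/added/removed
        modified, added, removed = repo.status(node1, node2, m)[:3]
        return modified + added + removed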
@@ -4,6 +4,7 @@
 #
 # This software may be used and distributed according to the terms
 # of the GNU General Public License, incorporated herein by reference.
+'''pulling, updating and merging in one command'''
 
 from mercurial.i18n import _
 from mercurial.node import nullid, short
@@ -239,7 +239,7 @@ def sign(ui, repo, *revs, **opts):
         repo.opener("localsigs", "ab").write(sigmessage)
         return
 
-    for x in repo.status()[:5]:
+    for x in repo.status(unknown=True)[:5]:
         if ".hgsigs" in x and not opts["force"]:
             raise util.Abort(_("working copy of .hgsigs is changed "
                                "(please commit .hgsigs manually "
@@ -4,6 +4,7 @@
 #
 # This software may be used and distributed according to the terms of
 # the GNU General Public License, incorporated herein by reference.
+'''show revision graphs in terminal windows'''
 
 import os
 import sys
@@ -12,6 +13,7 @@ from mercurial.commands import templateo
 from mercurial.i18n import _
 from mercurial.node import nullrev
 from mercurial.util import Abort, canonpath
+from mercurial import util
 
 def revision_grapher(repo, start_rev, stop_rev):
     """incremental revision grapher
@@ -52,8 +54,7 @@ def revision_grapher(repo, start_rev, st
         for parent in parents:
             if parent not in next_revs:
                 parents_to_add.append(parent)
-        parents_to_add.sort()
-        next_revs[rev_index:rev_index + 1] = parents_to_add
+        next_revs[rev_index:rev_index + 1] = util.sort(parents_to_add)
 
         edges = []
         for parent in parents:
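util.sort(), introduced by this series, returns a freshly sorted list, which is what lets the two-step append-then-sort idiom collapse into a single slice assignment. Roughly equivalent plain-Python behaviour, for readers without the Mercurial source at hand (a sketch, not the library code itself):

    def sort(l):
        # behave like mercurial.util.sort: copy, sort in place, return the copy
        l = list(l)
        l.sort()
        return l

    # so    next_revs[i:i + 1] = sort(parents_to_add)
    # replaces
    #       parents_to_add.sort()
    #       next_revs[i:i + 1] = parents_to_add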
@@ -88,7 +89,7 @@ def filelog_grapher(repo, path, start_re
     assert start_rev >= stop_rev
     curr_rev = start_rev
     revs = []
-    filerev = repo.file(path)
+    filerev = len(repo.file(path)) - 1
     while filerev >= 0:
         fctx = repo.filectx(path, fileid=filerev)
 
@@ -104,8 +105,7 @@ def filelog_grapher(repo, path, start_re
         for parent in parents:
             if parent not in next_revs:
                 parents_to_add.append(parent)
-        parents_to_add.sort()
-        next_revs[rev_index:rev_index + 1] = parents_to_add
+        next_revs[rev_index:rev_index + 1] = util.sort(parents_to_add)
 
         edges = []
         for parent in parents:
@@ -197,7 +197,7 @@ def get_revs(repo, rev_opt):
         revs = revrange(repo, rev_opt)
         return (max(revs), min(revs))
     else:
-        return (repo
+        return (len(repo) - 1, 0)
 
 def graphlog(ui, repo, path=None, **opts):
     """show revision history alongside an ASCII revision graph
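len(repo) and repo[rev] come from the localrepository container protocol that this series adopts; they stand in for the older changelog.count() and repo.changectx() calls. A small sketch of typical use, assuming an already-opened repo object:

    def first_lines(repo):
        # len(repo) counts changesets, repo[i] returns a changectx
        for i in range(len(repo)):
            ctx = repo[i]
            yield i, (ctx.description().splitlines() or [''])[0]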
@@ -4,60 +4,58 @@
 #
 # This software may be used and distributed according to the terms
 # of the GNU General Public License, incorporated herein by reference.
-#
-# The hgk extension allows browsing the history of a repository in a
-# graphical way. It requires Tcl/Tk version 8.4 or later. (Tcl/Tk is
-# not distributed with Mercurial.)
-#
-# hgk consists of two parts: a Tcl script that does the displaying and
-# querying of information, and an extension to mercurial named hgk.py,
-# which provides hooks for hgk to get information. hgk can be found in
-# the contrib directory, and hgk.py can be found in the hgext
-# directory.
-#
-#
-#
-#
-#
-#
-#
-# command, just add:
-#
-# [hgk]
-# vdiff=vdiff
-#
-# Revisions context menu will now display additional entries to fire
-# vdiff on hovered and selected revisions.
+'''browsing the repository in a graphical way
+
+The hgk extension allows browsing the history of a repository in a
+graphical way. It requires Tcl/Tk version 8.4 or later. (Tcl/Tk is
+not distributed with Mercurial.)
+
+hgk consists of two parts: a Tcl script that does the displaying and
+querying of information, and an extension to mercurial named hgk.py,
+which provides hooks for hgk to get information. hgk can be found in
+the contrib directory, and hgk.py can be found in the hgext directory.
+
+To load the hgext.py extension, add it to your .hgrc file (you have
+to use your global $HOME/.hgrc file, not one in a repository). You
+can specify an absolute path:
+
+[extensions]
+hgk=/usr/local/lib/hgk.py
+
+Mercurial can also scan the default python library path for a file
+named 'hgk.py' if you set hgk empty:
+
+[extensions]
+hgk=
+
+The hg view command will launch the hgk Tcl script. For this command
+to work, hgk must be in your search path. Alternately, you can
+specify the path to hgk in your .hgrc file:
+
+[hgk]
+path=/location/of/hgk
+
+hgk can make use of the extdiff extension to visualize revisions.
+Assuming you had already configured extdiff vdiff command, just add:
+
+[hgk]
+vdiff=vdiff
+
+Revisions context menu will now display additional entries to fire
+vdiff on hovered and selected revisions.'''
 
 import os
-from mercurial import commands, util, patch, revlog
+from mercurial import commands, util, patch, revlog, cmdutil
 from mercurial.node import nullid, nullrev, short
 
 def difftree(ui, repo, node1=None, node2=None, *files, **opts):
     """diff trees from two commits"""
     def __difftree(repo, node1, node2, files=[]):
         assert node2 is not None
-        mmap = repo
-        mmap2 = repo
-        status = repo.status(node1, node2, files=files)[:5]
-        modified, added, removed,
+        mmap = repo[node1].manifest()
+        mmap2 = repo[node2].manifest()
+        m = cmdutil.match(repo, files)
+        modified, added, removed = repo.status(node1, node2, m)[:3]
         empty = short(nullid)
 
         for f in modified:
@@ -92,8 +90,8 @@ def difftree(ui, repo, node1=None, node2
     if opts['patch']:
         if opts['pretty']:
             catcommit(ui, repo, node2, "")
-        patch.diff(repo, node1, node2,
-                   files=files,
+        m = cmdutil.match(repo, files)
+        patch.diff(repo, node1, node2, match=m,
                    opts=patch.diffopts(ui, {'git': True}))
     else:
         __difftree(repo, node1, node2, files=files)
@@ -103,11 +101,11 @@ def difftree(ui, repo, node1=None, node2
 def catcommit(ui, repo, n, prefix, ctx=None):
     nlprefix = '\n' + prefix;
     if ctx is None:
-        ctx = repo
+        ctx = repo[n]
-    (p1, p2) = ctx.parents()
     ui.write("tree %s\n" % short(ctx.changeset()[0])) # use ctx.node() instead ??
-    if p1: ui.write("parent %s\n" % short(p1.node()))
+    for p in ctx.parents():
+        ui.write("parent %s\n" % p)
+
     date = ctx.date()
     description = ctx.description().replace("\0", "")
     lines = description.splitlines()
@@ -175,7 +173,7 @@ def catfile(ui, repo, type=None, r=None,
 # you can specify a commit to stop at by starting the sha1 with ^
 def revtree(ui, args, repo, full="tree", maxnr=0, parents=False):
     def chlogwalk():
-        count = repo
+        count = len(repo)
         i = count
         l = [0] * 100
         chunk = 100
@@ -191,7 +189,7 @@ def revtree(ui, args, repo, full="tree",
                 l[chunk - x:] = [0] * (chunk - x)
                 break
             if full != None:
-                l[x] = repo
+                l[x] = repo[i + x]
                 l[x].changeset() # force reading
             else:
                 l[x] = 1
@@ -1,6 +1,4 @@
-"""
-This is Mercurial extension for syntax highlighting in the file
-revision view of hgweb.
+"""a mercurial extension for syntax highlighting in hgweb
 
 It depends on the pygments syntax highlighting library:
 http://pygments.org/
@@ -15,23 +13,15 @@ There is a single configuration option:
 [web]
 pygments_style = <style>
 
-The default is 'colorful'. If this is changed the corresponding CSS
-file should be re-generated by running
-
-# pygmentize -f html -S <newstyle>
-
+The default is 'colorful'.
 
 -- Adam Hupp <adam@hupp.org>
-
-
 """
 
 from mercurial import demandimport
-demandimport.ignore.extend(['pkgutil',
-                            'pkg_resources',
-                            '__main__',])
+demandimport.ignore.extend(['pkgutil', 'pkg_resources', '__main__',])
 
-from mercurial.hgweb
+from mercurial.hgweb import webcommands, webutil, common
 from mercurial import util
 from mercurial.templatefilters import filters
 
@@ -40,10 +30,11 @@ from pygments.util import ClassNotFound
 from pygments.lexers import guess_lexer, guess_lexer_for_filename, TextLexer
 from pygments.formatters import HtmlFormatter
 
-SYNTAX_CSS = ('\n<link rel="stylesheet" href="
+SYNTAX_CSS = ('\n<link rel="stylesheet" href="{url}highlightcss" '
               'type="text/css" />')
 
-def pygmentize(
+def pygmentize(field, fctx, style, tmpl):
+
     # append a <link ...> to the syntax highlighting css
     old_header = ''.join(tmpl('header'))
     if SYNTAX_CSS not in old_header:
@@ -54,7 +45,6 @@ def pygmentize(self, tmpl, fctx, field):
     if util.binary(text):
         return
 
-    style = self.config("web", "pygments_style", "colorful")
     # To get multi-line strings right, we can't format line-by-line
     try:
         lexer = guess_lexer_for_filename(fctx.path(), text[:1024],
@@ -79,20 +69,30 @@ def pygmentize(self, tmpl, fctx, field):
         newl = oldl.replace('line|escape', 'line|colorize')
         tmpl.cache[field] = newl
 
-def filerevision_highlight(self, tmpl, fctx):
-    pygmentize(self, tmpl, fctx, 'fileline')
+web_filerevision = webcommands._filerevision
+web_annotate = webcommands.annotate
 
-    return realrevision(self, tmpl, fctx)
+def filerevision_highlight(web, tmpl, fctx):
+    style = web.config('web', 'pygments_style', 'colorful')
+    pygmentize('fileline', fctx, style, tmpl)
+    return web_filerevision(web, tmpl, fctx)
 
-def
-    pygmentize(self, tmpl, fctx, 'annotateline')
+def annotate_highlight(web, req, tmpl):
+    fctx = webutil.filectx(web.repo, req)
+    style = web.config('web', 'pygments_style', 'colorful')
+    pygmentize('annotateline', fctx, style, tmpl)
+    return web_annotate(web, req, tmpl)
 
-    return realannotate(self, tmpl, fctx)
+def generate_css(web, req, tmpl):
+    pg_style = web.config('web', 'pygments_style', 'colorful')
+    fmter = HtmlFormatter(style = pg_style)
+    req.respond(common.HTTP_OK, 'text/css')
+    return ['/* pygments_style = %s */\n\n' % pg_style, fmter.get_style_defs('')]
+
 
 # monkeypatch in the new version
-# should be safer than overriding the method in a derived class
-# and then patching the class
-realrevision = hgweb.filerevision
-hgweb.filerevision = filerevision_highlight
-realannotate = hgweb.fileannotate
-hgweb.fileannotate = fileannotate_highlight
+
+webcommands._filerevision = filerevision_highlight
+webcommands.annotate = annotate_highlight
+webcommands.highlightcss = generate_css
+webcommands.__all__.append('highlightcss')
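The rewritten highlight extension uses the standard monkeypatching recipe seen above: keep a reference to the original webcommands entry, install a wrapper that delegates to it, and register any brand-new command in webcommands.__all__. A generic sketch of that pattern (command and function names here are illustrative, not the extension's own):

    from mercurial.hgweb import webcommands

    # remember the original so the wrapper can delegate to it
    _orig_annotate = webcommands.annotate

    def annotate_wrapped(web, req, tmpl):
        # extra processing would happen here before delegating
        return _orig_annotate(web, req, tmpl)

    def extracss(web, req, tmpl):
        # a completely new web command also has to be listed in __all__
        return ['/* extension-provided stylesheet */\n']

    webcommands.annotate = annotate_wrapped
    webcommands.extracss = extracss
    webcommands.__all__.append('extracss')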
@@ -47,12 +47,12 @@ def reposetup(ui, repo):
         # to recurse.
         inotifyserver = False
 
-        def status(self,
-                   list_unknown=True):
+        def status(self, match, ignored, clean, unknown=True):
+            files = match.files()
             try:
-                if not
+                if not ignored and not self.inotifyserver:
                     result = client.query(ui, repo, files, match, False,
+                                          clean, unknown)
                     if result is not None:
                         return result
             except socket.error, err:
@@ -81,15 +81,14 @@ def reposetup(ui, repo):
             if query:
                 try:
                     return query(ui, repo, files or [], match,
+                                 ignored, clean, unknown)
                 except socket.error, err:
                     ui.warn(_('could not talk to new inotify '
                               'server: %s\n') % err[1])
                     ui.print_exc()
 
             return super(inotifydirstate, self).status(
-                list_unknown)
+                match, ignored, clean, unknown)
 
         repo.dirstate.__class__ = inotifydirstate
 
@@ -11,7 +11,7 @@ from mercurial import ui
 import common
 import os, select, socket, stat, struct, sys
 
-def query(ui, repo, names, match,
+def query(ui, repo, names, match, ignored, clean, unknown=True):
     sock = socket.socket(socket.AF_UNIX)
     sockpath = repo.join('inotify.sock')
     sock.connect(sockpath)
@@ -20,10 +20,10 @@ def query(ui, repo, names, match, list_i
         for n in names or []:
             yield n
         states = 'almrx!'
-        if
+        if ignored:
             raise ValueError('this is insanity')
-        if
-        if
+        if clean: states += 'n'
+        if unknown: states += '?'
         yield states
 
     req = '\0'.join(genquery())
@@ -534,9 +534,7 @@ class Watcher(object):
         self.ui.note('%s processing %d deferred events as %d\n' %
                      (self.event_time(), self.deferred,
                       len(self.eventq)))
-        eventq.sort()
-        for wpath, evts in eventq:
+        for wpath, evts in util.sort(self.eventq.items()):
             for evt in evts:
                 self.deferred_event(wpath, evt)
         self.eventq.clear()
@@ -78,7 +78,7 @@ like CVS' $Log$, are not supported. A ke
 "Log = {desc}" expands to the first line of the changeset description.
 '''
 
-from mercurial import commands, cmdutil,
+from mercurial import commands, cmdutil, dispatch, filelog, revlog
 from mercurial import patch, localrepo, templater, templatefilters, util
 from mercurial.hgweb import webcommands
 from mercurial.node import nullid, hex
@@ -88,8 +88,8 @@ import re, shutil, tempfile, time
 commands.optionalrepo += ' kwdemo'
 
 # hg commands that do not act on keywords
-nokwcommands = ('add addremove bundle copy export grep incoming init'
-                ' log outgoing push rename rollback tip'
+nokwcommands = ('add addremove annotate bundle copy export grep incoming init'
+                ' log outgoing push rename rollback tip verify'
                 ' convert email glog')
 
 # hg commands that trigger expansion only when writing to working dir,
@@ -100,52 +100,8 @@ def utcdate(date):
     '''Returns hgdate in cvs-like UTC format.'''
     return time.strftime('%Y/%m/%d %H:%M:%S', time.gmtime(date[0]))
 
-
 # make keyword tools accessible
-kwtools = {'templater': None, 'hgcmd':
+kwtools = {'templater': None, 'hgcmd': '', 'inc': [], 'exc': ['.hg*']}
-
-# store originals of monkeypatches
-_patchfile_init = patch.patchfile.__init__
-_patch_diff = patch.diff
-_dispatch_parse = dispatch._parse
-
-def _kwpatchfile_init(self, ui, fname, missing=False):
-    '''Monkeypatch/wrap patch.patchfile.__init__ to avoid
-    rejects or conflicts due to expanded keywords in working dir.'''
-    _patchfile_init(self, ui, fname, missing=missing)
-    # shrink keywords read from working dir
-    kwt = kwtools['templater']
-    self.lines = kwt.shrinklines(self.fname, self.lines)
-
-def _kw_diff(repo, node1=None, node2=None, files=None, match=util.always,
-             fp=None, changes=None, opts=None):
-    '''Monkeypatch patch.diff to avoid expansion except when
-    comparing against working dir.'''
-    if node2 is not None:
-        kwtools['templater'].matcher = util.never
-    elif node1 is not None and node1 != repo.changectx().node():
-        kwtools['templater'].restrict = True
-    _patch_diff(repo, node1=node1, node2=node2, files=files, match=match,
-                fp=fp, changes=changes, opts=opts)
-
-def _kwweb_changeset(web, req, tmpl):
-    '''Wraps webcommands.changeset turning off keyword expansion.'''
-    kwtools['templater'].matcher = util.never
-    return web.changeset(tmpl, web.changectx(req))
-
-def _kwweb_filediff(web, req, tmpl):
-    '''Wraps webcommands.filediff turning off keyword expansion.'''
-    kwtools['templater'].matcher = util.never
-    return web.filediff(tmpl, web.filectx(req))
-
-def _kwdispatch_parse(ui, args):
-    '''Monkeypatch dispatch._parse to obtain running hg command.'''
-    cmd, func, args, options, cmdoptions = _dispatch_parse(ui, args)
-    kwtools['hgcmd'] = cmd
-    return cmd, func, args, options, cmdoptions
-
-# dispatch._parse is run before reposetup, so wrap it here
-dispatch._parse = _kwdispatch_parse
 
 
 class kwtemplater(object):
@@ -163,15 +119,16 @@ class kwtemplater(object):
         'Header': '{root}/{file},v {node|short} {date|utcdate} {author|user}',
     }
 
-    def __init__(self, ui, repo
+    def __init__(self, ui, repo):
         self.ui = ui
         self.repo = repo
         self.matcher = util.matcher(repo.root,
+                                    inc=kwtools['inc'], exc=kwtools['exc'])[1]
         self.restrict = kwtools['hgcmd'] in restricted.split()
 
         kwmaps = self.ui.configitems('keywordmaps')
         if kwmaps: # override default templates
-            kwmaps = [(k, templater.parsestring(v,
+            kwmaps = [(k, templater.parsestring(v, False))
                       for (k, v) in kwmaps]
             self.templates = dict(kwmaps)
             escaped = map(re.escape, self.templates.keys())
@@ -185,7 +142,7 @@ class kwtemplater(object):
     def getnode(self, path, fnode):
         '''Derives changenode from file path and filenode.'''
         # used by kwfilelog.read and kwexpand
-        c =
+        c = self.repo.filectx(path, fileid=fnode)
         return c.node()
 
     def substitute(self, data, path, node, subfunc):
@@ -206,25 +163,26 @@ class kwtemplater(object):
             return self.substitute(data, path, changenode, self.re_kw.sub)
         return data
 
-    def iskwfile(self, path,
+    def iskwfile(self, path, flagfunc):
         '''Returns true if path matches [keyword] pattern
         and is not a symbolic link.
         Caveat: localrepository._link fails on Windows.'''
-        return self.matcher(path) and not
+        return self.matcher(path) and not 'l' in flagfunc(path)
 
-    def overwrite(self, node
+    def overwrite(self, node, expand, files):
         '''Overwrites selected files expanding/shrinking keywords.'''
-        ctx = self.repo.changectx(node)
-        mf = ctx.manifest()
         if node is not None: # commit
+            ctx = self.repo[node]
+            mf = ctx.manifest()
             files = [f for f in ctx.files() if f in mf]
             notify = self.ui.debug
         else: # kwexpand/kwshrink
+            ctx = self.repo['.']
+            mf = ctx.manifest()
             notify = self.ui.note
-        candidates = [f for f in files if self.iskwfile(f,
+        candidates = [f for f in files if self.iskwfile(f, ctx.flags)]
         if candidates:
             self.restrict = True # do not expand when reading
-            candidates.sort()
             action = expand and 'expanding' or 'shrinking'
             for f in candidates:
                 fp = self.repo.file(f)
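iskwfile() now asks the changectx for per-file flags instead of taking a dedicated islink callable; ctx.flags(path) returns '' for regular files, 'x' for executables and 'l' for symlinks. A one-function sketch of the check under that assumption (the function name is illustrative):

    def is_keyword_candidate(matcher, ctx, path):
        # expand keywords only in regular files matched by [keyword] patterns;
        # symlinks are skipped because their "data" is the link target
        return matcher(path) and 'l' not in ctx.flags(path)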
@@ -271,9 +229,9 @@ class kwfilelog(filelog.filelog):
     Subclass of filelog to hook into its read, add, cmp methods.
     Keywords are "stored" unexpanded, and processed on reading.
     '''
-    def __init__(self, opener, path):
+    def __init__(self, opener, kwt, path):
         super(kwfilelog, self).__init__(opener, path)
         self.kwt = kwt
         self.path = path
 
     def read(self, node):
@@ -284,7 +242,7 @@ class kwfilelog(filelog.filelog):
     def add(self, text, meta, tr, link, p1=None, p2=None):
         '''Removes keyword substitutions when adding to filelog.'''
         text = self.kwt.shrink(self.path, text)
-        return super(kwfilelog, self).add(text, meta, tr, link, p1
+        return super(kwfilelog, self).add(text, meta, tr, link, p1, p2)
 
     def cmp(self, node, text):
         '''Removes keyword substitutions for comparison.'''
@@ -294,28 +252,30 @@ class kwfilelog(filelog.filelog):
             return t2 != text
         return revlog.revlog.cmp(self, node, text)
 
-def _status(ui, repo, kwt, *pats, **opts):
+def _status(ui, repo, kwt, unknown, *pats, **opts):
     '''Bails out if [keyword] configuration is not active.
     Returns status of working directory.'''
     if kwt:
-        return repo.status(
+        matcher = cmdutil.match(repo, pats, opts)
+        return repo.status(match=matcher, unknown=unknown, clean=True)
     if ui.configitems('keyword'):
         raise util.Abort(_('[keyword] patterns cannot match'))
     raise util.Abort(_('no [keyword] patterns configured'))
 
 def _kwfwrite(ui, repo, expand, *pats, **opts):
     '''Selects files and passes them to kwtemplater.overwrite.'''
+    if repo.dirstate.parents()[1] != nullid:
+        raise util.Abort(_('outstanding uncommitted merge'))
     kwt = kwtools['templater']
-    status = _status(ui, repo, kwt, *pats, **opts)
-    modified, added, removed, deleted
+    status = _status(ui, repo, kwt, False, *pats, **opts)
+    modified, added, removed, deleted = status[:4]
     if modified or added or removed or deleted:
-        raise util.Abort(_('outstanding uncommitted changes
+        raise util.Abort(_('outstanding uncommitted changes'))
     wlock = lock = None
     try:
        wlock = repo.wlock()
        lock = repo.lock()
-        kwt.overwrite(
+        kwt.overwrite(None, expand, status[6])
    finally:
        del wlock, lock
 
@@ -345,7 +305,7 @@ def demo(ui, repo, *args, **opts):
     branchname = 'demobranch'
     tmpdir = tempfile.mkdtemp('', 'kwdemo.')
     ui.note(_('creating temporary repo at %s\n') % tmpdir)
-    repo = localrepo.localrepository(ui,
+    repo = localrepo.localrepository(ui, tmpdir, True)
     ui.setconfig('keyword', fn, '')
     if args or opts.get('rcfile'):
         kwstatus = 'custom'
@@ -367,6 +327,7 @@ def demo(ui, repo, *args, **opts):
     ui.readconfig(repo.join('hgrc'))
     if not opts.get('default'):
         kwmaps = dict(ui.configitems('keywordmaps')) or kwtemplater.templates
+    uisetup(ui)
     reposetup(ui, repo)
     for k, v in ui.configitems('extensions'):
         if k.endswith('keyword'):
@@ -418,15 +379,11 @@ def files(ui, repo, *pats, **opts):
     That is, files matched by [keyword] config patterns but not symlinks.
     '''
     kwt = kwtools['templater']
-    status = _status(ui, repo, kwt, *pats, **opts)
+    status = _status(ui, repo, kwt, opts.get('untracked'), *pats, **opts)
     modified, added, removed, deleted, unknown, ignored, clean = status
-    files = modified + added + clean
-    if opts.get('untracked'):
-        files += unknown
-    files.sort()
-    wctx = repo.workingctx()
-    islink = lambda p: 'l' in wctx.fileflags(p)
-    kwfiles = [f for f in files if kwt.iskwfile(f, islink)]
+    files = util.sort(modified + added + clean + unknown)
+    wctx = repo[None]
+    kwfiles = [f for f in files if kwt.iskwfile(f, wctx.flags)]
     cwd = pats and repo.getcwd() or ''
     kwfstats = not opts.get('ignore') and (('K', kwfiles),) or ()
     if opts.get('all') or opts.get('ignore'):
@@ -448,46 +405,57 @@ def shrink(ui, repo, *pats, **opts):
     _kwfwrite(ui, repo, False, *pats, **opts)
 
 
+def uisetup(ui):
+    '''Collects [keyword] config in kwtools.
+    Monkeypatches dispatch._parse if needed.'''
+
+    for pat, opt in ui.configitems('keyword'):
+        if opt != 'ignore':
+            kwtools['inc'].append(pat)
+        else:
+            kwtools['exc'].append(pat)
+
+    if kwtools['inc']:
+        def kwdispatch_parse(ui, args):
+            '''Monkeypatch dispatch._parse to obtain running hg command.'''
+            cmd, func, args, options, cmdoptions = dispatch_parse(ui, args)
+            kwtools['hgcmd'] = cmd
+            return cmd, func, args, options, cmdoptions
+
+        dispatch_parse = dispatch._parse
+        dispatch._parse = kwdispatch_parse
+
 def reposetup(ui, repo):
     '''Sets up repo as kwrepo for keyword substitution.
     Overrides file method to return kwfilelog instead of filelog
     if file matches user configuration.
     Wraps commit to overwrite configured files with updated
     keyword substitutions.
-    This is done for local repos only, and only if there are
-    files configured at all for keyword substitution.'''
+    Monkeypatches patch and webcommands.'''
 
     try:
-        if (not repo.local() or kwtools['
+        if (not repo.local() or not kwtools['inc']
+            or kwtools['hgcmd'] in nokwcommands.split()
             or '.hg' in util.splitpath(repo.root)
             or repo._url.startswith('bundle:')):
             return
     except AttributeError:
         pass
 
-    inc, exc = [], ['.hg*']
-    for pat, opt in ui.configitems('keyword'):
-        if opt != 'ignore':
-            inc.append(pat)
-        else:
-            exc.append(pat)
-    if not inc:
-        return
-
-    kwtools['templater'] = kwt = kwtemplater(ui, repo, inc, exc)
+    kwtools['templater'] = kwt = kwtemplater(ui, repo)
 
     class kwrepo(repo.__class__):
         def file(self, f):
             if f[0] == '/':
                 f = f[1:]
-            return kwfilelog(self.sopener, f)
+            return kwfilelog(self.sopener, kwt, f)
 
         def wread(self, filename):
             data = super(kwrepo, self).wread(filename)
             return kwt.wread(filename, data)
 
         def commit(self, files=None, text='', user=None, date=None,
-                   match=
+                   match=None, force=False, force_editor=False,
                    p1=None, p2=None, extra={}, empty_ok=False):
             wlock = lock = None
             _p1 = _p2 = None
@@ -512,28 +480,66 @@ def reposetup(ui, repo):
                 else:
                     _p2 = hex(_p2)
 
-                node = super(kwrepo,
-                             self).commit(files=files, text=text, user=user,
-                             force_editor=force_editor,
-                             p1=p1, p2=p2, extra=extra,
-                             empty_ok=empty_ok)
+                n = super(kwrepo, self).commit(files, text, user, date, match,
+                                               force, force_editor, p1, p2,
+                                               extra, empty_ok)
 
                 # restore commit hooks
                 for name, cmd in commithooks.iteritems():
                     ui.setconfig('hooks', name, cmd)
-                if n
-                    kwt.overwrite(n
-                    repo.hook('commit', node=n
+                if n is not None:
+                    kwt.overwrite(n, True, None)
+                    repo.hook('commit', node=n, parent1=_p1, parent2=_p2)
                 return n
             finally:
                 del wlock, lock
 
+    # monkeypatches
+    def kwpatchfile_init(self, ui, fname, missing=False):
+        '''Monkeypatch/wrap patch.patchfile.__init__ to avoid
+        rejects or conflicts due to expanded keywords in working dir.'''
+        patchfile_init(self, ui, fname, missing)
+        # shrink keywords read from working dir
+        self.lines = kwt.shrinklines(self.fname, self.lines)
+
+    def kw_diff(repo, node1=None, node2=None, match=None,
+                fp=None, changes=None, opts=None):
+        '''Monkeypatch patch.diff to avoid expansion except when
+        comparing against working dir.'''
+        if node2 is not None:
+            kwt.matcher = util.never
+        elif node1 is not None and node1 != repo['.'].node():
+            kwt.restrict = True
+        patch_diff(repo, node1, node2, match, fp, changes, opts)
+
+    def kwweb_annotate(web, req, tmpl):
+        '''Wraps webcommands.annotate turning off keyword expansion.'''
+        kwt.matcher = util.never
+        return webcommands_annotate(web, req, tmpl)
+
+    def kwweb_changeset(web, req, tmpl):
+        '''Wraps webcommands.changeset turning off keyword expansion.'''
+        kwt.matcher = util.never
+        return webcommands_changeset(web, req, tmpl)
+
+    def kwweb_filediff(web, req, tmpl):
+        '''Wraps webcommands.filediff turning off keyword expansion.'''
+        kwt.matcher = util.never
+        return webcommands_filediff(web, req, tmpl)
+
     repo.__class__ = kwrepo
-    patch.patchfile.__init__ = _kwpatchfile_init
-    patch.diff = _kw_diff
-    webcommands.changeset = webcommands.rev = _kwweb_changeset
-    webcommands.filediff = webcommands.diff = _kwweb_filediff
+
+    patchfile_init = patch.patchfile.__init__
+    patch_diff = patch.diff
+    webcommands_annotate = webcommands.annotate
+    webcommands_changeset = webcommands.changeset
+    webcommands_filediff = webcommands.filediff
+
+    patch.patchfile.__init__ = kwpatchfile_init
+    patch.diff = kw_diff
+    webcommands.annotate = kwweb_annotate
+    webcommands.changeset = webcommands.rev = kwweb_changeset
+    webcommands.filediff = webcommands.diff = kwweb_filediff
 
 
 cmdtable = {
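The keyword extension's uisetup()/reposetup() split above follows the general extension loading order: uisetup runs once when the extension is loaded (the place for config collection and global wrapping such as dispatch._parse), while reposetup runs for every repository object and holds the per-repo monkeypatches. A minimal skeleton of that division of labour, with illustrative names only:

    state = {'inc': [], 'exc': ['.hg*'], 'hgcmd': ''}

    def uisetup(ui):
        # runs once, before any repository exists: gather configuration and
        # wrap global entry points here
        for pat, opt in ui.configitems('keyword'):
            if opt != 'ignore':
                state['inc'].append(pat)
            else:
                state['exc'].append(pat)

    def reposetup(ui, repo):
        # runs per repository: bail out early when there is nothing to do
        if not repo.local() or not state['inc']:
            return
        # per-repository wrapping of repo.file / repo.commit would go here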
@@ -143,8 +143,7 @@ class queue:
             bad = self.check_guard(guard)
             if bad:
                 raise util.Abort(bad)
-        guards =
-        guards.sort()
+        guards = util.sort(util.unique(guards))
         self.ui.debug('active guards: %s\n' % ' '.join(guards))
         self.active_guards = guards
         self.guards_dirty = True
@@ -322,10 +321,8 @@ class queue:
 
     def printdiff(self, repo, node1, node2=None, files=None,
                   fp=None, changes=None, opts={}):
-        patch.diff(repo, node1, node2, fns, match=matchfn,
-                   fp=fp, changes=changes, opts=self.diffopts())
+        m = cmdutil.match(repo, files, opts)
+        patch.diff(repo, node1, node2, m, fp, changes, self.diffopts())
 
     def mergeone(self, repo, mergeq, head, patch, rev):
         # first try just applying the patch
@@ -344,7 +341,7 @@ class queue:
         hg.clean(repo, head)
         self.strip(repo, n, update=False, backup='strip')
 
-        ctx = repo
+        ctx = repo[rev]
         ret = hg.merge(repo, rev)
         if ret:
             raise util.Abort(_("update returned %d") % ret)
@@ -510,8 +507,10 @@ class queue:
                     repo.dirstate.merge(f)
                 p1, p2 = repo.dirstate.parents()
                 repo.dirstate.setparents(p1, merge)
+
             files = patch.updatedir(self.ui, repo, files)
-            n = repo.commit(files, message, user, date, match=util.never,
+            match = cmdutil.matchfiles(repo, files or [])
+            n = repo.commit(files, message, user, date, match=match,
                             force=True)
 
             if n == None:
@@ -535,6 +534,40 @@ class queue:
                 break
         return (err, n)
 
+    def _clean_series(self, patches):
+        indices = util.sort([self.find_series(p) for p in patches])
+        for i in indices[-1::-1]:
+            del self.full_series[i]
+        self.parse_series()
+        self.series_dirty = 1
+
+    def finish(self, repo, revs):
+        revs.sort()
+        firstrev = repo[self.applied[0].rev].rev()
+        appliedbase = 0
+        patches = []
+        for rev in util.sort(revs):
+            if rev < firstrev:
+                raise util.Abort(_('revision %d is not managed') % rev)
+            base = revlog.bin(self.applied[appliedbase].rev)
+            node = repo.changelog.node(rev)
+            if node != base:
+                raise util.Abort(_('cannot delete revision %d above '
+                                   'applied patches') % rev)
+            patches.append(self.applied[appliedbase].name)
+            appliedbase += 1
+
+        r = self.qrepo()
+        if r:
+            r.remove(patches, True)
+        else:
+            for p in patches:
+                os.unlink(self.join(p))
+
+        del self.applied[:appliedbase]
+        self.applied_dirty = 1
+        self._clean_series(patches)
+
     def delete(self, repo, patches, opts):
         if not patches and not opts.get('rev'):
             raise util.Abort(_('qdelete requires at least one revision or '
@@ -580,12 +613,7 @@ class queue:
         if appliedbase:
             del self.applied[:appliedbase]
             self.applied_dirty = 1
-        indices.sort()
-        for i in indices[-1::-1]:
-            del self.full_series[i]
-        self.parse_series()
-        self.series_dirty = 1
+        self._clean_series(realpatches)
 
     def check_toppatch(self, repo):
         if len(self.applied) > 0:
@@ -623,11 +651,11 @@ class queue:
         if os.path.exists(self.join(patch)):
             raise util.Abort(_('patch "%s" already exists') % patch)
         if opts.get('include') or opts.get('exclude') or pats:
-            m, a, r, d = repo.status(
+            match = cmdutil.match(repo, pats, opts)
+            m, a, r, d = repo.status(match=match)[:4]
         else:
             m, a, r, d = self.check_localchanges(repo, force)
+            match = cmdutil.match(repo, m + a + r)
         commitfiles = m + a + r
         self.check_toppatch(repo)
         wlock = repo.wlock()
@@ -665,14 +693,14 @@ class queue:
         finally:
             del wlock
 
-    def strip(self, repo, rev, update=True, backup="all"):
+    def strip(self, repo, rev, update=True, backup="all", force=None):
         wlock = lock = None
         try:
             wlock = repo.wlock()
             lock = repo.lock()
 
             if update:
-                self.check_localchanges(repo, refresh=False)
+                self.check_localchanges(repo, force=force, refresh=False)
                 urev = self.qparents(repo, rev)
                 hg.clean(repo, urev)
                 repo.dirstate.write()
@@ -822,7 +850,7 @@ class queue:
                 self.ui.warn(_('cleaning up working directory...'))
                 node = repo.dirstate.parents()[0]
                 hg.revert(repo, node, None)
-                unknown = repo.status()[4]
+                unknown = repo.status(unknown=True)[4]
                 # only remove unknown files that we know we touched or
                 # created while patching
                 for f in unknown:
@@ -903,7 +931,7 @@ class queue:
         qp = self.qparents(repo, rev)
         changes = repo.changelog.read(qp)
         mmap = repo.manifest.read(changes[0])
-        m, a, r, d
+        m, a, r, d = repo.status(qp, top)[:4]
         if d:
             raise util.Abort("deletions found between repo revs")
         for f in m:
@@ -937,10 +965,7 @@ class queue:
             self.ui.write("No patches applied\n")
             return
         qp = self.qparents(repo, top)
-        if opts.get('git'):
-            self.diffopts().git = True
-        if opts.get('unified') is not None:
-            self.diffopts().context = opts['unified']
+        self._diffopts = patch.diffopts(self.ui, opts)
         self.printdiff(repo, qp, files=pats, opts=opts)
 
     def refresh(self, repo, pats=None, **opts):
@@ -1026,7 +1051,7 @@ class queue:
 
             if opts.get('git'):
                 self.diffopts().git = True
+            matchfn = cmdutil.match(repo, pats, opts)
             tip = repo.changelog.tip()
             if top == tip:
                 # if the top of our patch queue is also the tip, there is an
@@ -1039,21 +1064,19 @@ class queue:
                 # patch already
                 #
                 # this should really read:
-                #   mm, dd, aa, aa2
+                #   mm, dd, aa, aa2 = repo.status(tip, patchparent)[:4]
                 # but we do it backwards to take advantage of manifest/chlog
                 # caching against the next repo.status call
                 #
-                mm, aa, dd, aa2
+                mm, aa, dd, aa2 = repo.status(patchparent, tip)[:4]
                 changes = repo.changelog.read(tip)
                 man = repo.manifest.read(changes[0])
                 aaa = aa[:]
                 if opts.get('short'):
-                    match = dict.fromkeys(filelist).__contains__
+                    match = cmdutil.matchfiles(repo, mm + aa + dd)
                 else:
-                    filelist = None
-                    match = util.always
-                m, a, r, d, u = repo.status(files=filelist, match=match)[:5]
+                    match = cmdutil.matchall(repo)
+                m, a, r, d = repo.status(match=match)[:4]
 
                 # we might end up with files that were added between
                 # tip and the dirstate parent, but then changed in the
@@ -1086,9 +1109,9 class queue: | |||||
1086 | m = util.unique(mm) |
|
1109 | m = util.unique(mm) | |
1087 | r = util.unique(dd) |
|
1110 | r = util.unique(dd) | |
1088 | a = util.unique(aa) |
|
1111 | a = util.unique(aa) | |
1089 |
c = [filter(matchfn, l) for l in (m, a, r |
|
1112 | c = [filter(matchfn, l) for l in (m, a, r)] | |
1090 |
|
|
1113 | match = cmdutil.matchfiles(repo, util.unique(c[0] + c[1] + c[2])) | |
1091 |
patch.diff(repo, patchparent, |
|
1114 | patch.diff(repo, patchparent, match=match, | |
1092 | fp=patchf, changes=c, opts=self.diffopts()) |
|
1115 | fp=patchf, changes=c, opts=self.diffopts()) | |
1093 | patchf.close() |
|
1116 | patchf.close() | |
1094 |
|
1117 | |||
@@ -1146,7 +1169,7 class queue: | |||||
1146 | self.applied_dirty = 1 |
|
1169 | self.applied_dirty = 1 | |
1147 | self.strip(repo, top, update=False, |
|
1170 | self.strip(repo, top, update=False, | |
1148 | backup='strip') |
|
1171 | backup='strip') | |
1149 |
n = repo.commit( |
|
1172 | n = repo.commit(match.files(), message, user, date, match=match, | |
1150 | force=1) |
|
1173 | force=1) | |
1151 | self.applied.append(statusentry(revlog.hex(n), patchfn)) |
|
1174 | self.applied.append(statusentry(revlog.hex(n), patchfn)) | |
1152 | self.removeundo(repo) |
|
1175 | self.removeundo(repo) | |
@@ -1236,8 +1259,7 class queue: | |||||
1236 | self.guards_path) |
|
1259 | self.guards_path) | |
1237 | and not fl.startswith('.')): |
|
1260 | and not fl.startswith('.')): | |
1238 | msng_list.append(fl) |
|
1261 | msng_list.append(fl) | |
1239 |
|
|
1262 | for x in util.sort(msng_list): | |
1240 | for x in msng_list: |
|
|||
1241 | pfx = self.ui.verbose and ('D ') or '' |
|
1263 | pfx = self.ui.verbose and ('D ') or '' | |
1242 | self.ui.write("%s%s\n" % (pfx, displayname(x))) |
|
1264 | self.ui.write("%s%s\n" % (pfx, displayname(x))) | |
1243 |
|
1265 | |||
@@ -1499,9 +1521,8 def delete(ui, repo, *patches, **opts): | |||||
1499 | the --rev parameter. At least one patch or revision is required. |
|
1521 | the --rev parameter. At least one patch or revision is required. | |
1500 |
|
1522 | |||
1501 | With --rev, mq will stop managing the named revisions (converting |
|
1523 | With --rev, mq will stop managing the named revisions (converting | |
1502 | them to regular mercurial changesets). The patches must be applied |
|
1524 | them to regular mercurial changesets). The qfinish command should be | |
1503 | and at the base of the stack. This option is useful when the patches |
|
1525 | used as an alternative for qdel -r, as the latter option is deprecated. | |
1504 | have been applied upstream. |
|
|||
1505 |
|
1526 | |||
1506 | With --keep, the patch files are preserved in the patch directory.""" |
|
1527 | With --keep, the patch files are preserved in the patch directory.""" | |
1507 | q = repo.mq |
|
1528 | q = repo.mq | |
@@ -2086,7 +2107,7 def strip(ui, repo, rev, **opts): | |||||
2086 | elif rev not in (cl.ancestor(p[0], rev), cl.ancestor(p[1], rev)): |
|
2107 | elif rev not in (cl.ancestor(p[0], rev), cl.ancestor(p[1], rev)): | |
2087 | update = False |
|
2108 | update = False | |
2088 |
|
2109 | |||
2089 | repo.mq.strip(repo, rev, backup=backup, update=update) |
|
2110 | repo.mq.strip(repo, rev, backup=backup, update=update, force=opts['force']) | |
2090 | return 0 |
|
2111 | return 0 | |
2091 |
|
2112 | |||
2092 | def select(ui, repo, *args, **opts): |
|
2113 | def select(ui, repo, *args, **opts): | |
@@ -2191,6 +2212,34 def select(ui, repo, *args, **opts): | |||||
2191 | finally: |
|
2212 | finally: | |
2192 | q.save_dirty() |
|
2213 | q.save_dirty() | |
2193 |
|
2214 | |||
|
2215 | def finish(ui, repo, *revrange, **opts): | |||
|
2216 | """move applied patches into repository history | |||
|
2217 | ||||
|
2218 | Finishes the specified revisions (corresponding to applied patches) by | |||
|
2219 | moving them out of mq control into regular repository history. | |||
|
2220 | ||||
|
2221 | Accepts a revision range or the --all option. If --all is specified, all | |||
|
2222 | applied mq revisions are removed from mq control. Otherwise, the given | |||
|
2223 | revisions must be at the base of the stack of applied patches. | |||
|
2224 | ||||
|
2225 | This can be especially useful if your changes have been applied to an | |||
|
2226 | upstream repository, or if you are about to push your changes to upstream. | |||
|
2227 | """ | |||
|
2228 | if not opts['applied'] and not revrange: | |||
|
2229 | raise util.Abort(_('no revisions specified')) | |||
|
2230 | elif opts['applied']: | |||
|
2231 | revrange = ('qbase:qtip',) + revrange | |||
|
2232 | ||||
|
2233 | q = repo.mq | |||
|
2234 | if not q.applied: | |||
|
2235 | ui.status(_('no patches applied\n')) | |||
|
2236 | return 0 | |||
|
2237 | ||||
|
2238 | revs = cmdutil.revrange(repo, revrange) | |||
|
2239 | q.finish(repo, revs) | |||
|
2240 | q.save_dirty() | |||
|
2241 | return 0 | |||
|
2242 | ||||
2194 | def reposetup(ui, repo): |
|
2243 | def reposetup(ui, repo): | |
2195 | class mqrepo(repo.__class__): |
|
2244 | class mqrepo(repo.__class__): | |
2196 | def abort_if_wdir_patched(self, errmsg, force=False): |
|
2245 | def abort_if_wdir_patched(self, errmsg, force=False): | |
@@ -2267,7 +2316,7 def reposetup(ui, repo): | |||||
2267 | # we might as well use it, but we won't save it. |
|
2316 | # we might as well use it, but we won't save it. | |
2268 |
|
2317 | |||
2269 | # update the cache up to the tip |
|
2318 | # update the cache up to the tip | |
2270 | self._updatebranchcache(partial, start, cl.count()) |
|
2319 | self._updatebranchcache(partial, start, len(cl)) | |
2271 |
|
2320 | |||
2272 | return partial |
|
2321 | return partial | |
2273 |
|
2322 | |||
@@ -2300,10 +2349,8 cmdtable = { | |||||
2300 | _('hg qcommit [OPTION]... [FILE]...')), |
|
2349 | _('hg qcommit [OPTION]... [FILE]...')), | |
2301 | "^qdiff": |
|
2350 | "^qdiff": | |
2302 | (diff, |
|
2351 | (diff, | |
2303 | [('g', 'git', None, _('use git extended diff format')), |
|
2352 | commands.diffopts + commands.diffopts2 + commands.walkopts, | |
2304 | ('U', 'unified', 3, _('number of lines of context to show')), |
|
2353 | _('hg qdiff [OPTION]... [FILE]...')), | |
2305 | ] + commands.walkopts, |
|
|||
2306 | _('hg qdiff [-I] [-X] [-U NUM] [-g] [FILE]...')), |
|
|||
2307 | "qdelete|qremove|qrm": |
|
2354 | "qdelete|qremove|qrm": | |
2308 | (delete, |
|
2355 | (delete, | |
2309 | [('k', 'keep', None, _('keep patch file')), |
|
2356 | [('k', 'keep', None, _('keep patch file')), | |
@@ -2395,9 +2442,14 cmdtable = { | |||||
2395 | _('hg qseries [-ms]')), |
|
2442 | _('hg qseries [-ms]')), | |
2396 | "^strip": |
|
2443 | "^strip": | |
2397 | (strip, |
|
2444 | (strip, | |
2398 |
[(' |
|
2445 | [('f', 'force', None, _('force removal with local changes')), | |
|
2446 | ('b', 'backup', None, _('bundle unrelated changesets')), | |||
2399 | ('n', 'nobackup', None, _('no backups'))], |
|
2447 | ('n', 'nobackup', None, _('no backups'))], | |
2400 | _('hg strip [-f] [-b] [-n] REV')), |
|
2448 | _('hg strip [-f] [-b] [-n] REV')), | |
2401 | "qtop": (top, [] + seriesopts, _('hg qtop [-s]')), |
|
2449 | "qtop": (top, [] + seriesopts, _('hg qtop [-s]')), | |
2402 | "qunapplied": (unapplied, [] + seriesopts, _('hg qunapplied [-s] [PATCH]')), |
|
2450 | "qunapplied": (unapplied, [] + seriesopts, _('hg qunapplied [-s] [PATCH]')), | |
|
2451 | "qfinish": | |||
|
2452 | (finish, | |||
|
2453 | [('a', 'applied', None, _('finish all applied changesets'))], | |||
|
2454 | _('hg qfinish [-a] [REV...]')), | |||
2403 | } |
|
2455 | } |
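
The new qfinish command above reduces to resolving a revision range and handing it to queue.finish(). A minimal sketch of that flow, assuming a repository with mq loaded (the function name finish_applied is illustrative only, not part of the patch):

    from mercurial import cmdutil
    from mercurial.i18n import _

    def finish_applied(ui, repo, revrange=('qbase:qtip',)):
        # 'qbase:qtip' covers every currently applied mq patch
        q = repo.mq
        if not q.applied:
            ui.status(_('no patches applied\n'))
            return 0
        revs = cmdutil.revrange(repo, revrange)
        q.finish(repo, revs)   # move the revisions out of mq control
        q.save_dirty()         # persist the updated series/status files
        return 0
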
@@ -156,9 +156,7 class notifier(object): | |||||
156 | if fnmatch.fnmatch(self.repo.root, pat): |
|
156 | if fnmatch.fnmatch(self.repo.root, pat): | |
157 | for user in users.split(','): |
|
157 | for user in users.split(','): | |
158 | subs[self.fixmail(user)] = 1 |
|
158 | subs[self.fixmail(user)] = 1 | |
159 | subs = subs.keys() |
|
159 | return util.sort(subs) | |
160 | subs.sort() |
|
|||
161 | return subs |
|
|||
162 |
|
160 | |||
163 | def url(self, path=None): |
|
161 | def url(self, path=None): | |
164 | return self.ui.config('web', 'baseurl') + (path or self.root) |
|
162 | return self.ui.config('web', 'baseurl') + (path or self.root) | |
@@ -269,11 +267,11 def hook(ui, repo, hooktype, node=None, | |||||
269 | node = bin(node) |
|
267 | node = bin(node) | |
270 | ui.pushbuffer() |
|
268 | ui.pushbuffer() | |
271 | if hooktype == 'changegroup': |
|
269 | if hooktype == 'changegroup': | |
272 | start = repo.changelog.rev(node) |
|
270 | start = repo[node].rev() | |
273 | end = repo.changelog.count() |
|
271 | end = len(repo) | |
274 | count = end - start |
|
272 | count = end - start | |
275 | for rev in xrange(start, end): |
|
273 | for rev in xrange(start, end): | |
276 | n.node(repo.changelog.node(rev)) |
|
274 | n.node(repo[node].rev()) | |
277 | n.diff(node, repo.changelog.tip()) |
|
275 | n.diff(node, repo.changelog.tip()) | |
278 | else: |
|
276 | else: | |
279 | count = 1 |
|
277 | count = 1 |
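
Several hunks in this series swap the keys()/sort() idiom for util.sort(), which is assumed here to return a new sorted list built from any iterable. A standalone equivalent, for reference only:

    def sort(seq):
        # return a sorted copy without mutating the input
        l = list(seq)
        l.sort()
        return l

    # with it, the subscriber hunk above collapses to: return sort(subs)
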
@@ -10,26 +10,56 | |||||
10 | # [extension] |
|
10 | # [extension] | |
11 | # hgext.pager = |
|
11 | # hgext.pager = | |
12 | # |
|
12 | # | |
13 | # To set the pager that should be used, set the application variable: |
|
13 | # Run "hg help pager" to get info on configuration. | |
14 | # |
|
14 | ||
15 | # [pager] |
|
15 | '''browse command output with external pager | |
16 | # pager = LESS='FSRX' less |
|
16 | ||
17 | # |
|
17 | To set the pager that should be used, set the application variable: | |
18 | # If no pager is set, the pager extensions uses the environment |
|
18 | ||
19 | # variable $PAGER. If neither pager.pager, nor $PAGER is set, no pager |
|
19 | [pager] | |
20 | # is used. |
|
20 | pager = LESS='FSRX' less | |
21 | # |
|
21 | ||
22 | # If you notice "BROKEN PIPE" error messages, you can disable them |
|
22 | If no pager is set, the pager extension uses the environment | |
23 | # by setting: |
|
23 | variable $PAGER. If neither pager.pager, nor $PAGER is set, no pager | |
24 | # |
|
24 | is used. | |
25 | # [pager] |
|
25 | ||
26 | # quiet = True |
|
26 | If you notice "BROKEN PIPE" error messages, you can disable them | |
|
27 | by setting: | |||
|
28 | ||||
|
29 | [pager] | |||
|
30 | quiet = True | |||
|
31 | ||||
|
32 | You can disable the pager for certain commands by adding them to the | |||
|
33 | pager.ignore list: | |||
|
34 | ||||
|
35 | [pager] | |||
|
36 | ignore = version, help, update | |||
|
37 | ||||
|
38 | You can also enable the pager only for certain commands using pager.attend: | |||
|
39 | ||||
|
40 | [pager] | |||
|
41 | attend = log | |||
|
42 | ||||
|
43 | If pager.attend is present, pager.ignore will be ignored. | |||
|
44 | ||||
|
45 | To ignore global commands like "hg version" or "hg help", you have to specify | |||
|
46 | them in the global .hgrc | |||
|
47 | ''' | |||
27 |
|
48 | |||
28 | import sys, os, signal |
|
49 | import sys, os, signal | |
|
50 | from mercurial import dispatch, util | |||
29 |
|
51 | |||
30 | def uisetup(ui): |
|
52 | def uisetup(ui): | |
|
53 | def pagecmd(ui, options, cmd, cmdfunc): | |||
31 | p = ui.config("pager", "pager", os.environ.get("PAGER")) |
|
54 | p = ui.config("pager", "pager", os.environ.get("PAGER")) | |
32 | if p and sys.stdout.isatty() and '--debugger' not in sys.argv: |
|
55 | if p and sys.stdout.isatty() and '--debugger' not in sys.argv: | |
|
56 | attend = ui.configlist('pager', 'attend') | |||
|
57 | if (cmd in attend or | |||
|
58 | (cmd not in ui.configlist('pager', 'ignore') and not attend)): | |||
|
59 | sys.stderr = sys.stdout = util.popen(p, "wb") | |||
33 | if ui.configbool('pager', 'quiet'): |
|
60 | if ui.configbool('pager', 'quiet'): | |
34 | signal.signal(signal.SIGPIPE, signal.SIG_DFL) |
|
61 | signal.signal(signal.SIGPIPE, signal.SIG_DFL) | |
35 | sys.stderr = sys.stdout = os.popen(p, "wb") |
|
62 | return oldrun(ui, options, cmd, cmdfunc) | |
|
63 | ||||
|
64 | oldrun = dispatch._runcommand | |||
|
65 | dispatch._runcommand = pagecmd |
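
The pagecmd wrapper above starts the pager when the command appears in pager.attend, or, when no attend list is configured, when it is not listed in pager.ignore. The same precedence isolated as a pure function (a sketch, not part of the extension):

    def wants_pager(cmd, attend, ignore):
        # attend, when non-empty, takes precedence over ignore
        if attend:
            return cmd in attend
        return cmd not in ignore

    assert wants_pager('log', ['log'], [])
    assert not wants_pager('help', [], ['help', 'version'])
    assert not wants_pager('help', ['log'], [])   # attend overrides ignore
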
@@ -1,72 +1,69 | |||||
1 | # Command for sending a collection of Mercurial changesets as a series |
|
1 | '''sending Mercurial changesets as a series of patch emails | |
2 | # of patch emails. |
|
2 | ||
3 | # |
|
3 | The series is started off with a "[PATCH 0 of N]" introduction, | |
4 | # The series is started off with a "[PATCH 0 of N]" introduction, |
|
4 | which describes the series as a whole. | |
5 | # which describes the series as a whole. |
|
5 | ||
6 | # |
|
6 | Each patch email has a Subject line of "[PATCH M of N] ...", using | |
7 | # Each patch email has a Subject line of "[PATCH M of N] ...", using |
|
7 | the first line of the changeset description as the subject text. | |
8 | # the first line of the changeset description as the subject text. |
|
8 | The message contains two or three body parts: | |
9 | # The message contains two or three body parts: |
|
9 | ||
10 | # |
|
10 | The remainder of the changeset description. | |
11 | # The remainder of the changeset description. |
|
11 | ||
12 | # |
|
12 | [Optional] If the diffstat program is installed, the result of | |
13 | # [Optional] If the diffstat program is installed, the result of |
|
13 | running diffstat on the patch. | |
14 | # running diffstat on the patch. |
|
14 | ||
15 | # |
|
15 | The patch itself, as generated by "hg export". | |
16 | # The patch itself, as generated by "hg export". |
|
16 | ||
17 | # |
|
17 | Each message refers to all of its predecessors using the In-Reply-To | |
18 | # Each message refers to all of its predecessors using the In-Reply-To |
|
18 | and References headers, so they will show up as a sequence in | |
19 | # and References headers, so they will show up as a sequence in |
|
19 | threaded mail and news readers, and in mail archives. | |
20 | # threaded mail and news readers, and in mail archives. |
|
20 | ||
21 | # |
|
21 | For each changeset, you will be prompted with a diffstat summary and | |
22 | # For each changeset, you will be prompted with a diffstat summary and |
|
22 | the changeset summary, so you can be sure you are sending the right changes. | |
23 | # the changeset summary, so you can be sure you are sending the right |
|
23 | ||
24 | # changes. |
|
24 | To enable this extension: | |
25 | # |
|
25 | ||
26 | # To enable this extension: |
|
26 | [extensions] | |
27 | # |
|
27 | hgext.patchbomb = | |
28 | # [extensions] |
|
28 | ||
29 | # hgext.patchbomb = |
|
29 | To configure other defaults, add a section like this to your hgrc file: | |
30 | # |
|
|||
31 | # To configure other defaults, add a section like this to your hgrc |
|
|||
32 | # file: |
|
|||
33 | # |
|
|||
34 | # [email] |
|
|||
35 | # from = My Name <my@email> |
|
|||
36 | # to = recipient1, recipient2, ... |
|
|||
37 | # cc = cc1, cc2, ... |
|
|||
38 | # bcc = bcc1, bcc2, ... |
|
|||
39 | # |
|
|||
40 | # Then you can use the "hg email" command to mail a series of changesets |
|
|||
41 | # as a patchbomb. |
|
|||
42 | # |
|
|||
43 | # To avoid sending patches prematurely, it is a good idea to first run |
|
|||
44 | # the "email" command with the "-n" option (test only). You will be |
|
|||
45 | # prompted for an email recipient address, a subject and an introductory |
|
|||
46 | # message describing the patches of your patchbomb. Then when all is |
|
|||
47 | # done, patchbomb messages are displayed. If PAGER environment variable |
|
|||
48 | # is set, your pager will be fired up once for each patchbomb message, so |
|
|||
49 | # you can verify everything is alright. |
|
|||
50 | # |
|
|||
51 | # The "-m" (mbox) option is also very useful. Instead of previewing |
|
|||
52 | # each patchbomb message in a pager or sending the messages directly, |
|
|||
53 | # it will create a UNIX mailbox file with the patch emails. This |
|
|||
54 | # mailbox file can be previewed with any mail user agent which supports |
|
|||
55 | # UNIX mbox files, i.e. with mutt: |
|
|||
56 | # |
|
|||
57 | # % mutt -R -f mbox |
|
|||
58 | # |
|
|||
59 | # When you are previewing the patchbomb messages, you can use `formail' |
|
|||
60 | # (a utility that is commonly installed as part of the procmail package), |
|
|||
61 | # to send each message out: |
|
|||
62 | # |
|
|||
63 | # % formail -s sendmail -bm -t < mbox |
|
|||
64 | # |
|
|||
65 | # That should be all. Now your patchbomb is on its way out. |
|
|||
66 |
|
|
30 | ||
67 | import os, errno, socket, tempfile |
|
31 | [email] | |
|
32 | from = My Name <my@email> | |||
|
33 | to = recipient1, recipient2, ... | |||
|
34 | cc = cc1, cc2, ... | |||
|
35 | bcc = bcc1, bcc2, ... | |||
|
36 | ||||
|
37 | Then you can use the "hg email" command to mail a series of changesets | |||
|
38 | as a patchbomb. | |||
|
39 | ||||
|
40 | To avoid sending patches prematurely, it is a good idea to first run | |||
|
41 | the "email" command with the "-n" option (test only). You will be | |||
|
42 | prompted for an email recipient address, a subject and an introductory | |||
|
43 | message describing the patches of your patchbomb. Then when all is | |||
|
44 | done, patchbomb messages are displayed. If PAGER environment variable | |||
|
45 | is set, your pager will be fired up once for each patchbomb message, so | |||
|
46 | you can verify everything is alright. | |||
|
47 | ||||
|
48 | The "-m" (mbox) option is also very useful. Instead of previewing | |||
|
49 | each patchbomb message in a pager or sending the messages directly, | |||
|
50 | it will create a UNIX mailbox file with the patch emails. This | |||
|
51 | mailbox file can be previewed with any mail user agent which supports | |||
|
52 | UNIX mbox files, i.e. with mutt: | |||
|
53 | ||||
|
54 | % mutt -R -f mbox | |||
|
55 | ||||
|
56 | When you are previewing the patchbomb messages, you can use `formail' | |||
|
57 | (a utility that is commonly installed as part of the procmail package), | |||
|
58 | to send each message out: | |||
|
59 | ||||
|
60 | % formail -s sendmail -bm -t < mbox | |||
|
61 | ||||
|
62 | That should be all. Now your patchbomb is on its way out.''' | |||
|
63 | ||||
|
64 | import os, errno, socket, tempfile, cStringIO | |||
68 | import email.MIMEMultipart, email.MIMEText, email.MIMEBase |
|
65 | import email.MIMEMultipart, email.MIMEText, email.MIMEBase | |
69 | import email.Utils, email.Encoders |
|
66 | import email.Utils, email.Encoders, email.Generator | |
70 | from mercurial import cmdutil, commands, hg, mail, patch, util |
|
67 | from mercurial import cmdutil, commands, hg, mail, patch, util | |
71 | from mercurial.i18n import _ |
|
68 | from mercurial.i18n import _ | |
72 | from mercurial.node import bin |
|
69 | from mercurial.node import bin | |
@@ -404,11 +401,12 def patchbomb(ui, repo, *revs, **opts): | |||||
404 | ui.status('Displaying ', m['Subject'], ' ...\n') |
|
401 | ui.status('Displaying ', m['Subject'], ' ...\n') | |
405 | ui.flush() |
|
402 | ui.flush() | |
406 | if 'PAGER' in os.environ: |
|
403 | if 'PAGER' in os.environ: | |
407 |
fp = |
|
404 | fp = util.popen(os.environ['PAGER'], 'w') | |
408 | else: |
|
405 | else: | |
409 | fp = ui |
|
406 | fp = ui | |
|
407 | generator = email.Generator.Generator(fp, mangle_from_=False) | |||
410 | try: |
|
408 | try: | |
411 | fp.write(m.as_string(0)) |
|
409 | generator.flatten(m, 0) | |
412 | fp.write('\n') |
|
410 | fp.write('\n') | |
413 | except IOError, inst: |
|
411 | except IOError, inst: | |
414 | if inst.errno != errno.EPIPE: |
|
412 | if inst.errno != errno.EPIPE: | |
@@ -418,9 +416,10 def patchbomb(ui, repo, *revs, **opts): | |||||
418 | elif opts.get('mbox'): |
|
416 | elif opts.get('mbox'): | |
419 | ui.status('Writing ', m['Subject'], ' ...\n') |
|
417 | ui.status('Writing ', m['Subject'], ' ...\n') | |
420 | fp = open(opts.get('mbox'), 'In-Reply-To' in m and 'ab+' or 'wb+') |
|
418 | fp = open(opts.get('mbox'), 'In-Reply-To' in m and 'ab+' or 'wb+') | |
|
419 | generator = email.Generator.Generator(fp, mangle_from_=True) | |||
421 | date = util.datestr(start_time, '%a %b %d %H:%M:%S %Y') |
|
420 | date = util.datestr(start_time, '%a %b %d %H:%M:%S %Y') | |
422 | fp.write('From %s %s\n' % (sender_addr, date)) |
|
421 | fp.write('From %s %s\n' % (sender_addr, date)) | |
423 | fp.write(m.as_string(0)) |
|
422 | generator.flatten(m, 0) | |
424 | fp.write('\n\n') |
|
423 | fp.write('\n\n') | |
425 | fp.close() |
|
424 | fp.close() | |
426 | else: |
|
425 | else: | |
@@ -429,7 +428,10 def patchbomb(ui, repo, *revs, **opts): | |||||
429 | ui.status('Sending ', m['Subject'], ' ...\n') |
|
428 | ui.status('Sending ', m['Subject'], ' ...\n') | |
430 | # Exim does not remove the Bcc field |
|
429 | # Exim does not remove the Bcc field | |
431 | del m['Bcc'] |
|
430 | del m['Bcc'] | |
432 | sendmail(sender, to + bcc + cc, m.as_string(0)) |
|
431 | fp = cStringIO.StringIO() | |
|
432 | generator = email.Generator.Generator(fp, mangle_from_=False) | |||
|
433 | generator.flatten(m, 0) | |||
|
434 | sendmail(sender, to + bcc + cc, fp.getvalue()) | |||
433 |
|
435 | |||
434 | cmdtable = { |
|
436 | cmdtable = { | |
435 | "email": |
|
437 | "email": |
@@ -27,79 +27,10 | |||||
27 | # along with this program; if not, write to the Free Software |
|
27 | # along with this program; if not, write to the Free Software | |
28 | # Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. |
|
28 | # Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. | |
29 |
|
29 | |||
30 | from mercurial import util, commands |
|
30 | from mercurial import util, commands, cmdutil | |
31 | from mercurial.i18n import _ |
|
31 | from mercurial.i18n import _ | |
32 | import os |
|
32 | import os | |
33 |
|
33 | |||
34 | def dopurge(ui, repo, dirs=None, act=True, ignored=False, |
|
|||
35 | abort_on_err=False, eol='\n', |
|
|||
36 | force=False, include=None, exclude=None): |
|
|||
37 | def error(msg): |
|
|||
38 | if abort_on_err: |
|
|||
39 | raise util.Abort(msg) |
|
|||
40 | else: |
|
|||
41 | ui.warn(_('warning: %s\n') % msg) |
|
|||
42 |
|
||||
43 | def remove(remove_func, name): |
|
|||
44 | if act: |
|
|||
45 | try: |
|
|||
46 | remove_func(os.path.join(repo.root, name)) |
|
|||
47 | except OSError, e: |
|
|||
48 | error(_('%s cannot be removed') % name) |
|
|||
49 | else: |
|
|||
50 | ui.write('%s%s' % (name, eol)) |
|
|||
51 |
|
||||
52 | if not force: |
|
|||
53 | _check_fs(ui, repo) |
|
|||
54 |
|
||||
55 | directories = [] |
|
|||
56 | files = [] |
|
|||
57 | missing = [] |
|
|||
58 | roots, match, anypats = util.cmdmatcher(repo.root, repo.getcwd(), dirs, |
|
|||
59 | include, exclude) |
|
|||
60 | for src, f, st in repo.dirstate.statwalk(files=roots, match=match, |
|
|||
61 | ignored=ignored, directories=True): |
|
|||
62 | if src == 'd': |
|
|||
63 | directories.append(f) |
|
|||
64 | elif src == 'm': |
|
|||
65 | missing.append(f) |
|
|||
66 | elif src == 'f' and f not in repo.dirstate: |
|
|||
67 | files.append(f) |
|
|||
68 |
|
||||
69 | directories.sort() |
|
|||
70 |
|
||||
71 | for f in files: |
|
|||
72 | if f not in repo.dirstate: |
|
|||
73 | ui.note(_('Removing file %s\n') % f) |
|
|||
74 | remove(os.remove, f) |
|
|||
75 |
|
||||
76 | for f in directories[::-1]: |
|
|||
77 | if match(f) and not os.listdir(repo.wjoin(f)): |
|
|||
78 | ui.note(_('Removing directory %s\n') % f) |
|
|||
79 | remove(os.rmdir, f) |
|
|||
80 |
|
||||
81 | def _check_fs(ui, repo): |
|
|||
82 | """Abort if there is the chance of having problems with name-mangling fs |
|
|||
83 |
|
||||
84 | In a name mangling filesystem (e.g. a case insensitive one) |
|
|||
85 | dirstate.walk() can yield filenames different from the ones |
|
|||
86 | stored in the dirstate. This already confuses the status and |
|
|||
87 | add commands, but with purge this may cause data loss. |
|
|||
88 |
|
||||
89 | To prevent this, this function will abort if there are uncommitted |
|
|||
90 | changes. |
|
|||
91 | """ |
|
|||
92 |
|
||||
93 | # We can't use (files, match) to do a partial walk here - we wouldn't |
|
|||
94 | # notice a modified README file if the user ran "hg purge readme" |
|
|||
95 | modified, added, removed, deleted = repo.status()[:4] |
|
|||
96 | if modified or added or removed or deleted: |
|
|||
97 | if not util.checkfolding(repo.path) and not ui.quiet: |
|
|||
98 | ui.warn(_("Purging on name mangling filesystems is not " |
|
|||
99 | "fully supported.\n")) |
|
|||
100 | raise util.Abort(_("outstanding uncommitted changes")) |
|
|||
101 |
|
||||
102 |
|
||||
103 | def purge(ui, repo, *dirs, **opts): |
|
34 | def purge(ui, repo, *dirs, **opts): | |
104 | '''removes files not tracked by mercurial |
|
35 | '''removes files not tracked by mercurial | |
105 |
|
36 | |||
@@ -125,25 +56,42 def purge(ui, repo, *dirs, **opts): | |||||
125 | files that this program would delete use the --print option. |
|
56 | files that this program would delete use the --print option. | |
126 | ''' |
|
57 | ''' | |
127 | act = not opts['print'] |
|
58 | act = not opts['print'] | |
128 | ignored = bool(opts['all']) |
|
59 | eol = '\n' | |
129 | abort_on_err = bool(opts['abort_on_err']) |
|
60 | if opts['print0']: | |
130 | eol = opts['print0'] and '\0' or '\n' |
|
61 | eol = '\0' | |
131 | if eol == '\0': |
|
62 | act = False # --print0 implies --print | |
132 | # --print0 implies --print |
|
|||
133 | act = False |
|
|||
134 | force = bool(opts['force']) |
|
|||
135 | include = opts['include'] |
|
|||
136 | exclude = opts['exclude'] |
|
|||
137 | dopurge(ui, repo, dirs, act, ignored, abort_on_err, |
|
|||
138 | eol, force, include, exclude) |
|
|||
139 |
|
63 | |||
|
64 | def remove(remove_func, name): | |||
|
65 | if act: | |||
|
66 | try: | |||
|
67 | remove_func(os.path.join(repo.root, name)) | |||
|
68 | except OSError, e: | |||
|
69 | m = _('%s cannot be removed') % name | |||
|
70 | if opts['abort_on_err']: | |||
|
71 | raise util.Abort(m) | |||
|
72 | ui.warn(_('warning: %s\n') % m) | |||
|
73 | else: | |||
|
74 | ui.write('%s%s' % (name, eol)) | |||
|
75 | ||||
|
76 | directories = [] | |||
|
77 | match = cmdutil.match(repo, dirs, opts) | |||
|
78 | match.dir = directories.append | |||
|
79 | status = repo.status(match=match, ignored=opts['all'], unknown=True) | |||
|
80 | ||||
|
81 | for f in util.sort(status[4] + status[5]): | |||
|
82 | ui.note(_('Removing file %s\n') % f) | |||
|
83 | remove(os.remove, f) | |||
|
84 | ||||
|
85 | for f in util.sort(directories)[::-1]: | |||
|
86 | if match(f) and not os.listdir(repo.wjoin(f)): | |||
|
87 | ui.note(_('Removing directory %s\n') % f) | |||
|
88 | remove(os.rmdir, f) | |||
140 |
|
89 | |||
141 | cmdtable = { |
|
90 | cmdtable = { | |
142 | 'purge|clean': |
|
91 | 'purge|clean': | |
143 | (purge, |
|
92 | (purge, | |
144 | [('a', 'abort-on-err', None, _('abort if an error occurs')), |
|
93 | [('a', 'abort-on-err', None, _('abort if an error occurs')), | |
145 | ('', 'all', None, _('purge ignored files too')), |
|
94 | ('', 'all', None, _('purge ignored files too')), | |
146 | ('f', 'force', None, _('purge even when there are uncommitted changes')), |
|
|||
147 | ('p', 'print', None, _('print the file names instead of deleting them')), |
|
95 | ('p', 'print', None, _('print the file names instead of deleting them')), | |
148 | ('0', 'print0', None, _('end filenames with NUL, for use with xargs' |
|
96 | ('0', 'print0', None, _('end filenames with NUL, for use with xargs' | |
149 | ' (implies -p)')), |
|
97 | ' (implies -p)')), |
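
The rewritten purge leans on the layout of the repo.status() result: index 4 holds unknown files and index 5 holds ignored files (only filled in when ignored=True is requested). A small sketch of the selection step under that assumption:

    def purge_candidates(status, include_ignored):
        # status is (modified, added, removed, deleted, unknown, ignored, clean)
        unknown, ignored = status[4], status[5]
        names = list(unknown)
        if include_ignored:
            names += ignored
        names.sort()
        return names

    assert purge_candidates(([], [], [], [], ['junk.o'], ['build/out.pyc'], []),
                            False) == ['junk.o']
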
@@ -389,7 +389,7 def dorecord(ui, repo, committer, *pats, | |||||
389 | if not ui.interactive: |
|
389 | if not ui.interactive: | |
390 | raise util.Abort(_('running non-interactively, use commit instead')) |
|
390 | raise util.Abort(_('running non-interactively, use commit instead')) | |
391 |
|
391 | |||
392 | def recordfunc(ui, repo, |
|
392 | def recordfunc(ui, repo, message, match, opts): | |
393 | """This is generic record driver. |
|
393 | """This is generic record driver. | |
394 |
|
394 | |||
395 | Its job is to interactively filter local changes, and accordingly |
|
395 | Its job is to interactively filter local changes, and accordingly | |
@@ -402,16 +402,16 def dorecord(ui, repo, committer, *pats, | |||||
402 | In the end we'll record interesting changes, and everything else will be |
|
402 | In the end we'll record interesting changes, and everything else will be | |
403 | left in place, so the user can continue his work. |
|
403 | left in place, so the user can continue his work. | |
404 | """ |
|
404 | """ | |
405 | if files: |
|
405 | if match.files(): | |
406 | changes = None |
|
406 | changes = None | |
407 | else: |
|
407 | else: | |
408 | changes = repo.status( |
|
408 | changes = repo.status(match=match)[:3] | |
409 |
modified, added, removed = changes |
|
409 | modified, added, removed = changes | |
410 |
|
|
410 | match = cmdutil.matchfiles(repo, modified + added + removed) | |
411 | diffopts = mdiff.diffopts(git=True, nodates=True) |
|
411 | diffopts = mdiff.diffopts(git=True, nodates=True) | |
412 | fp = cStringIO.StringIO() |
|
412 | fp = cStringIO.StringIO() | |
413 | patch.diff(repo, repo.dirstate.parents()[0], |
|
413 | patch.diff(repo, repo.dirstate.parents()[0], match=match, | |
414 |
|
|
414 | changes=changes, opts=diffopts, fp=fp) | |
415 | fp.seek(0) |
|
415 | fp.seek(0) | |
416 |
|
416 | |||
417 | # 1. filter patch, so we have intending-to apply subset of it |
|
417 | # 1. filter patch, so we have intending-to apply subset of it | |
@@ -423,14 +423,15 def dorecord(ui, repo, committer, *pats, | |||||
423 | try: contenders.update(dict.fromkeys(h.files())) |
|
423 | try: contenders.update(dict.fromkeys(h.files())) | |
424 | except AttributeError: pass |
|
424 | except AttributeError: pass | |
425 |
|
425 | |||
426 | newfiles = [f for f in files if f in contenders] |
|
426 | newfiles = [f for f in match.files() if f in contenders] | |
427 |
|
427 | |||
428 | if not newfiles: |
|
428 | if not newfiles: | |
429 | ui.status(_('no changes to record\n')) |
|
429 | ui.status(_('no changes to record\n')) | |
430 | return 0 |
|
430 | return 0 | |
431 |
|
431 | |||
432 | if changes is None: |
|
432 | if changes is None: | |
433 | changes = repo.status(files=newfiles, match=match)[:5] |
|
433 | match = cmdutil.matchfiles(repo, newfiles) | |
|
434 | changes = repo.status(match=match) | |||
434 | modified = dict.fromkeys(changes[0]) |
|
435 | modified = dict.fromkeys(changes[0]) | |
435 |
|
436 | |||
436 | # 2. backup changed files, so we can restore them in the end |
|
437 | # 2. backup changed files, so we can restore them in the end |
@@ -88,9 +88,7 class transplanter: | |||||
88 |
|
88 | |||
89 | def apply(self, repo, source, revmap, merges, opts={}): |
|
89 | def apply(self, repo, source, revmap, merges, opts={}): | |
90 | '''apply the revisions in revmap one by one in revision order''' |
|
90 | '''apply the revisions in revmap one by one in revision order''' | |
91 | revs = revmap.keys() |
|
91 | revs = util.sort(revmap) | |
92 | revs.sort() |
|
|||
93 |
|
||||
94 | p1, p2 = repo.dirstate.parents() |
|
92 | p1, p2 = repo.dirstate.parents() | |
95 | pulls = [] |
|
93 | pulls = [] | |
96 | diffopts = patch.diffopts(self.ui, opts) |
|
94 | diffopts = patch.diffopts(self.ui, opts) | |
@@ -310,9 +308,7 class transplanter: | |||||
310 | if not os.path.isdir(self.path): |
|
308 | if not os.path.isdir(self.path): | |
311 | os.mkdir(self.path) |
|
309 | os.mkdir(self.path) | |
312 | series = self.opener('series', 'w') |
|
310 | series = self.opener('series', 'w') | |
313 | revs = revmap.keys() |
|
311 | for rev in util.sort(revmap): | |
314 | revs.sort() |
|
|||
315 | for rev in revs: |
|
|||
316 | series.write(revlog.hex(revmap[rev]) + '\n') |
|
312 | series.write(revlog.hex(revmap[rev]) + '\n') | |
317 | if merges: |
|
313 | if merges: | |
318 | series.write('# Merges\n') |
|
314 | series.write('# Merges\n') | |
@@ -572,10 +568,6 def transplant(ui, repo, *revs, **opts): | |||||
572 | for r in merges: |
|
568 | for r in merges: | |
573 | revmap[source.changelog.rev(r)] = r |
|
569 | revmap[source.changelog.rev(r)] = r | |
574 |
|
570 | |||
575 | revs = revmap.keys() |
|
|||
576 | revs.sort() |
|
|||
577 | pulls = [] |
|
|||
578 |
|
||||
579 | tp.apply(repo, source, revmap, merges, opts) |
|
571 | tp.apply(repo, source, revmap, merges, opts) | |
580 | finally: |
|
572 | finally: | |
581 | if bundle: |
|
573 | if bundle: |
@@ -1,4 +1,4 | |||||
1 | # win32text.py - LF <-> CRLF translation utilities for Windows users |
|
1 | # win32text.py - LF <-> CRLF/CR translation utilities for Windows/Mac users | |
2 | # |
|
2 | # | |
3 | # This software may be used and distributed according to the terms |
|
3 | # This software may be used and distributed according to the terms | |
4 | # of the GNU General Public License, incorporated herein by reference. |
|
4 | # of the GNU General Public License, incorporated herein by reference. | |
@@ -9,95 +9,133 | |||||
9 | # hgext.win32text = |
|
9 | # hgext.win32text = | |
10 | # [encode] |
|
10 | # [encode] | |
11 | # ** = cleverencode: |
|
11 | # ** = cleverencode: | |
|
12 | # # or ** = macencode: | |||
12 | # [decode] |
|
13 | # [decode] | |
13 | # ** = cleverdecode: |
|
14 | # ** = cleverdecode: | |
|
15 | # # or ** = macdecode: | |||
14 | # |
|
16 | # | |
15 | # If not doing conversion, to make sure you do not commit CRLF by accident: |
|
17 | # If not doing conversion, to make sure you do not commit CRLF/CR by accident: | |
16 | # |
|
18 | # | |
17 | # [hooks] |
|
19 | # [hooks] | |
18 | # pretxncommit.crlf = python:hgext.win32text.forbidcrlf |
|
20 | # pretxncommit.crlf = python:hgext.win32text.forbidcrlf | |
|
21 | # # or pretxncommit.cr = python:hgext.win32text.forbidcr | |||
19 | # |
|
22 | # | |
20 |
# To do the same check on a server to prevent CRLF from being pushed or |
|
23 | # To do the same check on a server to prevent CRLF/CR from being pushed or | |
|
24 | # pulled: | |||
21 | # |
|
25 | # | |
22 | # [hooks] |
|
26 | # [hooks] | |
23 | # pretxnchangegroup.crlf = python:hgext.win32text.forbidcrlf |
|
27 | # pretxnchangegroup.crlf = python:hgext.win32text.forbidcrlf | |
|
28 | # # or pretxnchangegroup.cr = python:hgext.win32text.forbidcr | |||
24 |
|
29 | |||
25 | from mercurial.i18n import gettext as _ |
|
30 | from mercurial.i18n import gettext as _ | |
26 | from mercurial.node import bin, short |
|
31 | from mercurial.node import bin, short | |
|
32 | from mercurial import util | |||
27 | import re |
|
33 | import re | |
28 |
|
34 | |||
29 | # regexp for single LF without CR preceding. |
|
35 | # regexp for single LF without CR preceding. | |
30 | re_single_lf = re.compile('(^|[^\r])\n', re.MULTILINE) |
|
36 | re_single_lf = re.compile('(^|[^\r])\n', re.MULTILINE) | |
31 |
|
37 | |||
32 | def dumbdecode(s, cmd, ui=None, repo=None, filename=None, **kwargs): |
|
38 | newlinestr = {'\r\n': 'CRLF', '\r': 'CR'} | |
33 | # warn if already has CRLF in repository. |
|
39 | filterstr = {'\r\n': 'clever', '\r': 'mac'} | |
|
40 | ||||
|
41 | def checknewline(s, newline, ui=None, repo=None, filename=None): | |||
|
42 | # warn if already has 'newline' in repository. | |||
34 | # it might cause unexpected eol conversion. |
|
43 | # it might cause unexpected eol conversion. | |
35 | # see issue 302: |
|
44 | # see issue 302: | |
36 | # http://www.selenic.com/mercurial/bts/issue302 |
|
45 | # http://www.selenic.com/mercurial/bts/issue302 | |
37 |
if |
|
46 | if newline in s and ui and filename and repo: | |
38 |
ui.warn(_('WARNING: %s already has |
|
47 | ui.warn(_('WARNING: %s already has %s line endings\n' | |
39 | 'and does not need EOL conversion by the win32text plugin.\n' |
|
48 | 'and does not need EOL conversion by the win32text plugin.\n' | |
40 | 'Before your next commit, please reconsider your ' |
|
49 | 'Before your next commit, please reconsider your ' | |
41 | 'encode/decode settings in \nMercurial.ini or %s.\n') % |
|
50 | 'encode/decode settings in \nMercurial.ini or %s.\n') % | |
42 | (filename, repo.join('hgrc'))) |
|
51 | (filename, newlinestr[newline], repo.join('hgrc'))) | |
|
52 | ||||
|
53 | def dumbdecode(s, cmd, **kwargs): | |||
|
54 | checknewline(s, '\r\n', **kwargs) | |||
43 | # replace single LF to CRLF |
|
55 | # replace single LF to CRLF | |
44 | return re_single_lf.sub('\\1\r\n', s) |
|
56 | return re_single_lf.sub('\\1\r\n', s) | |
45 |
|
57 | |||
46 | def dumbencode(s, cmd): |
|
58 | def dumbencode(s, cmd): | |
47 | return s.replace('\r\n', '\n') |
|
59 | return s.replace('\r\n', '\n') | |
48 |
|
60 | |||
49 | def clevertest(s, cmd): |
|
61 | def macdumbdecode(s, cmd, **kwargs): | |
50 | if '\0' in s: return False |
|
62 | checknewline(s, '\r', **kwargs) | |
51 | return True |
|
63 | return s.replace('\n', '\r') | |
|
64 | ||||
|
65 | def macdumbencode(s, cmd): | |||
|
66 | return s.replace('\r', '\n') | |||
52 |
|
67 | |||
53 | def cleverdecode(s, cmd, **kwargs): |
|
68 | def cleverdecode(s, cmd, **kwargs): | |
54 | if clevertest(s, cmd): |
|
69 | if not util.binary(s): | |
55 | return dumbdecode(s, cmd, **kwargs) |
|
70 | return dumbdecode(s, cmd, **kwargs) | |
56 | return s |
|
71 | return s | |
57 |
|
72 | |||
58 | def cleverencode(s, cmd): |
|
73 | def cleverencode(s, cmd): | |
59 | if clevertest(s, cmd): |
|
74 | if not util.binary(s): | |
60 | return dumbencode(s, cmd) |
|
75 | return dumbencode(s, cmd) | |
61 | return s |
|
76 | return s | |
62 |
|
77 | |||
|
78 | def macdecode(s, cmd, **kwargs): | |||
|
79 | if not util.binary(s): | |||
|
80 | return macdumbdecode(s, cmd, **kwargs) | |||
|
81 | return s | |||
|
82 | ||||
|
83 | def macencode(s, cmd): | |||
|
84 | if not util.binary(s): | |||
|
85 | return macdumbencode(s, cmd) | |||
|
86 | return s | |||
|
87 | ||||
63 | _filters = { |
|
88 | _filters = { | |
64 | 'dumbdecode:': dumbdecode, |
|
89 | 'dumbdecode:': dumbdecode, | |
65 | 'dumbencode:': dumbencode, |
|
90 | 'dumbencode:': dumbencode, | |
66 | 'cleverdecode:': cleverdecode, |
|
91 | 'cleverdecode:': cleverdecode, | |
67 | 'cleverencode:': cleverencode, |
|
92 | 'cleverencode:': cleverencode, | |
|
93 | 'macdumbdecode:': macdumbdecode, | |||
|
94 | 'macdumbencode:': macdumbencode, | |||
|
95 | 'macdecode:': macdecode, | |||
|
96 | 'macencode:': macencode, | |||
68 | } |
|
97 | } | |
69 |
|
98 | |||
70 |
def forbid |
|
99 | def forbidnewline(ui, repo, hooktype, node, newline, **kwargs): | |
71 | halt = False |
|
100 | halt = False | |
72 |
for rev in xrange(repo |
|
101 | for rev in xrange(repo[node].rev(), len(repo)): | |
73 |
c = repo |
|
102 | c = repo[rev] | |
74 | for f in c.files(): |
|
103 | for f in c.files(): | |
75 | if f not in c: |
|
104 | if f not in c: | |
76 | continue |
|
105 | continue | |
77 | data = c[f].data() |
|
106 | data = c[f].data() | |
78 |
if |
|
107 | if not util.binary(data) and newline in data: | |
79 | if not halt: |
|
108 | if not halt: | |
80 | ui.warn(_('Attempt to commit or push text file(s) ' |
|
109 | ui.warn(_('Attempt to commit or push text file(s) ' | |
81 | 'using CRLF line endings\n')) |
|
110 | 'using %s line endings\n') % | |
|
111 | newlinestr[newline]) | |||
82 | ui.warn(_('in %s: %s\n') % (short(c.node()), f)) |
|
112 | ui.warn(_('in %s: %s\n') % (short(c.node()), f)) | |
83 | halt = True |
|
113 | halt = True | |
84 | if halt and hooktype == 'pretxnchangegroup': |
|
114 | if halt and hooktype == 'pretxnchangegroup': | |
|
115 | crlf = newlinestr[newline].lower() | |||
|
116 | filter = filterstr[newline] | |||
85 | ui.warn(_('\nTo prevent this mistake in your local repository,\n' |
|
117 | ui.warn(_('\nTo prevent this mistake in your local repository,\n' | |
86 | 'add to Mercurial.ini or .hg/hgrc:\n' |
|
118 | 'add to Mercurial.ini or .hg/hgrc:\n' | |
87 | '\n' |
|
119 | '\n' | |
88 | '[hooks]\n' |
|
120 | '[hooks]\n' | |
89 | 'pretxncommit.crlf = python:hgext.win32text.forbidcrlf\n' |
|
121 | 'pretxncommit.%s = python:hgext.win32text.forbid%s\n' | |
90 | '\n' |
|
122 | '\n' | |
91 | 'and also consider adding:\n' |
|
123 | 'and also consider adding:\n' | |
92 | '\n' |
|
124 | '\n' | |
93 | '[extensions]\n' |
|
125 | '[extensions]\n' | |
94 | 'hgext.win32text =\n' |
|
126 | 'hgext.win32text =\n' | |
95 | '[encode]\n' |
|
127 | '[encode]\n' | |
96 | '** = cleverencode:\n' |
|
128 | '** = %sencode:\n' | |
97 | '[decode]\n' |
|
129 | '[decode]\n' | |
98 | '** = cleverdecode:\n')) |
|
130 | '** = %sdecode:\n') % (crlf, crlf, filter, filter)) | |
99 | return halt |
|
131 | return halt | |
100 |
|
132 | |||
|
133 | def forbidcrlf(ui, repo, hooktype, node, **kwargs): | |||
|
134 | return forbidnewline(ui, repo, hooktype, node, '\r\n', **kwargs) | |||
|
135 | ||||
|
136 | def forbidcr(ui, repo, hooktype, node, **kwargs): | |||
|
137 | return forbidnewline(ui, repo, hooktype, node, '\r', **kwargs) | |||
|
138 | ||||
101 | def reposetup(ui, repo): |
|
139 | def reposetup(ui, repo): | |
102 | if not repo.local(): |
|
140 | if not repo.local(): | |
103 | return |
|
141 | return |
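
The new mac* filters are just the CRLF logic with '\r' substituted for '\r\n'; a quick standalone check of the round-trip behaviour (illustrative only):

    import re

    re_single_lf = re.compile('(^|[^\r])\n', re.MULTILINE)

    def dumbdecode(s):          # working copy gets CRLF
        return re_single_lf.sub('\\1\r\n', s)

    def dumbencode(s):          # repository keeps LF
        return s.replace('\r\n', '\n')

    def macdumbdecode(s):       # working copy gets CR
        return s.replace('\n', '\r')

    def macdumbencode(s):       # repository keeps LF
        return s.replace('\r', '\n')

    text = 'one\ntwo\n'
    assert dumbencode(dumbdecode(text)) == text
    assert macdumbencode(macdumbdecode(text)) == text
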
@@ -52,7 +52,8 class tarit: | |||||
52 | def _write_gzip_header(self): |
|
52 | def _write_gzip_header(self): | |
53 | self.fileobj.write('\037\213') # magic header |
|
53 | self.fileobj.write('\037\213') # magic header | |
54 | self.fileobj.write('\010') # compression method |
|
54 | self.fileobj.write('\010') # compression method | |
55 |
|
|
55 | # Python 2.6 deprecates self.filename | |
|
56 | fname = getattr(self, 'name', None) or self.filename | |||
56 | flags = 0 |
|
57 | flags = 0 | |
57 | if fname: |
|
58 | if fname: | |
58 | flags = gzip.FNAME |
|
59 | flags = gzip.FNAME | |
@@ -207,18 +208,17 def archive(repo, dest, node, kind, deco | |||||
207 | data = repo.wwritedata(name, data) |
|
208 | data = repo.wwritedata(name, data) | |
208 | archiver.addfile(name, mode, islink, data) |
|
209 | archiver.addfile(name, mode, islink, data) | |
209 |
|
210 | |||
210 | ctx = repo.changectx(node) |
|
|||
211 | if kind not in archivers: |
|
211 | if kind not in archivers: | |
212 | raise util.Abort(_("unknown archive type '%s'" % kind)) |
|
212 | raise util.Abort(_("unknown archive type '%s'" % kind)) | |
|
213 | ||||
|
214 | ctx = repo[node] | |||
213 | archiver = archivers[kind](dest, prefix, mtime or ctx.date()[0]) |
|
215 | archiver = archivers[kind](dest, prefix, mtime or ctx.date()[0]) | |
214 | m = ctx.manifest() |
|
216 | ||
215 | items = m.items() |
|
|||
216 | items.sort() |
|
|||
217 | if repo.ui.configbool("ui", "archivemeta", True): |
|
217 | if repo.ui.configbool("ui", "archivemeta", True): | |
218 | write('.hg_archival.txt', 0644, False, |
|
218 | write('.hg_archival.txt', 0644, False, | |
219 | lambda: 'repo: %s\nnode: %s\n' % ( |
|
219 | lambda: 'repo: %s\nnode: %s\n' % ( | |
220 | hex(repo.changelog.node(0)), hex(node))) |
|
220 | hex(repo.changelog.node(0)), hex(node))) | |
221 | for filename, filenode in items: |
|
221 | for f in ctx: | |
222 | write(filename, m.execf(filename) and 0755 or 0644, m.linkf(filename), |
|
222 | ff = ctx.flags(f) | |
223 | lambda: repo.file(filename).read(filenode)) |
|
223 | write(f, 'x' in ff and 0755 or 0644, 'l' in ff, ctx[f].data) | |
224 | archiver.done() |
|
224 | archiver.done() |
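
In the rewritten loop ctx.flags(f) replaces the old manifest queries: an 'x' in the flags string marks an executable file and an 'l' marks a symlink. A tiny helper spelling out the mapping assumed above (a sketch, not part of the patch):

    def mode_and_link(flags):
        # map a revlog flags string to (unix mode, is_symlink)
        if 'x' in flags:
            mode = 0755
        else:
            mode = 0644
        return mode, 'l' in flags

    assert mode_and_link('')  == (0644, False)
    assert mode_and_link('x') == (0755, False)
    assert mode_and_link('l') == (0644, True)
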
@@ -12,7 +12,7 of the GNU General Public License, incor | |||||
12 |
|
12 | |||
13 | from node import hex, nullid, short |
|
13 | from node import hex, nullid, short | |
14 | from i18n import _ |
|
14 | from i18n import _ | |
15 | import changegroup, util, os, struct, bz2, tempfile, shutil, mdiff |
|
15 | import changegroup, util, os, struct, bz2, zlib, tempfile, shutil, mdiff | |
16 | import repo, localrepo, changelog, manifest, filelog, revlog |
|
16 | import repo, localrepo, changelog, manifest, filelog, revlog | |
17 |
|
17 | |||
18 | class bundlerevlog(revlog.revlog): |
|
18 | class bundlerevlog(revlog.revlog): | |
@@ -34,7 +34,7 class bundlerevlog(revlog.revlog): | |||||
34 | for chunk in changegroup.chunkiter(bundlefile): |
|
34 | for chunk in changegroup.chunkiter(bundlefile): | |
35 | pos = bundlefile.tell() |
|
35 | pos = bundlefile.tell() | |
36 | yield chunk, pos - len(chunk) |
|
36 | yield chunk, pos - len(chunk) | |
37 | n = self.count() |
|
37 | n = len(self) | |
38 | prev = None |
|
38 | prev = None | |
39 | for chunk, start in chunkpositer(): |
|
39 | for chunk, start in chunkpositer(): | |
40 | size = len(chunk) |
|
40 | size = len(chunk) | |
@@ -127,7 +127,7 class bundlerevlog(revlog.revlog): | |||||
127 |
|
127 | |||
128 | def addrevision(self, text, transaction, link, p1=None, p2=None, d=None): |
|
128 | def addrevision(self, text, transaction, link, p1=None, p2=None, d=None): | |
129 | raise NotImplementedError |
|
129 | raise NotImplementedError | |
130 |
def addgroup(self, revs, linkmapper, transaction |
|
130 | def addgroup(self, revs, linkmapper, transaction): | |
131 | raise NotImplementedError |
|
131 | raise NotImplementedError | |
132 | def strip(self, rev, minlink): |
|
132 | def strip(self, rev, minlink): | |
133 | raise NotImplementedError |
|
133 | raise NotImplementedError | |
@@ -173,14 +173,17 class bundlerepository(localrepo.localre | |||||
173 | raise util.Abort(_("%s: not a Mercurial bundle file") % bundlename) |
|
173 | raise util.Abort(_("%s: not a Mercurial bundle file") % bundlename) | |
174 | elif not header.startswith("HG10"): |
|
174 | elif not header.startswith("HG10"): | |
175 | raise util.Abort(_("%s: unknown bundle version") % bundlename) |
|
175 | raise util.Abort(_("%s: unknown bundle version") % bundlename) | |
176 | elif header == "HG10BZ": |
|
176 | elif (header == "HG10BZ") or (header == "HG10GZ"): | |
177 | fdtemp, temp = tempfile.mkstemp(prefix="hg-bundle-", |
|
177 | fdtemp, temp = tempfile.mkstemp(prefix="hg-bundle-", | |
178 | suffix=".hg10un", dir=self.path) |
|
178 | suffix=".hg10un", dir=self.path) | |
179 | self.tempfile = temp |
|
179 | self.tempfile = temp | |
180 | fptemp = os.fdopen(fdtemp, 'wb') |
|
180 | fptemp = os.fdopen(fdtemp, 'wb') | |
181 | def generator(f): |
|
181 | def generator(f): | |
|
182 | if header == "HG10BZ": | |||
182 | zd = bz2.BZ2Decompressor() |
|
183 | zd = bz2.BZ2Decompressor() | |
183 | zd.decompress("BZ") |
|
184 | zd.decompress("BZ") | |
|
185 | elif header == "HG10GZ": | |||
|
186 | zd = zlib.decompressobj() | |||
184 | for chunk in f: |
|
187 | for chunk in f: | |
185 | yield zd.decompress(chunk) |
|
188 | yield zd.decompress(chunk) | |
186 | gen = generator(util.filechunkiter(self.bundlefile, 4096)) |
|
189 | gen = generator(util.filechunkiter(self.bundlefile, 4096)) |
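
The HG10GZ branch mirrors the existing bzip2 path with zlib's streaming decompressor. A standalone sketch of the same pattern over an in-memory chunk stream (the payload here is made up just to exercise it):

    import bz2, zlib

    def decompress_chunks(chunks, header):
        # pick a streaming decompressor per bundle header, as in the hunk above
        if header == "HG10BZ":
            zd = bz2.BZ2Decompressor()
            zd.decompress("BZ")      # re-feed the stripped magic
        elif header == "HG10GZ":
            zd = zlib.decompressobj()
        for chunk in chunks:
            yield zd.decompress(chunk)

    payload = "changegroup data" * 100
    gz = zlib.compress(payload)
    chunks = [gz[i:i + 64] for i in xrange(0, len(gz), 64)]
    assert ''.join(decompress_chunks(chunks, "HG10GZ")) == payload
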
@@ -82,7 +82,7 class changelog(revlog): | |||||
82 | "delay visibility of index updates to other readers" |
|
82 | "delay visibility of index updates to other readers" | |
83 | self._realopener = self.opener |
|
83 | self._realopener = self.opener | |
84 | self.opener = self._delayopener |
|
84 | self.opener = self._delayopener | |
85 | self._delaycount = self.count() |
|
85 | self._delaycount = len(self) | |
86 | self._delaybuf = [] |
|
86 | self._delaybuf = [] | |
87 | self._delayname = None |
|
87 | self._delayname = None | |
88 |
|
88 | |||
@@ -108,7 +108,7 class changelog(revlog): | |||||
108 | # if we're doing an initial clone, divert to another file |
|
108 | # if we're doing an initial clone, divert to another file | |
109 | if self._delaycount == 0: |
|
109 | if self._delaycount == 0: | |
110 | self._delayname = fp.name |
|
110 | self._delayname = fp.name | |
111 | if not self.count(): |
|
111 | if not len(self): | |
112 | # make sure to truncate the file |
|
112 | # make sure to truncate the file | |
113 | mode = mode.replace('a', 'w') |
|
113 | mode = mode.replace('a', 'w') | |
114 | return self._realopener(name + ".a", mode) |
|
114 | return self._realopener(name + ".a", mode) | |
@@ -130,9 +130,7 class changelog(revlog): | |||||
130 |
|
130 | |||
131 | def encode_extra(self, d): |
|
131 | def encode_extra(self, d): | |
132 | # keys must be sorted to produce a deterministic changelog entry |
|
132 | # keys must be sorted to produce a deterministic changelog entry | |
133 | keys = d.keys() |
|
133 | items = [_string_escape('%s:%s' % (k, d[k])) for k in util.sort(d)] | |
134 | keys.sort() |
|
|||
135 | items = [_string_escape('%s:%s' % (k, d[k])) for k in keys] |
|
|||
136 | return "\0".join(items) |
|
134 | return "\0".join(items) | |
137 |
|
135 | |||
138 | def read(self, node): |
|
136 | def read(self, node): | |
@@ -175,7 +173,7 class changelog(revlog): | |||||
175 | files = l[3:] |
|
173 | files = l[3:] | |
176 | return (manifest, user, (time, timezone), files, desc, extra) |
|
174 | return (manifest, user, (time, timezone), files, desc, extra) | |
177 |
|
175 | |||
178 | def add(self, manifest, list, desc, transaction, p1=None, p2=None, |
|
176 | def add(self, manifest, files, desc, transaction, p1=None, p2=None, | |
179 | user=None, date=None, extra={}): |
|
177 | user=None, date=None, extra={}): | |
180 |
|
178 | |||
181 | user, desc = util.fromlocal(user), util.fromlocal(desc) |
|
179 | user, desc = util.fromlocal(user), util.fromlocal(desc) | |
@@ -189,7 +187,6 class changelog(revlog): | |||||
189 | if extra: |
|
187 | if extra: | |
190 | extra = self.encode_extra(extra) |
|
188 | extra = self.encode_extra(extra) | |
191 | parseddate = "%s %s" % (parseddate, extra) |
|
189 | parseddate = "%s %s" % (parseddate, extra) | |
192 | list.sort() |
|
190 | l = [hex(manifest), user, parseddate] + util.sort(files) + ["", desc] | |
193 | l = [hex(manifest), user, parseddate] + list + ["", desc] |
|
|||
194 | text = "\n".join(l) |
|
191 | text = "\n".join(l) | |
195 |
return self.addrevision(text, transaction, self |
|
192 | return self.addrevision(text, transaction, len(self), p1, p2) |
@@ -9,6 +9,7 from node import hex, nullid, nullrev, s | |||||
9 | from i18n import _ |
|
9 | from i18n import _ | |
10 | import os, sys, bisect, stat |
|
10 | import os, sys, bisect, stat | |
11 | import mdiff, bdiff, util, templater, templatefilters, patch, errno |
|
11 | import mdiff, bdiff, util, templater, templatefilters, patch, errno | |
|
12 | import match as _match | |||
12 |
|
13 | |||
13 | revrangesep = ':' |
|
14 | revrangesep = ':' | |
14 |
|
15 | |||
@@ -125,7 +126,7 def revpair(repo, revs): | |||||
125 | if revrangesep in revs[0]: |
|
126 | if revrangesep in revs[0]: | |
126 | start, end = revs[0].split(revrangesep, 1) |
|
127 | start, end = revs[0].split(revrangesep, 1) | |
127 | start = revfix(repo, start, 0) |
|
128 | start = revfix(repo, start, 0) | |
128 | end = revfix(repo, end, repo.changelog.count() - 1) |
|
129 | end = revfix(repo, end, len(repo) - 1) | |
129 | else: |
|
130 | else: | |
130 | start = revfix(repo, revs[0], None) |
|
131 | start = revfix(repo, revs[0], None) | |
131 | elif len(revs) == 2: |
|
132 | elif len(revs) == 2: | |
@@ -150,7 +151,7 def revrange(repo, revs): | |||||
150 | if revrangesep in spec: |
|
151 | if revrangesep in spec: | |
151 | start, end = spec.split(revrangesep, 1) |
|
152 | start, end = spec.split(revrangesep, 1) | |
152 | start = revfix(repo, start, 0) |
|
153 | start = revfix(repo, start, 0) | |
153 | end = revfix(repo, end, repo.changelog.count() - 1) |
|
154 | end = revfix(repo, end, len(repo) - 1) | |
154 | step = start > end and -1 or 1 |
|
155 | step = start > end and -1 or 1 | |
155 | for rev in xrange(start, end+step, step): |
|
156 | for rev in xrange(start, end+step, step): | |
156 | if rev in seen: |
|
157 | if rev in seen: | |
@@ -223,27 +224,28 def make_file(repo, pat, node=None, | |||||
223 | pathname), |
|
224 | pathname), | |
224 | mode) |
|
225 | mode) | |
225 |
|
226 | |||
226 | def matchpats(repo, pats=[], opts={}, globbed=False, default='relpath'): |
|
227 | def match(repo, pats=[], opts={}, globbed=False, default='relpath'): | |
227 | cwd = repo.getcwd() |
|
228 | if not globbed and default == 'relpath': | |
228 | return util.cmdmatcher(repo.root, cwd, pats or [], opts.get('include'), |
|
229 | pats = util.expand_glob(pats or []) | |
229 | opts.get('exclude'), globbed=globbed, |
|
230 | m = _match.match(repo.root, repo.getcwd(), pats, | |
230 | default=default) |
|
231 | opts.get('include'), opts.get('exclude'), default) | |
|
232 | def badfn(f, msg): | |||
|
233 | repo.ui.warn("%s: %s\n" % (m.rel(f), msg)) | |||
|
234 | return False | |||
|
235 | m.bad = badfn | |||
|
236 | return m | |||
231 |
|
237 | |||
232 | def walk(repo, pats=[], opts={}, node=None, badmatch=None, globbed=False, |
|
238 | def matchall(repo): | |
233 | default=None): |
|
239 | return _match.always(repo.root, repo.getcwd()) | |
234 | files, matchfn, anypats = matchpats(repo, pats, opts, globbed=globbed, |
|
240 | ||
235 | default=default) |
|
241 | def matchfiles(repo, files): | |
236 | exact = dict.fromkeys(files) |
|
242 | return _match.exact(repo.root, repo.getcwd(), files) | |
237 | cwd = repo.getcwd() |
|
|||
238 | for src, fn in repo.walk(node=node, files=files, match=matchfn, |
|
|||
239 | badmatch=badmatch): |
|
|||
240 | yield src, fn, repo.pathto(fn, cwd), fn in exact |
|
|||
241 |
|
243 | |||
242 | def findrenames(repo, added=None, removed=None, threshold=0.5): |
|
244 | def findrenames(repo, added=None, removed=None, threshold=0.5): | |
243 | '''find renamed files -- yields (before, after, score) tuples''' |
|
245 | '''find renamed files -- yields (before, after, score) tuples''' | |
244 | if added is None or removed is None: |
|
246 | if added is None or removed is None: | |
245 | added, removed = repo.status()[1:3] |
|
247 | added, removed = repo.status()[1:3] | |
246 |
ctx = repo. |
|
248 | ctx = repo['.'] | |
247 | for a in added: |
|
249 | for a in added: | |
248 | aa = repo.wread(a) |
|
250 | aa = repo.wread(a) | |
249 | bestname, bestscore = None, threshold |
|
251 | bestname, bestscore = None, threshold | |
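
The match, matchall and matchfiles helpers above all return a matcher object; callers elsewhere in this series use m.files(), m.anypats(), m.rel(), m.exact() and repo.walk(m) in place of the old (files, matchfn, anypats) tuple. A sketch of the new calling convention, using only methods that appear in these hunks (the function name list_tracked is illustrative only):

    from mercurial import cmdutil

    def list_tracked(ui, repo, pats, opts):
        m = cmdutil.match(repo, pats, opts)    # build a pattern matcher
        for abs in repo.walk(m):               # walk yields repository paths
            if abs in repo.dirstate:
                prefix = m.exact(abs) and '* ' or '  '
                ui.write('%s%s\n' % (prefix, m.rel(abs)))
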
@@ -275,16 +277,19 def addremove(repo, pats=[], opts={}, dr | |||||
275 | add, remove = [], [] |
|
277 | add, remove = [], [] | |
276 | mapping = {} |
|
278 | mapping = {} | |
277 | audit_path = util.path_auditor(repo.root) |
|
279 | audit_path = util.path_auditor(repo.root) | |
278 | for src, abs, rel, exact in walk(repo, pats, opts): |
|
280 | m = match(repo, pats, opts) | |
|
281 | for abs in repo.walk(m): | |||
279 | target = repo.wjoin(abs) |
|
282 | target = repo.wjoin(abs) | |
280 | good = True |
|
283 | good = True | |
281 | try: |
|
284 | try: | |
282 | audit_path(abs) |
|
285 | audit_path(abs) | |
283 | except: |
|
286 | except: | |
284 | good = False |
|
287 | good = False | |
285 | if src == 'f' and good and abs not in repo.dirstate: |
|
288 | rel = m.rel(abs) | |
|
289 | exact = m.exact(abs) | |||
|
290 | if good and abs not in repo.dirstate: | |||
286 | add.append(abs) |
|
291 | add.append(abs) | |
287 | mapping[abs] = rel, exact |
|
292 | mapping[abs] = rel, m.exact(abs) | |
288 | if repo.ui.verbose or not exact: |
|
293 | if repo.ui.verbose or not exact: | |
289 | repo.ui.status(_('adding %s\n') % ((pats and rel) or abs)) |
|
294 | repo.ui.status(_('adding %s\n') % ((pats and rel) or abs)) | |
290 | if repo.dirstate[abs] != 'r' and (not good or not util.lexists(target) |
|
295 | if repo.dirstate[abs] != 'r' and (not good or not util.lexists(target) | |
@@ -319,8 +324,11 def copy(ui, repo, pats, opts, rename=Fa | |||||
319 |
|
324 | |||
320 | def walkpat(pat): |
|
325 | def walkpat(pat): | |
321 | srcs = [] |
|
326 | srcs = [] | |
322 |
|
|
327 | m = match(repo, [pat], opts, globbed=True) | |
|
328 | for abs in repo.walk(m): | |||
323 | state = repo.dirstate[abs] |
|
329 | state = repo.dirstate[abs] | |
|
330 | rel = m.rel(abs) | |||
|
331 | exact = m.exact(abs) | |||
324 | if state in '?r': |
|
332 | if state in '?r': | |
325 | if exact and state == '?': |
|
333 | if exact and state == '?': | |
326 | ui.warn(_('%s: not copying - file is not managed\n') % rel) |
|
334 | ui.warn(_('%s: not copying - file is not managed\n') % rel) | |
@@ -645,9 +653,7 class changeset_printer(object): | |||||
645 | self.ui.write(_("copies: %s\n") % ' '.join(copies)) |
|
653 | self.ui.write(_("copies: %s\n") % ' '.join(copies)) | |
646 |
|
654 | |||
647 | if extra and self.ui.debugflag: |
|
655 | if extra and self.ui.debugflag: | |
648 |
|
|
656 | for key, value in util.sort(extra.items()): | |
649 | extraitems.sort() |
|
|||
650 | for key, value in extraitems: |
|
|||
651 | self.ui.write(_("extra: %s=%s\n") |
|
657 | self.ui.write(_("extra: %s=%s\n") | |
652 | % (key, value.encode('string_escape'))) |
|
658 | % (key, value.encode('string_escape'))) | |
653 |
|
659 | |||
@@ -791,9 +797,7 class changeset_templater(changeset_prin | |||||
791 | return showlist('tag', self.repo.nodetags(changenode), **args) |
|
797 | return showlist('tag', self.repo.nodetags(changenode), **args) | |
792 |
|
798 | |||
793 | def showextras(**args): |
|
799 | def showextras(**args): | |
794 |
|
|
800 | for key, value in util.sort(changes[5].items()): | |
795 | extras.sort() |
|
|||
796 | for key, value in extras: |
|
|||
797 | args = args.copy() |
|
801 | args = args.copy() | |
798 | args.update(dict(key=key, value=value)) |
|
802 | args.update(dict(key=key, value=value)) | |
799 | yield self.t('extra', **args) |
|
803 | yield self.t('extra', **args) | |
@@ -889,7 +893,7 def show_changeset(ui, repo, opts, buffe | |||||
889 | # options |
|
893 | # options | |
890 | patch = False |
|
894 | patch = False | |
891 | if opts.get('patch'): |
|
895 | if opts.get('patch'): | |
892 | patch = matchfn or util.always |
|
896 | patch = matchfn or matchall(repo) | |
893 |
|
897 | |||
894 | tmpl = opts.get('template') |
|
898 | tmpl = opts.get('template') | |
895 | mapfile = None |
|
899 | mapfile = None | |
@@ -922,7 +926,7 def show_changeset(ui, repo, opts, buffe | |||||
922 | def finddate(ui, repo, date): |
|
926 | def finddate(ui, repo, date): | |
923 | """Find the tipmost changeset that matches the given date spec""" |
|
927 | """Find the tipmost changeset that matches the given date spec""" | |
924 | df = util.matchdate(date) |
|
928 | df = util.matchdate(date) | |
925 | get = util.cachefunc(lambda r: repo.changectx(r).changeset()) |
|
929 | get = util.cachefunc(lambda r: repo[r].changeset()) | |
926 | changeiter, matchfn = walkchangerevs(ui, repo, [], get, {'rev':None}) |
|
930 | changeiter, matchfn = walkchangerevs(ui, repo, [], get, {'rev':None}) | |
927 | results = {} |
|
931 | results = {} | |
928 | for st, rev, fns in changeiter: |
|
932 | for st, rev, fns in changeiter: | |
@@ -977,31 +981,31 def walkchangerevs(ui, repo, pats, chang | |||||
977 | if windowsize < sizelimit: |
|
981 | if windowsize < sizelimit: | |
978 | windowsize *= 2 |
|
982 | windowsize *= 2 | |
979 |
|
983 | |||
980 |
|
|
984 | m = match(repo, pats, opts) | |
981 | follow = opts.get('follow') or opts.get('follow_first') |
|
985 | follow = opts.get('follow') or opts.get('follow_first') | |
982 |
|
986 | |||
983 | if repo.changelog.count() == 0: |
|
987 | if not len(repo): | |
984 |
return [], m |
|
988 | return [], m | |
985 |
|
989 | |||
986 | if follow: |
|
990 | if follow: | |
987 |
defrange = '%s:0' % repo. |
|
991 | defrange = '%s:0' % repo['.'].rev() | |
988 | else: |
|
992 | else: | |
989 | defrange = '-1:0' |
|
993 | defrange = '-1:0' | |
990 | revs = revrange(repo, opts['rev'] or [defrange]) |
|
994 | revs = revrange(repo, opts['rev'] or [defrange]) | |
991 | wanted = {} |
|
995 | wanted = {} | |
992 | slowpath = anypats or opts.get('removed') |
|
996 | slowpath = m.anypats() or opts.get('removed') | |
993 | fncache = {} |
|
997 | fncache = {} | |
994 |
|
998 | |||
995 | if not slowpath and not files: |
|
999 | if not slowpath and not m.files(): | |
996 | # No files, no patterns. Display all revs. |
|
1000 | # No files, no patterns. Display all revs. | |
997 | wanted = dict.fromkeys(revs) |
|
1001 | wanted = dict.fromkeys(revs) | |
998 | copies = [] |
|
1002 | copies = [] | |
999 | if not slowpath: |
|
1003 | if not slowpath: | |
1000 | # Only files, no patterns. Check the history of each file. |
|
1004 | # Only files, no patterns. Check the history of each file. | |
1001 | def filerevgen(filelog, node): |
|
1005 | def filerevgen(filelog, node): | |
1002 | cl_count = repo |
|
1006 | cl_count = len(repo) | |
1003 | if node is None: |
|
1007 | if node is None: | |
1004 | last = filelog |
|
1008 | last = len(filelog) - 1 | |
1005 | else: |
|
1009 | else: | |
1006 | last = filelog.rev(node) |
|
1010 | last = filelog.rev(node) | |
1007 | for i, window in increasing_windows(last, nullrev): |
|
1011 | for i, window in increasing_windows(last, nullrev): | |
@@ -1017,14 +1021,14 def walkchangerevs(ui, repo, pats, chang | |||||
1017 | if rev[0] < cl_count: |
|
1021 | if rev[0] < cl_count: | |
1018 | yield rev |
|
1022 | yield rev | |
1019 | def iterfiles(): |
|
1023 | def iterfiles(): | |
1020 | for filename in files: |
|
1024 | for filename in m.files(): | |
1021 | yield filename, None |
|
1025 | yield filename, None | |
1022 | for filename_node in copies: |
|
1026 | for filename_node in copies: | |
1023 | yield filename_node |
|
1027 | yield filename_node | |
1024 | minrev, maxrev = min(revs), max(revs) |
|
1028 | minrev, maxrev = min(revs), max(revs) | |
1025 | for file_, node in iterfiles(): |
|
1029 | for file_, node in iterfiles(): | |
1026 | filelog = repo.file(file_) |
|
1030 | filelog = repo.file(file_) | |
1027 | if filelog |
|
1031 | if not len(filelog): | |
1028 | if node is None: |
|
1032 | if node is None: | |
1029 | # A zero count may be a directory or deleted file, so |
|
1033 | # A zero count may be a directory or deleted file, so | |
1030 | # try to find matching entries on the slow path. |
|
1034 | # try to find matching entries on the slow path. | |
@@ -1050,13 +1054,12 def walkchangerevs(ui, repo, pats, chang | |||||
1050 |
|
1054 | |||
1051 | # The slow path checks files modified in every changeset. |
|
1055 | # The slow path checks files modified in every changeset. | |
1052 | def changerevgen(): |
|
1056 | def changerevgen(): | |
1053 | for i, window in increasing_windows(repo |
|
1057 | for i, window in increasing_windows(len(repo) - 1, nullrev): | |
1054 | nullrev): |
|
|||
1055 | for j in xrange(i - window, i + 1): |
|
1058 | for j in xrange(i - window, i + 1): | |
1056 | yield j, change(j)[3] |
|
1059 | yield j, change(j)[3] | |
1057 |
|
1060 | |||
1058 | for rev, changefiles in changerevgen(): |
|
1061 | for rev, changefiles in changerevgen(): | |
1059 | matches = filter(m |
|
1062 | matches = filter(m, changefiles) | |
1060 | if matches: |
|
1063 | if matches: | |
1061 | fncache[rev] = matches |
|
1064 | fncache[rev] = matches | |
1062 | wanted[rev] = 1 |
|
1065 | wanted[rev] = 1 | |
@@ -1109,7 +1112,7 def walkchangerevs(ui, repo, pats, chang | |||||
1109 | del wanted[x] |
|
1112 | del wanted[x] | |
1110 |
|
1113 | |||
1111 | def iterate(): |
|
1114 | def iterate(): | |
1112 | if follow and not files: |
|
1115 | if follow and not m.files(): | |
1113 | ff = followfilter(onlyfirst=opts.get('follow_first')) |
|
1116 | ff = followfilter(onlyfirst=opts.get('follow_first')) | |
1114 | def want(rev): |
|
1117 | def want(rev): | |
1115 | if ff.match(rev) and rev in wanted: |
|
1118 | if ff.match(rev) and rev in wanted: | |
@@ -1122,20 +1125,18 def walkchangerevs(ui, repo, pats, chang | |||||
1122 | for i, window in increasing_windows(0, len(revs)): |
|
1125 | for i, window in increasing_windows(0, len(revs)): | |
1123 | yield 'window', revs[0] < revs[-1], revs[-1] |
|
1126 | yield 'window', revs[0] < revs[-1], revs[-1] | |
1124 | nrevs = [rev for rev in revs[i:i+window] if want(rev)] |
|
1127 | nrevs = [rev for rev in revs[i:i+window] if want(rev)] | |
1125 |
|
|
1128 | for rev in util.sort(list(nrevs)): | |
1126 | srevs.sort() |
|
|||
1127 | for rev in srevs: |
|
|||
1128 | fns = fncache.get(rev) |
|
1129 | fns = fncache.get(rev) | |
1129 | if not fns: |
|
1130 | if not fns: | |
1130 | def fns_generator(): |
|
1131 | def fns_generator(): | |
1131 | for f in change(rev)[3]: |
|
1132 | for f in change(rev)[3]: | |
1132 | if m |
|
1133 | if m(f): | |
1133 | yield f |
|
1134 | yield f | |
1134 | fns = fns_generator() |
|
1135 | fns = fns_generator() | |
1135 | yield 'add', rev, fns |
|
1136 | yield 'add', rev, fns | |
1136 | for rev in nrevs: |
|
1137 | for rev in nrevs: | |
1137 | yield 'iter', rev, None |
|
1138 | yield 'iter', rev, None | |
1138 | return iterate(), m |
|
1139 | return iterate(), m | |
1139 |
|
1140 | |||
1140 | def commit(ui, repo, commitfunc, pats, opts): |
|
1141 | def commit(ui, repo, commitfunc, pats, opts): | |
1141 | '''commit the specified files or all outstanding changes''' |
|
1142 | '''commit the specified files or all outstanding changes''' | |
@@ -1149,13 +1150,12 def commit(ui, repo, commitfunc, pats, o | |||||
1149 | if opts.get('addremove'): |
|
1150 | if opts.get('addremove'): | |
1150 | addremove(repo, pats, opts) |
|
1151 | addremove(repo, pats, opts) | |
1151 |
|
1152 | |||
1152 |
|
|
1153 | m = match(repo, pats, opts) | |
1153 | if pats: |
|
1154 | if pats: | |
1154 | status = repo.status(files=fns, match=match) |
|
1155 | modified, added, removed = repo.status(match=m)[:3] | |
1155 | modified, added, removed, deleted, unknown = status[:5] |
|
1156 | files = util.sort(modified + added + removed) | |
1156 | files = modified + added + removed |
|
|||
1157 | slist = None |
|
1157 | slist = None | |
1158 | for f in |
|
1158 | for f in m.files(): | |
1159 | if f == '.': |
|
1159 | if f == '.': | |
1160 | continue |
|
1160 | continue | |
1161 | if f not in files: |
|
1161 | if f not in files: | |
@@ -1167,11 +1167,8 def commit(ui, repo, commitfunc, pats, o | |||||
1167 | raise util.Abort(_("file %s not found!") % rel) |
|
1167 | raise util.Abort(_("file %s not found!") % rel) | |
1168 | if stat.S_ISDIR(mode): |
|
1168 | if stat.S_ISDIR(mode): | |
1169 | name = f + '/' |
|
1169 | name = f + '/' | |
1170 |
i |
|
1170 | i = bisect.bisect(files, name) | |
1171 | slist = list(files) |
|
1171 | if i >= len(files) or not files[i].startswith(name): | |
1172 | slist.sort() |
|
|||
1173 | i = bisect.bisect(slist, name) |
|
|||
1174 | if i >= len(slist) or not slist[i].startswith(name): |
|
|||
1175 | raise util.Abort(_("no match under directory %s!") |
|
1172 | raise util.Abort(_("no match under directory %s!") | |
1176 | % rel) |
|
1173 | % rel) | |
1177 | elif not (stat.S_ISREG(mode) or stat.S_ISLNK(mode)): |
|
1174 | elif not (stat.S_ISREG(mode) or stat.S_ISLNK(mode)): | |
@@ -1179,9 +1176,8 def commit(ui, repo, commitfunc, pats, o | |||||
1179 | "unsupported file type!") % rel) |
|
1176 | "unsupported file type!") % rel) | |
1180 | elif f not in repo.dirstate: |
|
1177 | elif f not in repo.dirstate: | |
1181 | raise util.Abort(_("file %s not tracked!") % rel) |
|
1178 | raise util.Abort(_("file %s not tracked!") % rel) | |
1182 | else: |
|
1179 | m = matchfiles(repo, files) | |
1183 | files = [] |
|
|||
1184 | try: |
|
1180 | try: | |
1185 | return commitfunc(ui, repo, |
|
1181 | return commitfunc(ui, repo, message, m, opts) | |
1186 | except ValueError, inst: |
|
1182 | except ValueError, inst: | |
1187 | raise util.Abort(str(inst)) |
|
1183 | raise util.Abort(str(inst)) |
@@ -13,6 +13,7 import hg, util, revlog, bundlerepo, ext | |||||
13 | import difflib, patch, time, help, mdiff, tempfile |
|
13 | import difflib, patch, time, help, mdiff, tempfile | |
14 | import version, socket |
|
14 | import version, socket | |
15 | import archival, changegroup, cmdutil, hgweb.server, sshserver, hbisect |
|
15 | import archival, changegroup, cmdutil, hgweb.server, sshserver, hbisect | |
|
16 | import merge as merge_ | |||
16 |
|
17 | |||
17 | # Commands start here, listed alphabetically |
|
18 | # Commands start here, listed alphabetically | |
18 |
|
19 | |||
@@ -30,15 +31,16 def add(ui, repo, *pats, **opts): | |||||
30 | rejected = None |
|
31 | rejected = None | |
31 | exacts = {} |
|
32 | exacts = {} | |
32 | names = [] |
|
33 | names = [] | |
33 |
|
|
34 | m = cmdutil.match(repo, pats, opts) | |
34 | badmatch=util.always): |
|
35 | m.bad = lambda x,y: True | |
35 | if exact: |
|
36 | for abs in repo.walk(m): | |
|
37 | if m.exact(abs): | |||
36 | if ui.verbose: |
|
38 | if ui.verbose: | |
37 | ui.status(_('adding %s\n') % rel) |
|
39 | ui.status(_('adding %s\n') % m.rel(abs)) | |
38 | names.append(abs) |
|
40 | names.append(abs) | |
39 | exacts[abs] = 1 |
|
41 | exacts[abs] = 1 | |
40 | elif abs not in repo.dirstate: |
|
42 | elif abs not in repo.dirstate: | |
41 | ui.status(_('adding %s\n') % rel) |
|
43 | ui.status(_('adding %s\n') % m.rel(abs)) | |
42 | names.append(abs) |
|
44 | names.append(abs) | |
43 | if not opts.get('dry_run'): |
|
45 | if not opts.get('dry_run'): | |
44 | rejected = repo.add(names) |
|
46 | rejected = repo.add(names) | |
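Note: the add() hunk above is the template for most conversions in this file: the old (files, matchfn, anypats) triple from cmdutil.matchpats is replaced by one matcher object from cmdutil.match. A hedged sketch of driving that object, using only the calls visible in this diff (repo.walk(m), m(f), m.files(), m.rel(), m.exact(), and the overridable m.bad hook); the function name and its output are illustrative, not part of the patch:

from mercurial import cmdutil   # assumes the usual in-tree module layout

def walkmatches(ui, repo, pats, opts):
    m = cmdutil.match(repo, pats, opts)   # patterns plus cwd-relative helpers in one object
    m.bad = lambda path, msg: False       # silence complaints about unmatched explicit names
    for abs in repo.walk(m):              # repo-relative paths of matching files
        flag = m.exact(abs) and 'exact' or ''
        ui.write('%s %s %s\n' % (abs, m.rel(abs), flag))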
@@ -105,13 +107,13 def annotate(ui, repo, *pats, **opts): | |||||
105 | lastfunc = funcmap[-1] |
|
107 | lastfunc = funcmap[-1] | |
106 | funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1]) |
|
108 | funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1]) | |
107 |
|
109 | |||
108 | ctx = repo |
|
110 | ctx = repo[opts['rev']] | |
109 |
|
111 | |||
110 |
|
|
112 | m = cmdutil.match(repo, pats, opts) | |
111 | node=ctx.node()): |
|
113 | for abs in ctx.walk(m): | |
112 | fctx = ctx |
|
114 | fctx = ctx[abs] | |
113 | if not opts['text'] and util.binary(fctx.data()): |
|
115 | if not opts['text'] and util.binary(fctx.data()): | |
114 | ui.write(_("%s: binary file\n") % ((pats and rel) or abs)) |
|
116 | ui.write(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs)) | |
115 | continue |
|
117 | continue | |
116 |
|
118 | |||
117 | lines = fctx.annotate(follow=opts.get('follow'), |
|
119 | lines = fctx.annotate(follow=opts.get('follow'), | |
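Note: the annotate() hunk above also shows the other pervasive idiom in this change: repositories and changectx objects are indexed directly. repo[rev] yields a changectx (repo[None] is the working directory), ctx[path] yields a filectx, and len(repo) replaces repo.changelog.count(). A small sketch of those idioms; the function and its printed output are invented for the example:

def describe(ui, repo, rev=None, path=None):
    ctx = repo[rev]                          # changectx; repo[None] = working directory
    ui.write('revisions: %d\n' % len(repo))  # len(repo) counts changelog revisions
    ui.write('branch: %s\n' % ctx.branch())
    if path is not None:
        fctx = ctx[path]                     # filectx for one file at that revision
        ui.write('%s: %d bytes\n' % (path, len(fctx.data())))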
@@ -152,14 +154,14 def archive(ui, repo, dest, **opts): | |||||
152 | The default is the basename of the archive, with suffixes removed. |
|
154 | The default is the basename of the archive, with suffixes removed. | |
153 | ''' |
|
155 | ''' | |
154 |
|
156 | |||
155 | ctx = repo |
|
157 | ctx = repo[opts['rev']] | |
156 | if not ctx: |
|
158 | if not ctx: | |
157 | raise util.Abort(_('repository has no revisions')) |
|
159 | raise util.Abort(_('repository has no revisions')) | |
158 | node = ctx.node() |
|
160 | node = ctx.node() | |
159 | dest = cmdutil.make_filename(repo, dest, node) |
|
161 | dest = cmdutil.make_filename(repo, dest, node) | |
160 | if os.path.realpath(dest) == repo.root: |
|
162 | if os.path.realpath(dest) == repo.root: | |
161 | raise util.Abort(_('repository root cannot be destination')) |
|
163 | raise util.Abort(_('repository root cannot be destination')) | |
162 |
|
|
164 | matchfn = cmdutil.match(repo, [], opts) | |
163 | kind = opts.get('type') or 'files' |
|
165 | kind = opts.get('type') or 'files' | |
164 | prefix = opts['prefix'] |
|
166 | prefix = opts['prefix'] | |
165 | if dest == '-': |
|
167 | if dest == '-': | |
@@ -187,7 +189,7 def backout(ui, repo, node=None, rev=Non | |||||
187 | hand. |
|
189 | hand. The result of this merge is not committed, as for a normal | |
188 | merge. |
|
190 | merge. | |
189 |
|
191 | |||
190 | See 'hg help dates' for a list of formats valid for -d/--date. |
|
192 | See 'hg help dates' for a list of formats valid for -d/--date. | |
191 | ''' |
|
193 | ''' | |
192 | if rev and node: |
|
194 | if rev and node: | |
193 | raise util.Abort(_("please specify just one revision")) |
|
195 | raise util.Abort(_("please specify just one revision")) | |
@@ -368,7 +370,7 def branch(ui, repo, label=None, **opts) | |||||
368 |
|
370 | |||
369 | if label: |
|
371 | if label: | |
370 | if not opts.get('force') and label in repo.branchtags(): |
|
372 | if not opts.get('force') and label in repo.branchtags(): | |
371 | if label not in [p.branch() for p in repo. |
|
373 | if label not in [p.branch() for p in repo.parents()]: | |
372 | raise util.Abort(_('a branch of the same name already exists' |
|
374 | raise util.Abort(_('a branch of the same name already exists' | |
373 | ' (use --force to override)')) |
|
375 | ' (use --force to override)')) | |
374 | repo.dirstate.setbranch(util.fromlocal(label)) |
|
376 | repo.dirstate.setbranch(util.fromlocal(label)) | |
@@ -387,11 +389,10 def branches(ui, repo, active=False): | |||||
387 | Use the command 'hg update' to switch to an existing branch. |
|
389 | Use the command 'hg update' to switch to an existing branch. | |
388 | """ |
|
390 | """ | |
389 | hexfunc = ui.debugflag and hex or short |
|
391 | hexfunc = ui.debugflag and hex or short | |
390 | activebranches = [util.tolocal(repo |
|
392 | activebranches = [util.tolocal(repo[n].branch()) | |
391 | for n in repo.heads()] |
|
393 | for n in repo.heads()] | |
392 | branches = [(tag in activebranches, repo.changelog.rev(node), tag) |
|
394 | branches = util.sort([(tag in activebranches, repo.changelog.rev(node), tag) | |
393 |
|
|
395 | for tag, node in repo.branchtags().items()]) | |
394 | branches.sort() |
|
|||
395 | branches.reverse() |
|
396 | branches.reverse() | |
396 |
|
397 | |||
397 | for isactive, node, tag in branches: |
|
398 | for isactive, node, tag in branches: | |
@@ -413,7 +414,8 def bundle(ui, repo, fname, dest=None, * | |||||
413 | If no destination repository is specified the destination is |
|
414 | If no destination repository is specified the destination is | |
414 | assumed to have all the nodes specified by one or more --base |
|
415 | assumed to have all the nodes specified by one or more --base | |
415 | parameters. |
|
416 | parameters. To create a bundle containing all changesets, use | |
416 | --all (or --base null). |
|
417 | --all (or --base null). To change the compression method applied, | |
|
418 | use the -t option (by default, bundles are compressed using bz2). | |||
417 |
|
419 | |||
418 | The bundle file can then be transferred using conventional means and |
|
420 | The bundle file can then be transferred using conventional means and | |
419 | applied to another repository with the unbundle or pull command. |
|
421 | applied to another repository with the unbundle or pull command. | |
@@ -467,7 +469,14 def bundle(ui, repo, fname, dest=None, * | |||||
467 | cg = repo.changegroupsubset(o, revs, 'bundle') |
|
469 | cg = repo.changegroupsubset(o, revs, 'bundle') | |
468 | else: |
|
470 | else: | |
469 | cg = repo.changegroup(o, 'bundle') |
|
471 | cg = repo.changegroup(o, 'bundle') | |
470 | changegroup.writebundle(cg, fname, "HG10BZ") |
|
472 | ||
|
473 | bundletype = opts.get('type', 'bzip2').lower() | |||
|
474 | btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'} | |||
|
475 | bundletype = btypes.get(bundletype) | |||
|
476 | if bundletype not in changegroup.bundletypes: | |||
|
477 | raise util.Abort(_('unknown bundle type specified with --type')) | |||
|
478 | ||||
|
479 | changegroup.writebundle(cg, fname, bundletype) | |||
471 |
|
480 | |||
472 | def cat(ui, repo, file1, *pats, **opts): |
|
481 | def cat(ui, repo, file1, *pats, **opts): | |
473 | """output the current or given revision of files |
|
482 | """output the current or given revision of files | |
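Note: the bundle hunk above maps the new --type option onto the bundle headers that changegroup.writebundle() accepts. A standalone sketch of that lookup, assuming only the three names visible in the patch are valid (so a command line such as "hg bundle --type gzip ../changes.hg" would select HG10GZ):

# same mapping as in the hunk above; anything else yields None and aborts
btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}

def pickbundletype(requested):
    # normalize the user's --type value; default to bzip2 like the command does
    return btypes.get((requested or 'bzip2').lower())

assert pickbundletype('GZip') == 'HG10GZ'
assert pickbundletype(None) == 'HG10BZ'
assert pickbundletype('lzma') is None   # unknown type -> the Abort shown in the hunk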
@@ -484,12 +493,12 def cat(ui, repo, file1, *pats, **opts): | |||||
484 | %d dirname of file being printed, or '.' if in repo root |
|
493 | %d dirname of file being printed, or '.' if in repo root | |
485 | %p root-relative path name of file being printed |
|
494 | %p root-relative path name of file being printed | |
486 | """ |
|
495 | """ | |
487 | ctx = repo |
|
496 | ctx = repo[opts['rev']] | |
488 | err = 1 |
|
497 | err = 1 | |
489 |
|
|
498 | m = cmdutil.match(repo, (file1,) + pats, opts) | |
490 | ctx.node()): |
|
499 | for abs in ctx.walk(m): | |
491 | fp = cmdutil.make_file(repo, opts['output'], ctx.node(), pathname=abs) |
|
500 | fp = cmdutil.make_file(repo, opts['output'], ctx.node(), pathname=abs) | |
492 | data = ctx |
|
501 | data = ctx[abs].data() | |
493 | if opts.get('decode'): |
|
502 | if opts.get('decode'): | |
494 | data = repo.wwritedata(abs, data) |
|
503 | data = repo.wwritedata(abs, data) | |
495 | fp.write(data) |
|
504 | fp.write(data) | |
@@ -514,15 +523,17 def clone(ui, source, dest=None, **opts) | |||||
514 | do not report errors. |
|
523 | do not report errors. In these cases, use the --pull option to | |
515 | avoid hardlinking. |
|
524 | avoid hardlinking. | |
516 |
|
525 | |||
517 |
|
|
526 | In some cases, you can clone repositories and checked out files | |
518 | hardlinks with |
|
527 | using full hardlinks with | |
519 |
|
528 | |||
520 | $ cp -al REPO REPOCLONE |
|
529 | $ cp -al REPO REPOCLONE | |
521 |
|
530 | |||
522 |
|
|
531 | This is the fastest way to clone, but it is not always safe. The | |
523 | atomic (making sure REPO is not modified during |
|
532 | operation is not atomic (making sure REPO is not modified during | |
524 | up to you) and you have to make sure your editor |
|
533 | the operation is up to you) and you have to make sure your editor | |
525 | (Emacs and most Linux Kernel tools do so). |
|
534 | breaks hardlinks (Emacs and most Linux Kernel tools do so). Also, | |
|
535 | this is not compatible with certain extensions that place their | |||
|
536 | metadata under the .hg directory, such as mq. | |||
526 |
|
537 | |||
527 | If you use the -r option to clone up to a specific revision, no |
|
538 | If you use the -r option to clone up to a specific revision, no | |
528 | subsequent revisions will be present in the cloned repository. |
|
539 | subsequent revisions will be present in the cloned repository. | |
@@ -561,9 +572,9 def commit(ui, repo, *pats, **opts): | |||||
561 |
|
572 | |||
562 | See 'hg help dates' for a list of formats valid for -d/--date. |
|
573 | See 'hg help dates' for a list of formats valid for -d/--date. | |
563 | """ |
|
574 | """ | |
564 | def commitfunc(ui, repo, |
|
575 | def commitfunc(ui, repo, message, match, opts): | |
565 | return repo.commit(files, message, opts['user'], opts['date'], |
|
576 | return repo.commit(match.files(), message, opts['user'], opts['date'], | |
566 | force_editor=opts.get('force_editor')) |
|
577 | match, force_editor=opts.get('force_editor')) | |
567 |
|
578 | |||
568 | node = cmdutil.commit(ui, repo, commitfunc, pats, opts) |
|
579 | node = cmdutil.commit(ui, repo, commitfunc, pats, opts) | |
569 | if not node: |
|
580 | if not node: | |
@@ -634,35 +645,30 def debugcomplete(ui, cmd='', **opts): | |||||
634 | ui.write("%s\n" % "\n".join(options)) |
|
645 | ui.write("%s\n" % "\n".join(options)) | |
635 | return |
|
646 | return | |
636 |
|
647 | |||
637 |
|
|
648 | ui.write("%s\n" % "\n".join(util.sort(cmdutil.findpossible(ui, cmd, table)))) | |
638 | clist.sort() |
|
|||
639 | ui.write("%s\n" % "\n".join(clist)) |
|
|||
640 |
|
649 | |||
641 | def debugfsinfo(ui, path = "."): |
|
650 | def debugfsinfo(ui, path = "."): | |
642 | file('.debugfsinfo', 'w').write('') |
|
651 | file('.debugfsinfo', 'w').write('') | |
643 | ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no')) |
|
652 | ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no')) | |
644 | ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no')) |
|
653 | ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no')) | |
645 | ui.write('case-sensitive: %s\n' % (util.check |
|
654 | ui.write('case-sensitive: %s\n' % (util.checkcase('.debugfsinfo') | |
646 | and 'yes' or 'no')) |
|
655 | and 'yes' or 'no')) | |
647 | os.unlink('.debugfsinfo') |
|
656 | os.unlink('.debugfsinfo') | |
648 |
|
657 | |||
649 | def debugrebuildstate(ui, repo, rev=""): |
|
658 | def debugrebuildstate(ui, repo, rev="tip"): | |
650 | """rebuild the dirstate as it would look like for the given revision""" |
|
659 | """rebuild the dirstate as it would look like for the given revision""" | |
651 | if rev == "": |
|
660 | ctx = repo[rev] | |
652 | rev = repo.changelog.tip() |
|
|||
653 | ctx = repo.changectx(rev) |
|
|||
654 | files = ctx.manifest() |
|
|||
655 | wlock = repo.wlock() |
|
661 | wlock = repo.wlock() | |
656 | try: |
|
662 | try: | |
657 | repo.dirstate.rebuild( |
|
663 | repo.dirstate.rebuild(ctx.node(), ctx.manifest()) | |
658 | finally: |
|
664 | finally: | |
659 | del wlock |
|
665 | del wlock | |
660 |
|
666 | |||
661 | def debugcheckstate(ui, repo): |
|
667 | def debugcheckstate(ui, repo): | |
662 | """validate the correctness of the current dirstate""" |
|
668 | """validate the correctness of the current dirstate""" | |
663 | parent1, parent2 = repo.dirstate.parents() |
|
669 | parent1, parent2 = repo.dirstate.parents() | |
664 | m1 = repo |
|
670 | m1 = repo[parent1].manifest() | |
665 | m2 = repo |
|
671 | m2 = repo[parent2].manifest() | |
666 | errors = 0 |
|
672 | errors = 0 | |
667 | for f in repo.dirstate: |
|
673 | for f in repo.dirstate: | |
668 | state = repo.dirstate[f] |
|
674 | state = repo.dirstate[f] | |
@@ -729,11 +735,9 def debugsetparents(ui, repo, rev1, rev2 | |||||
729 |
|
735 | |||
730 | def debugstate(ui, repo, nodates=None): |
|
736 | def debugstate(ui, repo, nodates=None): | |
731 | """show the contents of the current dirstate""" |
|
737 | """show the contents of the current dirstate""" | |
732 | k = repo.dirstate._map.items() |
|
|||
733 | k.sort() |
|
|||
734 | timestr = "" |
|
738 | timestr = "" | |
735 | showdate = not nodates |
|
739 | showdate = not nodates | |
736 | for file_, ent in k: |
|
740 | for file_, ent in util.sort(repo.dirstate._map.items()): | |
737 | if showdate: |
|
741 | if showdate: | |
738 | if ent[3] == -1: |
|
742 | if ent[3] == -1: | |
739 | # Pad or slice to locale representation |
|
743 | # Pad or slice to locale representation | |
@@ -775,7 +779,7 def debugindex(ui, file_): | |||||
775 | r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_) |
|
779 | r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_) | |
776 | ui.write(" rev offset length base linkrev" + |
|
780 | ui.write(" rev offset length base linkrev" + | |
777 | " nodeid p1 p2\n") |
|
781 | " nodeid p1 p2\n") | |
778 | for i in |
|
782 | for i in r: | |
779 | node = r.node(i) |
|
783 | node = r.node(i) | |
780 | try: |
|
784 | try: | |
781 | pp = r.parents(node) |
|
785 | pp = r.parents(node) | |
@@ -789,7 +793,7 def debugindexdot(ui, file_): | |||||
789 | """dump an index DAG as a .dot file""" |
|
793 | """dump an index DAG as a .dot file""" | |
790 | r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_) |
|
794 | r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_) | |
791 | ui.write("digraph G {\n") |
|
795 | ui.write("digraph G {\n") | |
792 | for i in |
|
796 | for i in r: | |
793 | node = r.node(i) |
|
797 | node = r.node(i) | |
794 | pp = r.parents(node) |
|
798 | pp = r.parents(node) | |
795 | ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i)) |
|
799 | ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i)) | |
@@ -912,26 +916,28 def debuginstall(ui): | |||||
912 | def debugrename(ui, repo, file1, *pats, **opts): |
|
916 | def debugrename(ui, repo, file1, *pats, **opts): | |
913 | """dump rename information""" |
|
917 | """dump rename information""" | |
914 |
|
918 | |||
915 | ctx = repo |
|
919 | ctx = repo[opts.get('rev')] | |
916 |
|
|
920 | m = cmdutil.match(repo, (file1,) + pats, opts) | |
917 | ctx.node()): |
|
921 | for abs in ctx.walk(m): | |
918 | fctx = ctx |
|
922 | fctx = ctx[abs] | |
919 |
|
|
923 | o = fctx.filelog().renamed(fctx.filenode()) | |
920 | if m: |
|
924 | rel = m.rel(abs) | |
921 | ui.write(_("%s renamed from %s:%s\n") % (rel, m[0], hex(m[1]))) |
|
925 | if o: | |
|
926 | ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1]))) | |||
922 | else: |
|
927 | else: | |
923 | ui.write(_("%s not renamed\n") % rel) |
|
928 | ui.write(_("%s not renamed\n") % rel) | |
924 |
|
929 | |||
925 | def debugwalk(ui, repo, *pats, **opts): |
|
930 | def debugwalk(ui, repo, *pats, **opts): | |
926 | """show how files match on given patterns""" |
|
931 | """show how files match on given patterns""" | |
927 |
|
|
932 | m = cmdutil.match(repo, pats, opts) | |
|
933 | items = list(repo.walk(m)) | |||
928 | if not items: |
|
934 | if not items: | |
929 | return |
|
935 | return | |
930 | fmt = ' |
|
936 | fmt = 'f %%-%ds %%-%ds %%s' % ( | |
931 | max([len(abs) for |
|
937 | max([len(abs) for abs in items]), | |
932 | max([len(rel) for |
|
938 | max([len(m.rel(abs)) for abs in items])) | |
933 | for |
|
939 | for abs in items: | |
934 | line = fmt % ( |
|
940 | line = fmt % (abs, m.rel(abs), m.exact(abs) and 'exact' or '') | |
935 | ui.write("%s\n" % line.rstrip()) |
|
941 | ui.write("%s\n" % line.rstrip()) | |
936 |
|
942 | |||
937 | def diff(ui, repo, *pats, **opts): |
|
943 | def diff(ui, repo, *pats, **opts): | |
@@ -957,10 +963,8 def diff(ui, repo, *pats, **opts): | |||||
957 | """ |
|
963 | """ | |
958 | node1, node2 = cmdutil.revpair(repo, opts['rev']) |
|
964 | node1, node2 = cmdutil.revpair(repo, opts['rev']) | |
959 |
|
965 | |||
960 |
|
|
966 | m = cmdutil.match(repo, pats, opts) | |
961 |
|
967 | patch.diff(repo, node1, node2, match=m, opts=patch.diffopts(ui, opts)) | ||
962 | patch.diff(repo, node1, node2, fns, match=matchfn, |
|
|||
963 | opts=patch.diffopts(ui, opts)) |
|
|||
964 |
|
968 | |||
965 | def export(ui, repo, *changesets, **opts): |
|
969 | def export(ui, repo, *changesets, **opts): | |
966 | """dump the header and diffs for one or more changesets |
|
970 | """dump the header and diffs for one or more changesets | |
@@ -1058,6 +1062,9 def grep(ui, repo, pattern, *pats, **opt | |||||
1058 | self.colstart = colstart |
|
1062 | self.colstart = colstart | |
1059 | self.colend = colend |
|
1063 | self.colend = colend | |
1060 |
|
1064 | |||
|
1065 | def __hash__(self): | |||
|
1066 | return hash((self.linenum, self.line)) | |||
|
1067 | ||||
1061 | def __eq__(self, other): |
|
1068 | def __eq__(self, other): | |
1062 | return self.line == other.line |
|
1069 | return self.line == other.line | |
1063 |
|
1070 | |||
@@ -1118,7 +1125,7 def grep(ui, repo, pattern, *pats, **opt | |||||
1118 |
|
1125 | |||
1119 | fstate = {} |
|
1126 | fstate = {} | |
1120 | skip = {} |
|
1127 | skip = {} | |
1121 | get = util.cachefunc(lambda r: repo |
|
1128 | get = util.cachefunc(lambda r: repo[r].changeset()) | |
1122 | changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts) |
|
1129 | changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts) | |
1123 | found = False |
|
1130 | found = False | |
1124 | follow = opts.get('follow') |
|
1131 | follow = opts.get('follow') | |
@@ -1126,7 +1133,7 def grep(ui, repo, pattern, *pats, **opt | |||||
1126 | if st == 'window': |
|
1133 | if st == 'window': | |
1127 | matches.clear() |
|
1134 | matches.clear() | |
1128 | elif st == 'add': |
|
1135 | elif st == 'add': | |
1129 | ctx = repo |
|
1136 | ctx = repo[rev] | |
1130 | matches[rev] = {} |
|
1137 | matches[rev] = {} | |
1131 | for fn in fns: |
|
1138 | for fn in fns: | |
1132 | if fn in skip: |
|
1139 | if fn in skip: | |
@@ -1141,9 +1148,7 def grep(ui, repo, pattern, *pats, **opt | |||||
1141 | except revlog.LookupError: |
|
1148 | except revlog.LookupError: | |
1142 | pass |
|
1149 | pass | |
1143 | elif st == 'iter': |
|
1150 | elif st == 'iter': | |
1144 |
|
|
1151 | for fn, m in util.sort(matches[rev].items()): | |
1145 | states.sort() |
|
|||
1146 | for fn, m in states: |
|
|||
1147 | copy = copies.get(rev, {}).get(fn) |
|
1152 | copy = copies.get(rev, {}).get(fn) | |
1148 | if fn in skip: |
|
1153 | if fn in skip: | |
1149 | if copy: |
|
1154 | if copy: | |
@@ -1161,9 +1166,7 def grep(ui, repo, pattern, *pats, **opt | |||||
1161 | fstate[copy] = m |
|
1166 | fstate[copy] = m | |
1162 | prev[fn] = rev |
|
1167 | prev[fn] = rev | |
1163 |
|
1168 | |||
1164 |
f |
|
1169 | for fn, state in util.sort(fstate.items()): | |
1165 | fstate.sort() |
|
|||
1166 | for fn, state in fstate: |
|
|||
1167 | if fn in skip: |
|
1170 | if fn in skip: | |
1168 | continue |
|
1171 | continue | |
1169 | if fn not in copies.get(prev[fn], {}): |
|
1172 | if fn not in copies.get(prev[fn], {}): | |
@@ -1198,7 +1201,7 def heads(ui, repo, *branchrevs, **opts) | |||||
1198 | heads = [] |
|
1201 | heads = [] | |
1199 | visitedset = util.set() |
|
1202 | visitedset = util.set() | |
1200 | for branchrev in branchrevs: |
|
1203 | for branchrev in branchrevs: | |
1201 | branch = repo |
|
1204 | branch = repo[branchrev].branch() | |
1202 | if branch in visitedset: |
|
1205 | if branch in visitedset: | |
1203 | continue |
|
1206 | continue | |
1204 | visitedset.add(branch) |
|
1207 | visitedset.add(branch) | |
@@ -1250,7 +1253,14 def help_(ui, name=None, with_version=Fa | |||||
1250 | if with_version: |
|
1253 | if with_version: | |
1251 | version_(ui) |
|
1254 | version_(ui) | |
1252 | ui.write('\n') |
|
1255 | ui.write('\n') | |
|
1256 | ||||
|
1257 | try: | |||
1253 | aliases, i = cmdutil.findcmd(ui, name, table) |
|
1258 | aliases, i = cmdutil.findcmd(ui, name, table) | |
|
1259 | except cmdutil.AmbiguousCommand, inst: | |||
|
1260 | select = lambda c: c.lstrip('^').startswith(inst.args[0]) | |||
|
1261 | helplist(_('list of commands:\n\n'), select) | |||
|
1262 | return | |||
|
1263 | ||||
1254 | # synopsis |
|
1264 | # synopsis | |
1255 | ui.write("%s\n" % i[2]) |
|
1265 | ui.write("%s\n" % i[2]) | |
1256 |
|
1266 | |||
@@ -1296,8 +1306,7 def help_(ui, name=None, with_version=Fa | |||||
1296 | return |
|
1306 | return | |
1297 |
|
1307 | |||
1298 | ui.status(header) |
|
1308 | ui.status(header) | |
1299 | fns = h |
|
1309 | fns = util.sort(h) | |
1300 | fns.sort() |
|
|||
1301 | m = max(map(len, fns)) |
|
1310 | m = max(map(len, fns)) | |
1302 | for f in fns: |
|
1311 | for f in fns: | |
1303 | if ui.verbose: |
|
1312 | if ui.verbose: | |
@@ -1311,16 +1320,16 def help_(ui, name=None, with_version=Fa | |||||
1311 |
|
1320 | |||
1312 | def helptopic(name): |
|
1321 | def helptopic(name): | |
1313 | v = None |
|
1322 | v = None | |
1314 | for i in help.helptable: |
|
1323 | for i, d in help.helptable: | |
1315 | l = i.split('|') |
|
1324 | l = i.split('|') | |
1316 | if name in l: |
|
1325 | if name in l: | |
1317 | v = i |
|
1326 | v = i | |
1318 | header = l[-1] |
|
1327 | header = l[-1] | |
|
1328 | doc = d | |||
1319 | if not v: |
|
1329 | if not v: | |
1320 | raise cmdutil.UnknownCommand(name) |
|
1330 | raise cmdutil.UnknownCommand(name) | |
1321 |
|
1331 | |||
1322 | # description |
|
1332 | # description | |
1323 | doc = help.helptable[v] |
|
|||
1324 | if not doc: |
|
1333 | if not doc: | |
1325 | doc = _("(No help text available)") |
|
1334 | doc = _("(No help text available)") | |
1326 | if callable(doc): |
|
1335 | if callable(doc): | |
@@ -1391,6 +1400,16 def help_(ui, name=None, with_version=Fa | |||||
1391 | and _(" (default: %s)") % default |
|
1400 | and _(" (default: %s)") % default | |
1392 | or ""))) |
|
1401 | or ""))) | |
1393 |
|
1402 | |||
|
1403 | if ui.verbose: | |||
|
1404 | ui.write(_("\nspecial help topics:\n")) | |||
|
1405 | topics = [] | |||
|
1406 | for i, d in help.helptable: | |||
|
1407 | l = i.split('|') | |||
|
1408 | topics.append((", ".join(l[:-1]), l[-1])) | |||
|
1409 | topics_len = max([len(s[0]) for s in topics]) | |||
|
1410 | for t, desc in topics: | |||
|
1411 | ui.write(" %-*s %s\n" % (topics_len, t, desc)) | |||
|
1412 | ||||
1394 | if opt_output: |
|
1413 | if opt_output: | |
1395 | opts_len = max([len(line[0]) for line in opt_output if line[1]] or [0]) |
|
1414 | opts_len = max([len(line[0]) for line in opt_output if line[1]] or [0]) | |
1396 | for first, second in opt_output: |
|
1415 | for first, second in opt_output: | |
@@ -1433,7 +1452,7 def identify(ui, repo, source=None, | |||||
1433 | "can't query remote revision number, branch, or tags") |
|
1452 | "can't query remote revision number, branch, or tags") | |
1434 | output = [hexfunc(srepo.lookup(rev))] |
|
1453 | output = [hexfunc(srepo.lookup(rev))] | |
1435 | elif not rev: |
|
1454 | elif not rev: | |
1436 | ctx = repo |
|
1455 | ctx = repo[None] | |
1437 | parents = ctx.parents() |
|
1456 | parents = ctx.parents() | |
1438 | changed = False |
|
1457 | changed = False | |
1439 | if default or id or num: |
|
1458 | if default or id or num: | |
@@ -1445,7 +1464,7 def identify(ui, repo, source=None, | |||||
1445 | output.append("%s%s" % ('+'.join([str(p.rev()) for p in parents]), |
|
1464 | output.append("%s%s" % ('+'.join([str(p.rev()) for p in parents]), | |
1446 | (changed) and "+" or "")) |
|
1465 | (changed) and "+" or "")) | |
1447 | else: |
|
1466 | else: | |
1448 | ctx = repo |
|
1467 | ctx = repo[rev] | |
1449 | if default or id: |
|
1468 | if default or id: | |
1450 | output = [hexfunc(ctx.node())] |
|
1469 | output = [hexfunc(ctx.node())] | |
1451 | if num: |
|
1470 | if num: | |
@@ -1542,7 +1561,7 def import_(ui, repo, patch1, *patches, | |||||
1542 | message = None |
|
1561 | message = None | |
1543 | ui.debug(_('message:\n%s\n') % message) |
|
1562 | ui.debug(_('message:\n%s\n') % message) | |
1544 |
|
1563 | |||
1545 | wp = repo. |
|
1564 | wp = repo.parents() | |
1546 | if opts.get('exact'): |
|
1565 | if opts.get('exact'): | |
1547 | if not nodeid or not p1: |
|
1566 | if not nodeid or not p1: | |
1548 | raise util.Abort(_('not a mercurial patch')) |
|
1567 | raise util.Abort(_('not a mercurial patch')) | |
@@ -1685,24 +1704,18 def locate(ui, repo, *pats, **opts): | |||||
1685 | that contain white space as multiple filenames. |
|
1704 | that contain white space as multiple filenames. | |
1686 | """ |
|
1705 | """ | |
1687 | end = opts['print0'] and '\0' or '\n' |
|
1706 | end = opts['print0'] and '\0' or '\n' | |
1688 | rev = opts |
|
1707 | rev = opts.get('rev') or None | |
1689 | if rev: |
|
|||
1690 | node = repo.lookup(rev) |
|
|||
1691 | else: |
|
|||
1692 | node = None |
|
|||
1693 |
|
1708 | |||
1694 | ret = 1 |
|
1709 | ret = 1 | |
1695 | for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node, |
|
1710 | m = cmdutil.match(repo, pats, opts, default='relglob') | |
1696 | badmatch=util.always, |
|
1711 | m.bad = lambda x,y: False | |
1697 | default='relglob'): |
|
1712 | for abs in repo[rev].walk(m): | |
1698 | if src == 'b': |
|
1713 | if not rev and abs not in repo.dirstate: | |
1699 | continue |
|
|||
1700 | if not node and abs not in repo.dirstate: |
|
|||
1701 | continue |
|
1714 | continue | |
1702 | if opts['fullpath']: |
|
1715 | if opts['fullpath']: | |
1703 | ui.write(os.path.join(repo.root, abs), end) |
|
1716 | ui.write(os.path.join(repo.root, abs), end) | |
1704 | else: |
|
1717 | else: | |
1705 | ui.write(((pats and rel) or abs), end) |
|
1718 | ui.write(((pats and m.rel(abs)) or abs), end) | |
1706 | ret = 0 |
|
1719 | ret = 0 | |
1707 |
|
1720 | |||
1708 | return ret |
|
1721 | return ret | |
@@ -1737,7 +1750,7 def log(ui, repo, *pats, **opts): | |||||
1737 |
|
1750 | |||
1738 | """ |
|
1751 | """ | |
1739 |
|
1752 | |||
1740 | get = util.cachefunc(lambda r: repo |
|
1753 | get = util.cachefunc(lambda r: repo[r].changeset()) | |
1741 | changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts) |
|
1754 | changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts) | |
1742 |
|
1755 | |||
1743 | limit = cmdutil.loglimit(opts) |
|
1756 | limit = cmdutil.loglimit(opts) | |
@@ -1746,7 +1759,7 def log(ui, repo, *pats, **opts): | |||||
1746 | if opts['copies'] and opts['rev']: |
|
1759 | if opts['copies'] and opts['rev']: | |
1747 | endrev = max(cmdutil.revrange(repo, opts['rev'])) + 1 |
|
1760 | endrev = max(cmdutil.revrange(repo, opts['rev'])) + 1 | |
1748 | else: |
|
1761 | else: | |
1749 | endrev = repo |
|
1762 | endrev = len(repo) | |
1750 | rcache = {} |
|
1763 | rcache = {} | |
1751 | ncache = {} |
|
1764 | ncache = {} | |
1752 | def getrenamed(fn, rev): |
|
1765 | def getrenamed(fn, rev): | |
@@ -1758,7 +1771,7 def log(ui, repo, *pats, **opts): | |||||
1758 | rcache[fn] = {} |
|
1771 | rcache[fn] = {} | |
1759 | ncache[fn] = {} |
|
1772 | ncache[fn] = {} | |
1760 | fl = repo.file(fn) |
|
1773 | fl = repo.file(fn) | |
1761 | for i in |
|
1774 | for i in fl: | |
1762 | node = fl.node(i) |
|
1775 | node = fl.node(i) | |
1763 | lr = fl.linkrev(node) |
|
1776 | lr = fl.linkrev(node) | |
1764 | renamed = fl.renamed(node) |
|
1777 | renamed = fl.renamed(node) | |
@@ -1774,7 +1787,7 def log(ui, repo, *pats, **opts): | |||||
1774 | # filectx logic. |
|
1787 | # filectx logic. | |
1775 |
|
1788 | |||
1776 | try: |
|
1789 | try: | |
1777 | return repo |
|
1790 | return repo[rev][fn].renamed() | |
1778 | except revlog.LookupError: |
|
1791 | except revlog.LookupError: | |
1779 | pass |
|
1792 | pass | |
1780 | return None |
|
1793 | return None | |
@@ -1850,17 +1863,13 def manifest(ui, repo, node=None, rev=No | |||||
1850 | if not node: |
|
1863 | if not node: | |
1851 | node = rev |
|
1864 | node = rev | |
1852 |
|
1865 | |||
1853 | m = repo.changectx(node).manifest() |
|
1866 | decor = {'l':'644 @ ', 'x':'755 * ', '':'644 '} | |
1854 | files = m.keys() |
|
1867 | ctx = repo[node] | |
1855 | files.sort() |
|
1868 | for f in ctx: | |
1856 |
|
||||
1857 | for f in files: |
|
|||
1858 | if ui.debugflag: |
|
1869 | if ui.debugflag: | |
1859 | ui.write("%40s " % hex(m[f])) |
|
1870 | ui.write("%40s " % hex(ctx.manifest()[f])) | |
1860 | if ui.verbose: |
|
1871 | if ui.verbose: | |
1861 | type = m.execf(f) and "*" or m.linkf(f) and "@" or " " |
|
1872 | ui.write(decor[ctx.flags(f)]) | |
1862 | perm = m.execf(f) and "755" or "644" |
|
|||
1863 | ui.write("%3s %1s " % (perm, type)) |
|
|||
1864 | ui.write("%s\n" % f) |
|
1873 | ui.write("%s\n" % f) | |
1865 |
|
1874 | |||
1866 | def merge(ui, repo, node=None, force=None, rev=None): |
|
1875 | def merge(ui, repo, node=None, force=None, rev=None): | |
@@ -1872,7 +1881,7 def merge(ui, repo, node=None, force=Non | |||||
1872 | performed before any further updates are allowed. |
|
1881 | performed before any further updates are allowed. | |
1873 |
|
1882 | |||
1874 | If no revision is specified, the working directory's parent is a |
|
1883 | If no revision is specified, the working directory's parent is a | |
1875 | head revision, and the |
|
1884 | head revision, and the current branch contains exactly one other head, | |
1876 | the other head is merged with by default. |
|
1885 | the other head is merged with by default. Otherwise, an explicit | |
1877 | revision to merge with must be provided. |
|
1886 | revision to merge with must be provided. | |
1878 | """ |
|
1887 | """ | |
@@ -1883,22 +1892,28 def merge(ui, repo, node=None, force=Non | |||||
1883 | node = rev |
|
1892 | node = rev | |
1884 |
|
1893 | |||
1885 | if not node: |
|
1894 | if not node: | |
1886 | heads = repo.heads() |
|
1895 | branch = repo.changectx(None).branch() | |
1887 | if len(heads) > 2: |
|
1896 | bheads = repo.branchheads(branch) | |
1888 | raise util.Abort(_('repo has %d heads - ' |
|
1897 | if len(bheads) > 2: | |
1889 | 'please merge with an explicit rev') % |
|
1898 | raise util.Abort(_("branch '%s' has %d heads - " | |
1890 |
le |
|
1899 | "please merge with an explicit rev") % | |
|
1900 | (branch, len(bheads))) | |||
|
1901 | ||||
1891 | parent = repo.dirstate.parents()[0] |
|
1902 | parent = repo.dirstate.parents()[0] | |
1892 | if len(heads) == 1: |
|
1903 | if len(bheads) == 1: | |
|
1904 | if len(repo.heads()) > 1: | |||
|
1905 | raise util.Abort(_("branch '%s' has one head - " | |||
|
1906 | "please merge with an explicit rev") % | |||
|
1907 | branch) | |||
1893 | msg = _('there is nothing to merge') |
|
1908 | msg = _('there is nothing to merge') | |
1894 | if parent != repo.lookup(repo |
|
1909 | if parent != repo.lookup(repo[None].branch()): | |
1895 | msg = _('%s - use "hg update" instead') % msg |
|
1910 | msg = _('%s - use "hg update" instead') % msg | |
1896 | raise util.Abort(msg) |
|
1911 | raise util.Abort(msg) | |
1897 |
|
1912 | |||
1898 | if parent not in heads: |
|
1913 | if parent not in bheads: | |
1899 | raise util.Abort(_('working dir not at a head rev - ' |
|
1914 | raise util.Abort(_('working dir not at a head rev - ' | |
1900 | 'use "hg update" or merge with an explicit rev')) |
|
1915 | 'use "hg update" or merge with an explicit rev')) | |
1901 | node = parent == heads[0] and heads[-1] or heads[0] |
|
1916 | node = parent == bheads[0] and bheads[-1] or bheads[0] | |
1902 | return hg.merge(repo, node, force=force) |
|
1917 | return hg.merge(repo, node, force=force) | |
1903 |
|
1918 | |||
1904 | def outgoing(ui, repo, dest=None, **opts): |
|
1919 | def outgoing(ui, repo, dest=None, **opts): | |
@@ -1948,15 +1963,15 def parents(ui, repo, file_=None, **opts | |||||
1948 | """ |
|
1963 | """ | |
1949 | rev = opts.get('rev') |
|
1964 | rev = opts.get('rev') | |
1950 | if rev: |
|
1965 | if rev: | |
1951 | ctx = repo |
|
1966 | ctx = repo[rev] | |
1952 | else: |
|
1967 | else: | |
1953 | ctx = repo |
|
1968 | ctx = repo[None] | |
1954 |
|
1969 | |||
1955 | if file_: |
|
1970 | if file_: | |
1956 |
|
|
1971 | m = cmdutil.match(repo, (file_,), opts) | |
1957 | if anypats or len(files) != 1: |
|
1972 | if m.anypats() or len(m.files()) != 1: | |
1958 | raise util.Abort(_('can only specify an explicit file name')) |
|
1973 | raise util.Abort(_('can only specify an explicit file name')) | |
1959 | file_ = files[0] |
|
1974 | file_ = m.files()[0] | |
1960 | filenodes = [] |
|
1975 | filenodes = [] | |
1961 | for cp in ctx.parents(): |
|
1976 | for cp in ctx.parents(): | |
1962 | if not cp: |
|
1977 | if not cp: | |
@@ -2022,9 +2037,9 def pull(ui, repo, source="default", **o | |||||
2022 | Valid URLs are of the form: |
|
2037 | Valid URLs are of the form: | |
2023 |
|
2038 | |||
2024 | local/filesystem/path (or file://local/filesystem/path) |
|
2039 | local/filesystem/path (or file://local/filesystem/path) | |
2025 | http://[user@]host[:port]/[path] |
|
2040 | http://[user[:pass]@]host[:port]/[path] | |
2026 | https://[user@]host[:port]/[path] |
|
2041 | https://[user[:pass]@]host[:port]/[path] | |
2027 | ssh://[user@]host[:port]/[path] |
|
2042 | ssh://[user[:pass]@]host[:port]/[path] | |
2028 | static-http://host[:port]/[path] |
|
2043 | static-http://host[:port]/[path] | |
2029 |
|
2044 | |||
2030 | Paths in the local filesystem can either point to Mercurial |
|
2045 | Paths in the local filesystem can either point to Mercurial | |
@@ -2084,9 +2099,9 def push(ui, repo, dest=None, **opts): | |||||
2084 | Valid URLs are of the form: |
|
2099 | Valid URLs are of the form: | |
2085 |
|
2100 | |||
2086 | local/filesystem/path (or file://local/filesystem/path) |
|
2101 | local/filesystem/path (or file://local/filesystem/path) | |
2087 | ssh://[user@]host[:port]/[path] |
|
2102 | ssh://[user[:pass]@]host[:port]/[path] | |
2088 | http://[user@]host[:port]/[path] |
|
2103 | http://[user[:pass]@]host[:port]/[path] | |
2089 | https://[user@]host[:port]/[path] |
|
2104 | https://[user[:pass]@]host[:port]/[path] | |
2090 |
|
2105 | |||
2091 | An optional identifier after # indicates a particular branch, tag, |
|
2106 | An optional identifier after # indicates a particular branch, tag, | |
2092 | or changeset to push. If -r is used, the named changeset and all its |
|
2107 | or changeset to push. If -r is used, the named changeset and all its | |
@@ -2126,7 +2141,7 def rawcommit(ui, repo, *pats, **opts): | |||||
2126 |
|
2141 | |||
2127 | message = cmdutil.logmessage(opts) |
|
2142 | message = cmdutil.logmessage(opts) | |
2128 |
|
2143 | |||
2129 |
files |
|
2144 | files = cmdutil.match(repo, pats, opts).files() | |
2130 | if opts['files']: |
|
2145 | if opts['files']: | |
2131 | files += open(opts['files']).read().splitlines() |
|
2146 | files += open(opts['files']).read().splitlines() | |
2132 |
|
2147 | |||
@@ -2178,47 +2193,28 def remove(ui, repo, *pats, **opts): | |||||
2178 | if not pats and not after: |
|
2193 | if not pats and not after: | |
2179 | raise util.Abort(_('no files specified')) |
|
2194 | raise util.Abort(_('no files specified')) | |
2180 |
|
2195 | |||
2181 |
|
|
2196 | m = cmdutil.match(repo, pats, opts) | |
2182 | mardu = map(dict.fromkeys, repo.status(files=files, match=matchfn))[:5] |
|
2197 | s = repo.status(match=m, clean=True) | |
2183 | modified, added, removed, deleted, unknown = mardu |
|
2198 | modified, added, deleted, clean = s[0], s[1], s[3], s[6] | |
2184 |
|
2199 | |||
2185 | remove, forget = [], [] |
|
2200 | def warn(files, reason): | |
2186 | for src, abs, rel, exact in cmdutil.walk(repo, pats, opts): |
|
2201 | for f in files: | |
2187 |
|
2202 | ui.warn(_('not removing %s: file %s (use -f to force removal)\n') | ||
2188 | reason = None |
|
2203 | % (m.rel(f), reason)) | |
2189 | if abs in removed or abs in unknown: |
|
2204 | ||
2190 | continue |
|
2205 | if force: | |
2191 |
|
2206 | remove, forget = modified + deleted + clean, added | ||
2192 | # last column |
|
2207 | elif after: | |
2193 | elif abs in deleted: |
|
2208 | remove, forget = deleted, [] | |
2194 | remove.append(abs) |
|
2209 | warn(modified + added + clean, _('still exists')) | |
2195 |
|
||||
2196 | # rest of the third row |
|
|||
2197 | elif after and not force: |
|
|||
2198 | reason = _('still exists (use -f to force removal)') |
|
|||
2199 |
|
||||
2200 | # rest of the first column |
|
|||
2201 | elif abs in added: |
|
|||
2202 | if not force: |
|
|||
2203 | reason = _('has been marked for add (use -f to force removal)') |
|
|||
2204 |
|
|
2210 | else: | |
2205 | forget.append(abs) |
|
2211 | remove, forget = deleted + clean, [] | |
2206 |
|
2212 | warn(modified, _('is modified')) | ||
2207 | # rest of the third column |
|
2213 | warn(added, _('has been marked for add')) | |
2208 | elif abs in modified: |
|
2214 | ||
2209 | if not force: |
|
2215 | for f in util.sort(remove + forget): | |
2210 | reason = _('is modified (use -f to force removal)') |
|
2216 | if ui.verbose or not m.exact(f): | |
2211 | else: |
|
2217 | ui.status(_('removing %s\n') % m.rel(f)) | |
2212 | remove.append(abs) |
|
|||
2213 |
|
||||
2214 | # rest of the second column |
|
|||
2215 | elif not reason: |
|
|||
2216 | remove.append(abs) |
|
|||
2217 |
|
||||
2218 | if reason: |
|
|||
2219 | ui.warn(_('not removing %s: file %s\n') % (rel, reason)) |
|
|||
2220 | elif ui.verbose or not exact: |
|
|||
2221 | ui.status(_('removing %s\n') % rel) |
|
|||
2222 |
|
2218 | |||
2223 | repo.forget(forget) |
|
2219 | repo.forget(forget) | |
2224 | repo.remove(remove, unlink=not after) |
|
2220 | repo.remove(remove, unlink=not after) | |
@@ -2243,6 +2239,39 def rename(ui, repo, *pats, **opts): | |||||
2243 | finally: |
|
2239 | finally: | |
2244 | del wlock |
|
2240 | del wlock | |
2245 |
|
2241 | |||
|
2242 | def resolve(ui, repo, *pats, **opts): | |||
|
2243 | """resolve file merges from a branch merge or update | |||
|
2244 | ||||
|
2245 | This command will attempt to resolve unresolved merges from the | |||
|
2246 | last update or merge command. This will use the local file | |||
|
2247 | revision preserved at the last update or merge to cleanly retry | |||
|
2248 | the file merge attempt. With no file or options specified, this | |||
|
2249 | command will attempt to resolve all unresolved files. | |||
|
2250 | ||||
|
2251 | The codes used to show the status of files are: | |||
|
2252 | U = unresolved | |||
|
2253 | R = resolved | |||
|
2254 | """ | |||
|
2255 | ||||
|
2256 | if len([x for x in opts if opts[x]]) > 1: | |||
|
2257 | raise util.Abort(_("too many options specified")) | |||
|
2258 | ||||
|
2259 | ms = merge_.mergestate(repo) | |||
|
2260 | m = cmdutil.match(repo, pats, opts) | |||
|
2261 | ||||
|
2262 | for f in ms: | |||
|
2263 | if m(f): | |||
|
2264 | if opts.get("list"): | |||
|
2265 | ui.write("%s %s\n" % (ms[f].upper(), f)) | |||
|
2266 | elif opts.get("mark"): | |||
|
2267 | ms.mark(f, "r") | |||
|
2268 | elif opts.get("unmark"): | |||
|
2269 | ms.mark(f, "u") | |||
|
2270 | else: | |||
|
2271 | wctx = repo[None] | |||
|
2272 | mctx = wctx.parents()[-1] | |||
|
2273 | ms.resolve(f, wctx, mctx) | |||
|
2274 | ||||
2246 | def revert(ui, repo, *pats, **opts): |
|
2275 | def revert(ui, repo, *pats, **opts): | |
2247 | """restore individual files or dirs to an earlier state |
|
2276 | """restore individual files or dirs to an earlier state | |
2248 |
|
2277 | |||
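Note: the resolve command added above is a thin wrapper around merge_.mergestate. A hedged sketch of the same flow outside the command table, using only the calls visible in the hunk (iterating the merge state, indexing it per file, mark() and resolve()); the helper name reresolve and its output are illustrative:

def reresolve(ui, repo):
    ms = merge_.mergestate(repo)     # records files touched by the last merge/update
    wctx = repo[None]                # working directory context
    mctx = wctx.parents()[-1]        # the "other" parent of the merge
    for f in ms:
        ui.write('%s %s\n' % (ms[f].upper(), f))   # U = unresolved, R = resolved
        if ms[f] == 'u':
            ms.resolve(f, wctx, mctx)              # retry the file merge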
@@ -2290,7 +2319,7 def revert(ui, repo, *pats, **opts): | |||||
2290 | if not opts['rev'] and p2 != nullid: |
|
2319 | if not opts['rev'] and p2 != nullid: | |
2291 | raise util.Abort(_('uncommitted merge - please provide a ' |
|
2320 | raise util.Abort(_('uncommitted merge - please provide a ' | |
2292 | 'specific revision')) |
|
2321 | 'specific revision')) | |
2293 | ctx = repo |
|
2322 | ctx = repo[opts['rev']] | |
2294 | node = ctx.node() |
|
2323 | node = ctx.node() | |
2295 | mf = ctx.manifest() |
|
2324 | mf = ctx.manifest() | |
2296 | if node == parent: |
|
2325 | if node == parent: | |
@@ -2308,30 +2337,32 def revert(ui, repo, *pats, **opts): | |||||
2308 | try: |
|
2337 | try: | |
2309 | # walk dirstate. |
|
2338 | # walk dirstate. | |
2310 | files = [] |
|
2339 | files = [] | |
2311 | for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, |
|
2340 | ||
2312 | badmatch=mf.has_key): |
|
2341 | m = cmdutil.match(repo, pats, opts) | |
2313 | names[abs] = (rel, exact) |
|
2342 | m.bad = lambda x,y: False | |
2314 | if src != 'b': |
|
2343 | for abs in repo.walk(m): | |
2315 | files.append(abs) |
|
2344 | names[abs] = m.rel(abs), m.exact(abs) | |
2316 |
|
2345 | |||
2317 | # walk target manifest. |
|
2346 | # walk target manifest. | |
2318 |
|
2347 | |||
2319 | def bad |
|
2348 | def badfn(path, msg): | |
2320 | if path in names: |
|
2349 | if path in names: | |
2321 |
return |
|
2350 | return False | |
2322 | path_ = path + '/' |
|
2351 | path_ = path + '/' | |
2323 | for f in names: |
|
2352 | for f in names: | |
2324 | if f.startswith(path_): |
|
2353 | if f.startswith(path_): | |
2325 |
return |
|
2354 | return False | |
|
2355 | repo.ui.warn("%s: %s\n" % (m.rel(path), msg)) | |||
2326 | return False |
|
2356 | return False | |
2327 |
|
2357 | |||
2328 |
|
|
2358 | m = cmdutil.match(repo, pats, opts) | |
2329 | badmatch=badmatch): |
|
2359 | m.bad = badfn | |
2330 | if abs in names or src == 'b': |
|
2360 | for abs in repo[node].walk(m): | |
2331 |
|
|
2361 | if abs not in names: | |
2332 | names[abs] = ( |
|
2362 | names[abs] = m.rel(abs), m.exact(abs) | |
2333 |
|
2363 | |||
2334 | changes = repo.status(files=files, match=names.has_key)[:4] |
|
2364 | m = cmdutil.matchfiles(repo, names) | |
|
2365 | changes = repo.status(match=m)[:4] | |||
2335 | modified, added, removed, deleted = map(dict.fromkeys, changes) |
|
2366 | modified, added, removed, deleted = map(dict.fromkeys, changes) | |
2336 |
|
2367 | |||
2337 | # if f is a rename, also revert the source |
|
2368 | # if f is a rename, also revert the source | |
@@ -2365,10 +2396,7 def revert(ui, repo, *pats, **opts): | |||||
2365 | (deleted, revert, remove, False, False), |
|
2396 | (deleted, revert, remove, False, False), | |
2366 | ) |
|
2397 | ) | |
2367 |
|
2398 | |||
2368 | entries = names.items() |
|
2399 | for abs, (rel, exact) in util.sort(names.items()): | |
2369 | entries.sort() |
|
|||
2370 |
|
||||
2371 | for abs, (rel, exact) in entries: |
|
|||
2372 | mfentry = mf.get(abs) |
|
2400 | mfentry = mf.get(abs) | |
2373 | target = repo.wjoin(abs) |
|
2401 | target = repo.wjoin(abs) | |
2374 | def handle(xlist, dobackup): |
|
2402 | def handle(xlist, dobackup): | |
@@ -2406,7 +2434,7 def revert(ui, repo, *pats, **opts): | |||||
2406 | if pmf is None: |
|
2434 | if pmf is None: | |
2407 | # only need parent manifest in this unlikely case, |
|
2435 | # only need parent manifest in this unlikely case, | |
2408 | # so do not read by default |
|
2436 | # so do not read by default | |
2409 | pmf = repo |
|
2437 | pmf = repo[parent].manifest() | |
2410 | if abs in pmf: |
|
2438 | if abs in pmf: | |
2411 | if mfentry: |
|
2439 | if mfentry: | |
2412 | # if version of file is same in parent and target |
|
2440 | # if version of file is same in parent and target | |
@@ -2420,7 +2448,7 def revert(ui, repo, *pats, **opts): | |||||
2420 | if not opts.get('dry_run'): |
|
2448 | if not opts.get('dry_run'): | |
2421 | def checkout(f): |
|
2449 | def checkout(f): | |
2422 | fc = ctx[f] |
|
2450 | fc = ctx[f] | |
2423 | repo.wwrite(f, fc.data(), fc. |
|
2451 | repo.wwrite(f, fc.data(), fc.flags()) | |
2424 |
|
2452 | |||
2425 | audit_path = util.path_auditor(repo.root) |
|
2453 | audit_path = util.path_auditor(repo.root) | |
2426 | for f in remove[0]: |
|
2454 | for f in remove[0]: | |
@@ -2542,8 +2570,17 def serve(ui, repo, **opts): | |||||
2542 | if port == ':80': |
|
2570 | if port == ':80': | |
2543 | port = '' |
|
2571 | port = '' | |
2544 |
|
2572 | |||
2545 | ui.status(_('listening at http://%s%s/%s (%s:%d)\n') % |
|
2573 | bindaddr = self.httpd.addr | |
2546 | (self.httpd.fqaddr, port, prefix, self.httpd.addr, self.httpd.port)) |
|
2574 | if bindaddr == '0.0.0.0': | |
|
2575 | bindaddr = '*' | |||
|
2576 | elif ':' in bindaddr: # IPv6 | |||
|
2577 | bindaddr = '[%s]' % bindaddr | |||
|
2578 | ||||
|
2579 | fqaddr = self.httpd.fqaddr | |||
|
2580 | if ':' in fqaddr: | |||
|
2581 | fqaddr = '[%s]' % fqaddr | |||
|
2582 | ui.status(_('listening at http://%s%s/%s (bound to %s:%d)\n') % | |||
|
2583 | (fqaddr, port, prefix, bindaddr, self.httpd.port)) | |||
2547 |
|
2584 | |||
2548 | def run(self): |
|
2585 | def run(self): | |
2549 | self.httpd.serve_forever() |
|
2586 | self.httpd.serve_forever() | |
@@ -2583,64 +2620,44 def status(ui, repo, *pats, **opts): | |||||
2583 | = the previous added file was copied from here |
|
2620 | = the previous added file was copied from here | |
2584 | """ |
|
2621 | """ | |
2585 |
|
2622 | |||
2586 | all = opts['all'] |
|
|||
2587 | node1, node2 = cmdutil.revpair(repo, opts.get('rev')) |
|
2623 | node1, node2 = cmdutil.revpair(repo, opts.get('rev')) | |
2588 |
|
||||
2589 | files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts) |
|
|||
2590 | cwd = (pats and repo.getcwd()) or '' |
|
2624 | cwd = (pats and repo.getcwd()) or '' | |
2591 | modified, added, removed, deleted, unknown, ignored, clean = [ |
|
2625 | end = opts['print0'] and '\0' or '\n' | |
2592 | n for n in repo.status(node1=node1, node2=node2, files=files, |
|
|||
2593 | match=matchfn, |
|
|||
2594 | list_ignored=opts['ignored'] |
|
|||
2595 | or all and not ui.quiet, |
|
|||
2596 | list_clean=opts['clean'] or all, |
|
|||
2597 | list_unknown=opts['unknown'] |
|
|||
2598 | or not (ui.quiet or |
|
|||
2599 | opts['modified'] or |
|
|||
2600 | opts['added'] or |
|
|||
2601 | opts['removed'] or |
|
|||
2602 | opts['deleted'] or |
|
|||
2603 | opts['ignored']))] |
|
|||
2604 |
|
||||
2605 | changetypes = (('modified', 'M', modified), |
|
|||
2606 | ('added', 'A', added), |
|
|||
2607 | ('removed', 'R', removed), |
|
|||
2608 | ('deleted', '!', deleted), |
|
|||
2609 | ('unknown', '?', unknown), |
|
|||
2610 | ('ignored', 'I', ignored)) |
|
|||
2611 |
|
||||
2612 | explicit_changetypes = changetypes + (('clean', 'C', clean),) |
|
|||
2613 |
|
||||
2614 | copy = {} |
|
2626 | copy = {} | |
2615 | showcopy = {} |
|
2627 | states = 'modified added removed deleted unknown ignored clean'.split() | |
2616 | if ((all or opts.get('copies')) and not opts.get('no_status')): |
|
2628 | show = [k for k in states if opts[k]] | |
2617 | if opts.get('rev') == []: |
|
2629 | if opts['all']: | |
2618 | # fast path, more correct with merge parents |
|
2630 | show += ui.quiet and (states[:4] + ['clean']) or states | |
2619 | showcopy = copy = repo.dirstate.copies().copy() |
|
2631 | if not show: | |
2620 | else: |
|
2632 | show = ui.quiet and states[:4] or states[:5] | |
2621 | ctxn = repo.changectx(nullid) |
|
2633 | ||
2622 | ctx1 = repo.changectx(node1) |
|
2634 | stat = repo.status(node1, node2, cmdutil.match(repo, pats, opts), | |
2623 | ctx2 = repo.changectx(node2) |
|
2635 | 'ignored' in show, 'clean' in show, 'unknown' in show) | |
|
2636 | changestates = zip(states, 'MAR!?IC', stat) | |||
|
2637 | ||||
|
2638 | if (opts['all'] or opts['copies']) and not opts['no_status']: | |||
|
2639 | ctxn = repo[nullid] | |||
|
2640 | ctx1 = repo[node1] | |||
|
2641 | ctx2 = repo[node2] | |||
|
2642 | added = stat[1] | |||
2624 |
|
|
2643 | if node2 is None: | |
2625 | ctx2 = repo.workingctx() |
|
2644 | added = stat[0] + stat[1] # merged? | |
2626 | copy, diverge = copies.copies(repo, ctx1, ctx2, ctxn) |
|
2645 | ||
2627 |
|
|
2646 | for k, v in copies.copies(repo, ctx1, ctx2, ctxn)[0].items(): | |
|
2647 | if k in added: | |||
|
2648 | copy[k] = v | |||
|
2649 | elif v in added: | |||
2628 | copy[v] = k |
|
2650 | copy[v] = k | |
2629 |
|
2651 | |||
2630 | end = opts['print0'] and '\0' or '\n' |
|
2652 | for state, char, files in changestates: | |
2631 |
|
2653 | if state in show: | ||
2632 | for opt, char, changes in ([ct for ct in explicit_changetypes |
|
2654 | format = "%s %%s%s" % (char, end) | |
2633 | if all or opts[ct[0]]] |
|
|||
2634 | or changetypes): |
|
|||
2635 |
|
||||
2636 | if opts['no_status']: |
|
2655 | if opts['no_status']: | |
2637 | format = "%%s%s" % end |
|
2656 | format = "%%s%s" % end | |
2638 | else: |
|
2657 | ||
2639 | format = "%s %%s%s" % (char, end) |
|
2658 | for f in files: | |
2640 |
|
||||
2641 | for f in changes: |
|
|||
2642 | ui.write(format % repo.pathto(f, cwd)) |
|
2659 | ui.write(format % repo.pathto(f, cwd)) | |
2643 | if f in copy and (f in added or f in showcopy): |
|
2660 | if f in copy: | |
2644 | ui.write(' %s%s' % (repo.pathto(copy[f], cwd), end)) |
|
2661 | ui.write(' %s%s' % (repo.pathto(copy[f], cwd), end)) | |
2645 |
|
2662 | |||
2646 | def tag(ui, repo, name1, *names, **opts): |
|
2663 | def tag(ui, repo, name1, *names, **opts): | |
@@ -2664,7 +2681,7 def tag(ui, repo, name1, *names, **opts) | |||||
2664 | See 'hg help dates' for a list of formats valid for -d/--date. |
|
2681 | See 'hg help dates' for a list of formats valid for -d/--date. | |
2665 | """ |
|
2682 | """ | |
2666 |
|
2683 | |||
2667 | rev_ = |
|
2684 | rev_ = "." | |
2668 | names = (name1,) + names |
|
2685 | names = (name1,) + names | |
2669 | if len(names) != len(dict.fromkeys(names)): |
|
2686 | if len(names) != len(dict.fromkeys(names)): | |
2670 | raise util.Abort(_('tag names must be unique')) |
|
2687 | raise util.Abort(_('tag names must be unique')) | |
@@ -2695,7 +2712,7 def tag(ui, repo, name1, *names, **opts) | |||||
2695 | if not rev_ and repo.dirstate.parents()[1] != nullid: |
|
2712 | if not rev_ and repo.dirstate.parents()[1] != nullid: | |
2696 | raise util.Abort(_('uncommitted merge - please provide a ' |
|
2713 | raise util.Abort(_('uncommitted merge - please provide a ' | |
2697 | 'specific revision')) |
|
2714 | 'specific revision')) | |
2698 | r = repo |
|
2715 | r = repo[rev_].node() | |
2699 |
|
2716 | |||
2700 | if not message: |
|
2717 | if not message: | |
2701 | message = (_('Added tag %s for changeset %s') % |
|
2718 | message = (_('Added tag %s for changeset %s') % | |
@@ -2752,7 +2769,7 def tip(ui, repo, **opts): | |||||
2752 | that repository becomes the current tip. The "tip" tag is special |
|
2769 | that repository becomes the current tip. The "tip" tag is special | |
2753 | and cannot be renamed or assigned to a different changeset. |
|
2770 | and cannot be renamed or assigned to a different changeset. | |
2754 | """ |
|
2771 | """ | |
2755 | cmdutil.show_changeset(ui, repo, opts).show( |
|
2772 | cmdutil.show_changeset(ui, repo, opts).show(len(repo) - 1) | |
2756 |
|
2773 | |||
2757 | def unbundle(ui, repo, fname1, *fnames, **opts): |
|
2774 | def unbundle(ui, repo, fname1, *fnames, **opts): | |
2758 | """apply one or more changegroup files |
|
2775 | """apply one or more changegroup files | |
@@ -2780,8 +2797,8 def unbundle(ui, repo, fname1, *fnames, | |||||
2780 | def update(ui, repo, node=None, rev=None, clean=False, date=None): |
|
2797 | def update(ui, repo, node=None, rev=None, clean=False, date=None): | |
2781 | """update working directory |
|
2798 | """update working directory | |
2782 |
|
2799 | |||
2783 | Update the working directory to the specified revision, |
|
2800 | Update the repository's working directory to the specified revision, | |
2784 | tip of the current branch if none is specified. |
|
2801 | or the tip of the current branch if none is specified. | |
2785 |
|
2802 | |||
2786 | If the requested revision is a descendant of the working |
|
2803 | If the requested revision is a descendant of the working | |
2787 | directory, any outstanding changes in the working directory will |
|
2804 | directory, any outstanding changes in the working directory will | |
@@ -2892,6 +2909,23 logopts = [ | |||||
2892 | ('M', 'no-merges', None, _('do not show merges')), |
|
2909 | ('M', 'no-merges', None, _('do not show merges')), | |
2893 | ] + templateopts |
|
2910 | ] + templateopts | |
2894 |
|
2911 | |||
|
2912 | diffopts = [ | |||
|
2913 | ('a', 'text', None, _('treat all files as text')), | |||
|
2914 | ('g', 'git', None, _('use git extended diff format')), | |||
|
2915 | ('', 'nodates', None, _("don't include dates in diff headers")) | |||
|
2916 | ] | |||
|
2917 | ||||
|
2918 | diffopts2 = [ | |||
|
2919 | ('p', 'show-function', None, _('show which function each change is in')), | |||
|
2920 | ('w', 'ignore-all-space', None, | |||
|
2921 | _('ignore white space when comparing lines')), | |||
|
2922 | ('b', 'ignore-space-change', None, | |||
|
2923 | _('ignore changes in the amount of white space')), | |||
|
2924 | ('B', 'ignore-blank-lines', None, | |||
|
2925 | _('ignore changes whose lines are all blank')), | |||
|
2926 | ('U', 'unified', '', _('number of lines of context to show')) | |||
|
2927 | ] | |||
|
2928 | ||||
2895 | table = { |
|
2929 | table = { | |
2896 | "^add": (add, walkopts + dryrunopts, _('hg add [OPTION]... [FILE]...')), |
|
2930 | "^add": (add, walkopts + dryrunopts, _('hg add [OPTION]... [FILE]...')), | |
2897 | "addremove": |
|
2931 | "addremove": | |
@@ -2955,8 +2989,8 table = { | |||||
2955 | _('a changeset up to which you would like to bundle')), |
|
2989 | _('a changeset up to which you would like to bundle')), | |
2956 | ('', 'base', [], |
|
2990 | ('', 'base', [], | |
2957 | _('a base changeset to specify instead of a destination')), |
|
2991 | _('a base changeset to specify instead of a destination')), | |
2958 | ('a', 'all', None, |
|
2992 | ('a', 'all', None, _('bundle all changesets in the repository')), | |
2959 | _('bundle all changesets in the repository')), |
|
2993 | ('t', 'type', 'bzip2', _('bundle compression type to use')), | |
2960 | ] + remoteopts, |
|
2994 | ] + remoteopts, | |
2961 | _('hg bundle [-f] [-a] [-r REV]... [--base REV]... FILE [DEST]')), |
|
2995 | _('hg bundle [-f] [-a] [-r REV]... [--base REV]... FILE [DEST]')), | |
2962 | "cat": |
|
2996 | "cat": | |
@@ -3031,29 +3065,14 table = { | |||||
3031 | "debugwalk": (debugwalk, walkopts, _('hg debugwalk [OPTION]... [FILE]...')), |
|
3065 | "debugwalk": (debugwalk, walkopts, _('hg debugwalk [OPTION]... [FILE]...')), | |
3032 | "^diff": |
|
3066 | "^diff": | |
3033 | (diff, |
|
3067 | (diff, | |
3034 | [('r', 'rev', [], _('revision')) |
|
3068 | [('r', 'rev', [], _('revision')) | |
3035 | ('a', 'text', None, _('treat all files as text')), |
|
3069 | ] + diffopts + diffopts2 + walkopts, | |
3036 | ('p', 'show-function', None, |
|
|||
3037 | _('show which function each change is in')), |
|
|||
3038 | ('g', 'git', None, _('use git extended diff format')), |
|
|||
3039 | ('', 'nodates', None, _("don't include dates in diff headers")), |
|
|||
3040 | ('w', 'ignore-all-space', None, |
|
|||
3041 | _('ignore white space when comparing lines')), |
|
|||
3042 | ('b', 'ignore-space-change', None, |
|
|||
3043 | _('ignore changes in the amount of white space')), |
|
|||
3044 | ('B', 'ignore-blank-lines', None, |
|
|||
3045 | _('ignore changes whose lines are all blank')), |
|
|||
3046 | ('U', 'unified', '', |
|
|||
3047 | _('number of lines of context to show')) |
|
|||
3048 | ] + walkopts, |
|
|||
3049 | _('hg diff [OPTION]... [-r REV1 [-r REV2]] [FILE]...')), |
|
3070 | _('hg diff [OPTION]... [-r REV1 [-r REV2]] [FILE]...')), | |
3050 | "^export": |
|
3071 | "^export": | |
3051 | (export, |
|
3072 | (export, | |
3052 | [('o', 'output', '', _('print output to file with formatted name')), |
|
3073 | [('o', 'output', '', _('print output to file with formatted name')), | |
3053 | ('a', 'text', None, _('treat all files as text')), |
|
3074 | ('', 'switch-parent', None, _('diff against the second parent')) | |
3054 | ('g', 'git', None, _('use git extended diff format')), |
|
3075 | ] + diffopts, | |
3055 | ('', 'nodates', None, _("don't include dates in diff headers")), |
|
|||
3056 | ('', 'switch-parent', None, _('diff against the second parent'))], |
|
|||
3057 | _('hg export [OPTION]... [-o OUTFILESPEC] REV...')), |
|
3076 | _('hg export [OPTION]... [-o OUTFILESPEC] REV...')), | |
3058 | "grep": |
|
3077 | "grep": | |
3059 | (grep, |
|
3078 | (grep, | |
@@ -3197,6 +3216,12 table = { | |||||
3197 | _('forcibly copy over an existing managed file')), |
|
3216 | _('forcibly copy over an existing managed file')), | |
3198 | ] + walkopts + dryrunopts, |
|
3217 | ] + walkopts + dryrunopts, | |
3199 | _('hg rename [OPTION]... SOURCE... DEST')), |
|
3218 | _('hg rename [OPTION]... SOURCE... DEST')), | |
|
3219 | "resolve": | |||
|
3220 | (resolve, | |||
|
3221 | [('l', 'list', None, _('list state of files needing merge')), | |||
|
3222 | ('m', 'mark', None, _('mark files as resolved')), | |||
|
3223 | ('u', 'unmark', None, _('unmark files as resolved'))], | |||
|
3224 | ('hg resolve [OPTION] [FILES...]')), | |||
3200 | "revert": |
|
3225 | "revert": | |
3201 | (revert, |
|
3226 | (revert, | |
3202 | [('a', 'all', None, _('revert all changes when no arguments given')), |
|
3227 | [('a', 'all', None, _('revert all changes when no arguments given')), | |
@@ -3271,7 +3296,7 table = { | |||||
3271 | _('hg unbundle [-u] FILE...')), |
|
3296 | _('hg unbundle [-u] FILE...')), | |
3272 | "^update|up|checkout|co": |
|
3297 | "^update|up|checkout|co": | |
3273 | (update, |
|
3298 | (update, | |
3274 | [('C', 'clean', None, _('overwrite locally modified files')), |
|
3299 | [('C', 'clean', None, _('overwrite locally modified files (no backup)')), | |
3275 | ('d', 'date', '', _('tipmost revision matching date')), |
|
3300 | ('d', 'date', '', _('tipmost revision matching date')), | |
3276 | ('r', 'rev', '', _('revision'))], |
|
3301 | ('r', 'rev', '', _('revision'))], | |
3277 | _('hg update [-C] [-d DATE] [[-r] REV]')), |
|
3302 | _('hg update [-C] [-d DATE] [[-r] REV]')), |
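Editorial note: the command-table entries changed above all share one shape: the key is the command name (a leading '^' marks it for short help, '|' separates aliases) and the value is a (function, options, synopsis) tuple, each option being a (short flag, long flag, default, help text) tuple. A minimal sketch of that layout, using a hypothetical extension-style command that is not part of this change:

def hello(ui, repo, *pats, **opts):
    """print a short greeting (hypothetical example command)"""
    ui.write("hello %s\n" % (opts.get('name') or "world"))

cmdtable = {
    # same (function, options, synopsis) layout as the table above
    "hello":
        (hello,
         [('n', 'name', '', 'name to greet')],
         'hg hello [-n NAME]'),
}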
@@ -5,35 +5,36 | |||||
5 | # This software may be used and distributed according to the terms |
|
5 | # This software may be used and distributed according to the terms | |
6 | # of the GNU General Public License, incorporated herein by reference. |
|
6 | # of the GNU General Public License, incorporated herein by reference. | |
7 |
|
7 | |||
8 | from node import nullid, nullrev, short |
|
8 | from node import nullid, nullrev, short, hex | |
9 | from i18n import _ |
|
9 | from i18n import _ | |
10 | import ancestor, bdiff, revlog, util, os, errno |
|
10 | import ancestor, bdiff, revlog, util, os, errno | |
11 |
|
11 | |||
12 | class changectx(object): |
|
12 | class changectx(object): | |
13 | """A changecontext object makes access to data related to a particular |
|
13 | """A changecontext object makes access to data related to a particular | |
14 | changeset convenient.""" |
|
14 | changeset convenient.""" | |
15 | def __init__(self, repo, changeid= |
|
15 | def __init__(self, repo, changeid=''): | |
16 | """changeid is a revision number, node, or tag""" |
|
16 | """changeid is a revision number, node, or tag""" | |
|
17 | if changeid == '': | |||
|
18 | changeid = '.' | |||
17 | self._repo = repo |
|
19 | self._repo = repo | |
18 |
|
||||
19 | if not changeid and changeid != 0: |
|
|||
20 | p1, p2 = self._repo.dirstate.parents() |
|
|||
21 | self._rev = self._repo.changelog.rev(p1) |
|
|||
22 | if self._rev == -1: |
|
|||
23 | changeid = 'tip' |
|
|||
24 | else: |
|
|||
25 | self._node = p1 |
|
|||
26 | return |
|
|||
27 |
|
||||
28 | self._node = self._repo.lookup(changeid) |
|
20 | self._node = self._repo.lookup(changeid) | |
29 | self._rev = self._repo.changelog.rev(self._node) |
|
21 | self._rev = self._repo.changelog.rev(self._node) | |
30 |
|
22 | |||
31 | def __str__(self): |
|
23 | def __str__(self): | |
32 | return short(self.node()) |
|
24 | return short(self.node()) | |
33 |
|
25 | |||
|
26 | def __int__(self): | |||
|
27 | return self.rev() | |||
|
28 | ||||
34 | def __repr__(self): |
|
29 | def __repr__(self): | |
35 | return "<changectx %s>" % str(self) |
|
30 | return "<changectx %s>" % str(self) | |
36 |
|
31 | |||
|
32 | def __hash__(self): | |||
|
33 | try: | |||
|
34 | return hash(self._rev) | |||
|
35 | except AttributeError: | |||
|
36 | return id(self) | |||
|
37 | ||||
37 | def __eq__(self, other): |
|
38 | def __eq__(self, other): | |
38 | try: |
|
39 | try: | |
39 | return self._rev == other._rev |
|
40 | return self._rev == other._rev | |
@@ -57,6 +58,12 class changectx(object): | |||||
57 | md = self._repo.manifest.readdelta(self._changeset[0]) |
|
58 | md = self._repo.manifest.readdelta(self._changeset[0]) | |
58 | self._manifestdelta = md |
|
59 | self._manifestdelta = md | |
59 | return self._manifestdelta |
|
60 | return self._manifestdelta | |
|
61 | elif name == '_parents': | |||
|
62 | p = self._repo.changelog.parents(self._node) | |||
|
63 | if p[1] == nullid: | |||
|
64 | p = p[:-1] | |||
|
65 | self._parents = [changectx(self._repo, x) for x in p] | |||
|
66 | return self._parents | |||
60 | else: |
|
67 | else: | |
61 | raise AttributeError, name |
|
68 | raise AttributeError, name | |
62 |
|
69 | |||
@@ -67,9 +74,7 class changectx(object): | |||||
67 | return self.filectx(key) |
|
74 | return self.filectx(key) | |
68 |
|
75 | |||
69 | def __iter__(self): |
|
76 | def __iter__(self): | |
70 |
|
|
77 | for f in util.sort(self._manifest): | |
71 | a.sort() |
|
|||
72 | for f in a: |
|
|||
73 | yield f |
|
78 | yield f | |
74 |
|
79 | |||
75 | def changeset(self): return self._changeset |
|
80 | def changeset(self): return self._changeset | |
@@ -77,6 +82,7 class changectx(object): | |||||
77 |
|
82 | |||
78 | def rev(self): return self._rev |
|
83 | def rev(self): return self._rev | |
79 | def node(self): return self._node |
|
84 | def node(self): return self._node | |
|
85 | def hex(self): return hex(self._node) | |||
80 | def user(self): return self._changeset[1] |
|
86 | def user(self): return self._changeset[1] | |
81 | def date(self): return self._changeset[2] |
|
87 | def date(self): return self._changeset[2] | |
82 | def files(self): return self._changeset[3] |
|
88 | def files(self): return self._changeset[3] | |
@@ -87,14 +93,21 class changectx(object): | |||||
87 |
|
93 | |||
88 | def parents(self): |
|
94 | def parents(self): | |
89 | """return contexts for each parent changeset""" |
|
95 | """return contexts for each parent changeset""" | |
90 | p = self._repo.changelog.parents(self._node) |
|
96 | return self._parents | |
91 | return [changectx(self._repo, x) for x in p] |
|
|||
92 |
|
97 | |||
93 | def children(self): |
|
98 | def children(self): | |
94 | """return contexts for each child changeset""" |
|
99 | """return contexts for each child changeset""" | |
95 | c = self._repo.changelog.children(self._node) |
|
100 | c = self._repo.changelog.children(self._node) | |
96 | return [changectx(self._repo, x) for x in c] |
|
101 | return [changectx(self._repo, x) for x in c] | |
97 |
|
102 | |||
|
103 | def ancestors(self): | |||
|
104 | for a in self._repo.changelog.ancestors(self._rev): | |||
|
105 | yield changectx(self._repo, a) | |||
|
106 | ||||
|
107 | def descendants(self): | |||
|
108 | for d in self._repo.changelog.descendants(self._rev): | |||
|
109 | yield changectx(self._repo, d) | |||
|
110 | ||||
98 | def _fileinfo(self, path): |
|
111 | def _fileinfo(self, path): | |
99 | if '_manifest' in self.__dict__: |
|
112 | if '_manifest' in self.__dict__: | |
100 | try: |
|
113 | try: | |
@@ -115,7 +128,7 class changectx(object): | |||||
115 | def filenode(self, path): |
|
128 | def filenode(self, path): | |
116 | return self._fileinfo(path)[0] |
|
129 | return self._fileinfo(path)[0] | |
117 |
|
130 | |||
118 | def |
|
131 | def flags(self, path): | |
119 | try: |
|
132 | try: | |
120 | return self._fileinfo(path)[1] |
|
133 | return self._fileinfo(path)[1] | |
121 | except revlog.LookupError: |
|
134 | except revlog.LookupError: | |
@@ -128,15 +141,6 class changectx(object): | |||||
128 | return filectx(self._repo, path, fileid=fileid, |
|
141 | return filectx(self._repo, path, fileid=fileid, | |
129 | changectx=self, filelog=filelog) |
|
142 | changectx=self, filelog=filelog) | |
130 |
|
143 | |||
131 | def filectxs(self): |
|
|||
132 | """generate a file context for each file in this changeset's |
|
|||
133 | manifest""" |
|
|||
134 | mf = self.manifest() |
|
|||
135 | m = mf.keys() |
|
|||
136 | m.sort() |
|
|||
137 | for f in m: |
|
|||
138 | yield self.filectx(f, fileid=mf[f]) |
|
|||
139 |
|
||||
140 | def ancestor(self, c2): |
|
144 | def ancestor(self, c2): | |
141 | """ |
|
145 | """ | |
142 | return the ancestor context of self and c2 |
|
146 | return the ancestor context of self and c2 | |
@@ -144,6 +148,23 class changectx(object): | |||||
144 | n = self._repo.changelog.ancestor(self._node, c2._node) |
|
148 | n = self._repo.changelog.ancestor(self._node, c2._node) | |
145 | return changectx(self._repo, n) |
|
149 | return changectx(self._repo, n) | |
146 |
|
150 | |||
|
151 | def walk(self, match): | |||
|
152 | fdict = dict.fromkeys(match.files()) | |||
|
153 | # for dirstate.walk, files=['.'] means "walk the whole tree". | |||
|
154 | # follow that here, too | |||
|
155 | fdict.pop('.', None) | |||
|
156 | for fn in self: | |||
|
157 | for ffn in fdict: | |||
|
158 | # match if the file is the exact name or a directory | |||
|
159 | if ffn == fn or fn.startswith("%s/" % ffn): | |||
|
160 | del fdict[ffn] | |||
|
161 | break | |||
|
162 | if match(fn): | |||
|
163 | yield fn | |||
|
164 | for fn in util.sort(fdict): | |||
|
165 | if match.bad(fn, 'No such file in rev ' + str(self)) and match(fn): | |||
|
166 | yield fn | |||
|
167 | ||||
147 | class filectx(object): |
|
168 | class filectx(object): | |
148 | """A filecontext object makes access to data related to a particular |
|
169 | """A filecontext object makes access to data related to a particular | |
149 | filerevision convenient.""" |
|
170 | filerevision convenient.""" | |
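Editorial note: the walk() method added to changectx above filters the changeset manifest through a match object; elsewhere in this change such objects come from cmdutil.match(repo, pats, opts). A rough usage sketch under that assumption; the repository path and the 'src' pattern are illustrative, and hg.repository/ui.ui are standard API not shown in this diff:

from mercurial import hg, cmdutil
from mercurial import ui as uimod

repo = hg.repository(uimod.ui(), '/path/to/repo')   # illustrative path
ctx = repo['tip']                                    # repo[changeid] lookup used in this change
m = cmdutil.match(repo, ['src'], {})                 # match files under src/
for f in ctx.walk(m):                                # yields matching filenames from the manifest
    print f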
@@ -210,6 +231,12 class filectx(object): | |||||
210 | def __repr__(self): |
|
231 | def __repr__(self): | |
211 | return "<filectx %s>" % str(self) |
|
232 | return "<filectx %s>" % str(self) | |
212 |
|
233 | |||
|
234 | def __hash__(self): | |||
|
235 | try: | |||
|
236 | return hash((self._path, self._fileid)) | |||
|
237 | except AttributeError: | |||
|
238 | return id(self) | |||
|
239 | ||||
213 | def __eq__(self, other): |
|
240 | def __eq__(self, other): | |
214 | try: |
|
241 | try: | |
215 | return (self._path == other._path |
|
242 | return (self._path == other._path | |
@@ -228,9 +255,7 class filectx(object): | |||||
228 |
|
255 | |||
229 | def filerev(self): return self._filerev |
|
256 | def filerev(self): return self._filerev | |
230 | def filenode(self): return self._filenode |
|
257 | def filenode(self): return self._filenode | |
231 | def |
|
258 | def flags(self): return self._changectx.flags(self._path) | |
232 | def isexec(self): return 'x' in self.fileflags() |
|
|||
233 | def islink(self): return 'l' in self.fileflags() |
|
|||
234 | def filelog(self): return self._filelog |
|
259 | def filelog(self): return self._filelog | |
235 |
|
260 | |||
236 | def rev(self): |
|
261 | def rev(self): | |
@@ -376,12 +401,11 class filectx(object): | |||||
376 | # sort by revision (per file) which is a topological order |
|
401 | # sort by revision (per file) which is a topological order | |
377 | visit = [] |
|
402 | visit = [] | |
378 | for f in files: |
|
403 | for f in files: | |
379 | fn = [(n.rev(), n) for n in needed |
|
404 | fn = [(n.rev(), n) for n in needed if n._path == f] | |
380 | visit.extend(fn) |
|
405 | visit.extend(fn) | |
381 | visit.sort() |
|
406 | ||
382 | hist = {} |
|
407 | hist = {} | |
383 |
|
408 | for r, f in util.sort(visit): | ||
384 | for r, f in visit: |
|
|||
385 | curr = decorate(f.data(), f) |
|
409 | curr = decorate(f.data(), f) | |
386 | for p in parents(f): |
|
410 | for p in parents(f): | |
387 | if p != nullid: |
|
411 | if p != nullid: | |
@@ -432,11 +456,41 class filectx(object): | |||||
432 |
|
456 | |||
433 | class workingctx(changectx): |
|
457 | class workingctx(changectx): | |
434 | """A workingctx object makes access to data related to |
|
458 | """A workingctx object makes access to data related to | |
435 | the current working directory convenient. |
|
459 | the current working directory convenient. | |
436 | def __init__(self, repo): |
|
460 | parents - a pair of parent nodeids, or None to use the dirstate. | |
|
461 | date - any valid date string or (unixtime, offset), or None. | |||
|
462 | user - username string, or None. | |||
|
463 | extra - a dictionary of extra values, or None. | |||
|
464 | changes - a list of file lists as returned by localrepo.status() | |||
|
465 | or None to use the repository status. | |||
|
466 | """ | |||
|
467 | def __init__(self, repo, parents=None, text="", user=None, date=None, | |||
|
468 | extra=None, changes=None): | |||
437 | self._repo = repo |
|
469 | self._repo = repo | |
438 | self._rev = None |
|
470 | self._rev = None | |
439 | self._node = None |
|
471 | self._node = None | |
|
472 | self._text = text | |||
|
473 | if date: | |||
|
474 | self._date = util.parsedate(date) | |||
|
475 | if user: | |||
|
476 | self._user = user | |||
|
477 | if parents: | |||
|
478 | self._parents = [changectx(self._repo, p) for p in parents] | |||
|
479 | if changes: | |||
|
480 | self._status = list(changes) | |||
|
481 | ||||
|
482 | self._extra = {} | |||
|
483 | if extra: | |||
|
484 | self._extra = extra.copy() | |||
|
485 | if 'branch' not in self._extra: | |||
|
486 | branch = self._repo.dirstate.branch() | |||
|
487 | try: | |||
|
488 | branch = branch.decode('UTF-8').encode('UTF-8') | |||
|
489 | except UnicodeDecodeError: | |||
|
490 | raise util.Abort(_('branch name not in UTF-8!')) | |||
|
491 | self._extra['branch'] = branch | |||
|
492 | if self._extra['branch'] == '': | |||
|
493 | self._extra['branch'] = 'default' | |||
440 |
|
494 | |||
441 | def __str__(self): |
|
495 | def __str__(self): | |
442 | return str(self._parents[0]) + "+" |
|
496 | return str(self._parents[0]) + "+" | |
@@ -444,16 +498,28 class workingctx(changectx): | |||||
444 | def __nonzero__(self): |
|
498 | def __nonzero__(self): | |
445 | return True |
|
499 | return True | |
446 |
|
500 | |||
|
501 | def __contains__(self, key): | |||
|
502 | return self._dirstate[key] not in "?r" | |||
|
503 | ||||
447 | def __getattr__(self, name): |
|
504 | def __getattr__(self, name): | |
448 | if name == '_parents': |
|
|||
449 | self._parents = self._repo.parents() |
|
|||
450 | return self._parents |
|
|||
451 | if name == '_status': |
|
505 | if name == '_status': | |
452 | self._status = self._repo.status() |
|
506 | self._status = self._repo.status(unknown=True) | |
453 | return self._status |
|
507 | return self._status | |
|
508 | elif name == '_user': | |||
|
509 | self._user = self._repo.ui.username() | |||
|
510 | return self._user | |||
|
511 | elif name == '_date': | |||
|
512 | self._date = util.makedate() | |||
|
513 | return self._date | |||
454 | if name == '_manifest': |
|
514 | if name == '_manifest': | |
455 | self._buildmanifest() |
|
515 | self._buildmanifest() | |
456 | return self._manifest |
|
516 | return self._manifest | |
|
517 | elif name == '_parents': | |||
|
518 | p = self._repo.dirstate.parents() | |||
|
519 | if p[1] == nullid: | |||
|
520 | p = p[:-1] | |||
|
521 | self._parents = [changectx(self._repo, x) for x in p] | |||
|
522 | return self._parents | |||
457 | else: |
|
523 | else: | |
458 | raise AttributeError, name |
|
524 | raise AttributeError, name | |
459 |
|
525 | |||
@@ -462,16 +528,14 class workingctx(changectx): | |||||
462 |
|
528 | |||
463 | man = self._parents[0].manifest().copy() |
|
529 | man = self._parents[0].manifest().copy() | |
464 | copied = self._repo.dirstate.copies() |
|
530 | copied = self._repo.dirstate.copies() | |
465 | is_exec = util.execfunc(self._repo.root, |
|
531 | cf = lambda x: man.flags(copied.get(x, x)) | |
466 | lambda p: man.execf(copied.get(p,p))) |
|
532 | ff = self._repo.dirstate.flagfunc(cf) | |
467 | is_link = util.linkfunc(self._repo.root, |
|
|||
468 | lambda p: man.linkf(copied.get(p,p))) |
|
|||
469 | modified, added, removed, deleted, unknown = self._status[:5] |
|
533 | modified, added, removed, deleted, unknown = self._status[:5] | |
470 | for i, l in (("a", added), ("m", modified), ("u", unknown)): |
|
534 | for i, l in (("a", added), ("m", modified), ("u", unknown)): | |
471 | for f in l: |
|
535 | for f in l: | |
472 | man[f] = man.get(copied.get(f, f), nullid) + i |
|
536 | man[f] = man.get(copied.get(f, f), nullid) + i | |
473 | try: |
|
537 | try: | |
474 | man.set(f, |
|
538 | man.set(f, ff(f)) | |
475 | except OSError: |
|
539 | except OSError: | |
476 | pass |
|
540 | pass | |
477 |
|
541 | |||
@@ -483,13 +547,11 class workingctx(changectx): | |||||
483 |
|
547 | |||
484 | def manifest(self): return self._manifest |
|
548 | def manifest(self): return self._manifest | |
485 |
|
549 | |||
486 | def user(self): return self._repo.ui.username() |
|
550 | def user(self): return self._user or self._repo.ui.username() | |
487 | def date(self): return |
|
551 | def date(self): return self._date | |
488 | def description(self): return |
|
552 | def description(self): return self._text | |
489 | def files(self): |
|
553 | def files(self): | |
490 | f = self.modified() + self.added() + self.removed() |
|
554 | return util.sort(self._status[0] + self._status[1] + self._status[2]) | |
491 | f.sort() |
|
|||
492 | return f |
|
|||
493 |
|
555 | |||
494 | def modified(self): return self._status[0] |
|
556 | def modified(self): return self._status[0] | |
495 | def added(self): return self._status[1] |
|
557 | def added(self): return self._status[1] | |
@@ -497,21 +559,18 class workingctx(changectx): | |||||
497 | def deleted(self): return self._status[3] |
|
559 | def deleted(self): return self._status[3] | |
498 | def unknown(self): return self._status[4] |
|
560 | def unknown(self): return self._status[4] | |
499 | def clean(self): return self._status[5] |
|
561 | def clean(self): return self._status[5] | |
500 | def branch(self): return self._ |
|
562 | def branch(self): return self._extra['branch'] | |
|
563 | def extra(self): return self._extra | |||
501 |
|
564 | |||
502 | def tags(self): |
|
565 | def tags(self): | |
503 | t = [] |
|
566 | t = [] | |
504 | [t.extend(p.tags()) for p in self.parents()] |
|
567 | [t.extend(p.tags()) for p in self.parents()] | |
505 | return t |
|
568 | return t | |
506 |
|
569 | |||
507 | def parents(self): |
|
|||
508 | """return contexts for each parent changeset""" |
|
|||
509 | return self._parents |
|
|||
510 |
|
||||
511 | def children(self): |
|
570 | def children(self): | |
512 | return [] |
|
571 | return [] | |
513 |
|
572 | |||
514 | def |
|
573 | def flags(self, path): | |
515 | if '_manifest' in self.__dict__: |
|
574 | if '_manifest' in self.__dict__: | |
516 | try: |
|
575 | try: | |
517 | return self._manifest.flags(path) |
|
576 | return self._manifest.flags(path) | |
@@ -521,12 +580,9 class workingctx(changectx): | |||||
521 | pnode = self._parents[0].changeset()[0] |
|
580 | pnode = self._parents[0].changeset()[0] | |
522 | orig = self._repo.dirstate.copies().get(path, path) |
|
581 | orig = self._repo.dirstate.copies().get(path, path) | |
523 | node, flag = self._repo.manifest.find(pnode, orig) |
|
582 | node, flag = self._repo.manifest.find(pnode, orig) | |
524 | is_link = util.linkfunc(self._repo.root, |
|
|||
525 | lambda p: flag and 'l' in flag) |
|
|||
526 | is_exec = util.execfunc(self._repo.root, |
|
|||
527 | lambda p: flag and 'x' in flag) |
|
|||
528 | try: |
|
583 | try: | |
529 | return (is_link(path) and 'l' or '') + (is_exec(path) and 'x' or '') |
|
584 | ff = self._repo.dirstate.flagfunc(lambda x: flag or '') | |
|
585 | return ff(path) | |||
530 | except OSError: |
|
586 | except OSError: | |
531 | pass |
|
587 | pass | |
532 |
|
588 | |||
@@ -543,6 +599,9 class workingctx(changectx): | |||||
543 | """return the ancestor context of self and c2""" |
|
599 | """return the ancestor context of self and c2""" | |
544 | return self._parents[0].ancestor(c2) # punt on two parents for now |
|
600 | return self._parents[0].ancestor(c2) # punt on two parents for now | |
545 |
|
601 | |||
|
602 | def walk(self, match): | |||
|
603 | return util.sort(self._repo.dirstate.walk(match, True, False).keys()) | |||
|
604 | ||||
546 | class workingfilectx(filectx): |
|
605 | class workingfilectx(filectx): | |
547 | """A workingfilectx object makes access to data related to a particular |
|
606 | """A workingfilectx object makes access to data related to a particular | |
548 | file in the working directory convenient.""" |
|
607 | file in the working directory convenient.""" | |
@@ -625,3 +684,92 class workingfilectx(filectx): | |||||
625 | return (t, tz) |
|
684 | return (t, tz) | |
626 |
|
685 | |||
627 | def cmp(self, text): return self._repo.wread(self._path) == text |
|
686 | def cmp(self, text): return self._repo.wread(self._path) == text | |
|
687 | ||||
|
688 | class memctx(object): | |||
|
689 | """A memctx is a subset of changectx supposed to be built on memory | |||
|
690 | and passed to commit functions. | |||
|
691 | ||||
|
692 | NOTE: this interface and the related memfilectx are experimental and | |||
|
693 | may change without notice. | |||
|
694 | ||||
|
695 | parents - a pair of parent nodeids. | |||
|
696 | filectxfn - a callable taking (repo, memctx, path) arguments and | |||
|
697 | returning a memctx object. | |||
|
698 | date - any valid date string or (unixtime, offset), or None. | |||
|
699 | user - username string, or None. | |||
|
700 | extra - a dictionary of extra values, or None. | |||
|
701 | """ | |||
|
702 | def __init__(self, repo, parents, text, files, filectxfn, user=None, | |||
|
703 | date=None, extra=None): | |||
|
704 | self._repo = repo | |||
|
705 | self._rev = None | |||
|
706 | self._node = None | |||
|
707 | self._text = text | |||
|
708 | self._date = date and util.parsedate(date) or util.makedate() | |||
|
709 | self._user = user | |||
|
710 | parents = [(p or nullid) for p in parents] | |||
|
711 | p1, p2 = parents | |||
|
712 | self._parents = [changectx(self._repo, p) for p in (p1, p2)] | |||
|
713 | files = util.sort(list(files)) | |||
|
714 | self._status = [files, [], [], [], []] | |||
|
715 | self._filectxfn = filectxfn | |||
|
716 | ||||
|
717 | self._extra = extra and extra.copy() or {} | |||
|
718 | if 'branch' not in self._extra: | |||
|
719 | self._extra['branch'] = 'default' | |||
|
720 | elif self._extra.get('branch') == '': | |||
|
721 | self._extra['branch'] = 'default' | |||
|
722 | ||||
|
723 | def __str__(self): | |||
|
724 | return str(self._parents[0]) + "+" | |||
|
725 | ||||
|
726 | def __int__(self): | |||
|
727 | return self._rev | |||
|
728 | ||||
|
729 | def __nonzero__(self): | |||
|
730 | return True | |||
|
731 | ||||
|
732 | def user(self): return self._user or self._repo.ui.username() | |||
|
733 | def date(self): return self._date | |||
|
734 | def description(self): return self._text | |||
|
735 | def files(self): return self.modified() | |||
|
736 | def modified(self): return self._status[0] | |||
|
737 | def added(self): return self._status[1] | |||
|
738 | def removed(self): return self._status[2] | |||
|
739 | def deleted(self): return self._status[3] | |||
|
740 | def unknown(self): return self._status[4] | |||
|
741 | def clean(self): return self._status[5] | |||
|
742 | def branch(self): return self._extra['branch'] | |||
|
743 | def extra(self): return self._extra | |||
|
744 | def flags(self, f): return self[f].flags() | |||
|
745 | ||||
|
746 | def parents(self): | |||
|
747 | """return contexts for each parent changeset""" | |||
|
748 | return self._parents | |||
|
749 | ||||
|
750 | def filectx(self, path, filelog=None): | |||
|
751 | """get a file context from the working directory""" | |||
|
752 | return self._filectxfn(self._repo, self, path) | |||
|
753 | ||||
|
754 | class memfilectx(object): | |||
|
755 | """A memfilectx is a subset of filectx supposed to be built by client | |||
|
756 | code and passed to commit functions. | |||
|
757 | """ | |||
|
758 | def __init__(self, path, data, islink, isexec, copied): | |||
|
759 | """copied is the source file path, or None.""" | |||
|
760 | self._path = path | |||
|
761 | self._data = data | |||
|
762 | self._flags = (islink and 'l' or '') + (isexec and 'x' or '') | |||
|
763 | self._copied = None | |||
|
764 | if copied: | |||
|
765 | self._copied = (copied, nullid) | |||
|
766 | ||||
|
767 | def __nonzero__(self): return True | |||
|
768 | def __str__(self): return "%s@%s" % (self.path(), self._changectx) | |||
|
769 | def path(self): return self._path | |||
|
770 | def data(self): return self._data | |||
|
771 | def flags(self): return self._flags | |||
|
772 | def isexec(self): return 'x' in self._flags | |||
|
773 | def islink(self): return 'l' in self._flags | |||
|
774 | def renamed(self): return self._copied | |||
|
775 |
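Editorial note: the memctx/memfilectx pair added above is, by its own NOTE, experimental; roughly, client code supplies the list of changed files plus a callback that builds each file revision in memory. A sketch of how such a commit might be assembled, assuming an already-open repo object, that the callback returns a memfilectx per listed path, and that some commit function (such as repo.commitctx, not shown in this diff) accepts the resulting memctx:

from mercurial import context
from mercurial.node import nullid

def getfilectx(repo, memctx, path):
    # build the file revision in memory for 'path'
    return context.memfilectx(path, 'new contents\n',
                              islink=False, isexec=False, copied=None)

p1 = repo.changelog.tip()
ctx = context.memctx(repo, (p1, nullid), 'in-memory commit example',
                     ['hello.txt'], getfilectx,
                     user='someone <s@example.com>')
# a commit function that understands memctx would then turn ctx
# into a real changeset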
@@ -11,9 +11,7 import util, heapq | |||||
11 |
|
11 | |||
12 | def _nonoverlap(d1, d2, d3): |
|
12 | def _nonoverlap(d1, d2, d3): | |
13 | "Return list of elements in d1 not in d2 or d3" |
|
13 | "Return list of elements in d1 not in d2 or d3" | |
14 |
|
|
14 | return util.sort([d for d in d1 if d not in d3 and d not in d2]) | |
15 | l.sort() |
|
|||
16 | return l |
|
|||
17 |
|
15 | |||
18 | def _dirname(f): |
|
16 | def _dirname(f): | |
19 | s = f.rfind("/") |
|
17 | s = f.rfind("/") | |
@@ -49,9 +47,7 def _findoldnames(fctx, limit): | |||||
49 | visit += [(p, depth - 1) for p in fc.parents()] |
|
47 | visit += [(p, depth - 1) for p in fc.parents()] | |
50 |
|
48 | |||
51 | # return old names sorted by depth |
|
49 | # return old names sorted by depth | |
52 | old = old.values() |
|
50 | return [o[1] for o in util.sort(old.values())] | |
53 | old.sort() |
|
|||
54 | return [o[1] for o in old] |
|
|||
55 |
|
51 | |||
56 | def _findlimit(repo, a, b): |
|
52 | def _findlimit(repo, a, b): | |
57 | "find the earliest revision that's an ancestor of a or b but not both" |
|
53 | "find the earliest revision that's an ancestor of a or b but not both" | |
@@ -67,7 +63,7 def _findlimit(repo, a, b): | |||||
67 | # - quit when interesting revs is zero |
|
63 | # - quit when interesting revs is zero | |
68 |
|
64 | |||
69 | cl = repo.changelog |
|
65 | cl = repo.changelog | |
70 | working = cl |
|
66 | working = len(cl) # pseudo rev for the working directory | |
71 | if a is None: |
|
67 | if a is None: | |
72 | a = working |
|
68 | a = working | |
73 | if b is None: |
|
69 | if b is None: | |
@@ -109,6 +105,10 def copies(repo, c1, c2, ca, checkdirs=F | |||||
109 | if not c1 or not c2 or c1 == c2: |
|
105 | if not c1 or not c2 or c1 == c2: | |
110 | return {}, {} |
|
106 | return {}, {} | |
111 |
|
107 | |||
|
108 | # avoid silly behavior for parent -> working dir | |||
|
109 | if c2.node() == None and c1.node() == repo.dirstate.parents()[0]: | |||
|
110 | return repo.dirstate.copies(), {} | |||
|
111 | ||||
112 | limit = _findlimit(repo, c1.rev(), c2.rev()) |
|
112 | limit = _findlimit(repo, c1.rev(), c2.rev()) | |
113 | m1 = c1.manifest() |
|
113 | m1 = c1.manifest() | |
114 | m2 = c2.manifest() |
|
114 | m2 = c2.manifest() |
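Editorial note: several rewrites above (in copies.py as well as context.py and dirstate.py) replace the "copy, sort in place, iterate" pattern with util.sort. util.sort itself is not part of this diff; from its uses here it appears to take any iterable and return a new sorted list, roughly:

# rough equivalent of util.sort as used above (assumption, not shown in this diff)
def sort(l):
    l = list(l)
    l.sort()
    return l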
@@ -9,12 +9,20 of the GNU General Public License, incor | |||||
9 |
|
9 | |||
10 | from node import nullid |
|
10 | from node import nullid | |
11 | from i18n import _ |
|
11 | from i18n import _ | |
12 | import struct, os, bisect, stat, |
|
12 | import struct, os, bisect, stat, util, errno, ignore | |
13 | import cStringIO, osutil, sys |
|
13 | import cStringIO, osutil, sys | |
14 |
|
14 | |||
15 | _unknown = ('?', 0, 0, 0) |
|
15 | _unknown = ('?', 0, 0, 0) | |
16 | _format = ">cllll" |
|
16 | _format = ">cllll" | |
17 |
|
17 | |||
|
18 | def _finddirs(path): | |||
|
19 | pos = len(path) | |||
|
20 | while 1: | |||
|
21 | pos = path.rfind('/', 0, pos) | |||
|
22 | if pos == -1: | |||
|
23 | break | |||
|
24 | yield path[:pos] | |||
|
25 | ||||
18 | class dirstate(object): |
|
26 | class dirstate(object): | |
19 |
|
27 | |||
20 | def __init__(self, opener, ui, root): |
|
28 | def __init__(self, opener, ui, root): | |
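Editorial note: the _finddirs generator added above yields every ancestor directory of a repo-relative path, deepest first, and nothing for a top-level name. A quick standalone check:

def _finddirs(path):
    # same generator as above, repeated here so the example runs standalone
    pos = len(path)
    while 1:
        pos = path.rfind('/', 0, pos)
        if pos == -1:
            break
        yield path[:pos]

print list(_finddirs('a/b/c'))   # ['a/b', 'a']
print list(_finddirs('c'))       # []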
@@ -31,6 +39,13 class dirstate(object): | |||||
31 | elif name == '_copymap': |
|
39 | elif name == '_copymap': | |
32 | self._read() |
|
40 | self._read() | |
33 | return self._copymap |
|
41 | return self._copymap | |
|
42 | elif name == '_foldmap': | |||
|
43 | _foldmap = {} | |||
|
44 | for name in self._map: | |||
|
45 | norm = os.path.normcase(os.path.normpath(name)) | |||
|
46 | _foldmap[norm] = name | |||
|
47 | self._foldmap = _foldmap | |||
|
48 | return self._foldmap | |||
34 | elif name == '_branch': |
|
49 | elif name == '_branch': | |
35 | try: |
|
50 | try: | |
36 | self._branch = (self._opener("branch").read().strip() |
|
51 | self._branch = (self._opener("branch").read().strip() | |
@@ -48,10 +63,12 class dirstate(object): | |||||
48 | if err.errno != errno.ENOENT: raise |
|
63 | if err.errno != errno.ENOENT: raise | |
49 | return self._pl |
|
64 | return self._pl | |
50 | elif name == '_dirs': |
|
65 | elif name == '_dirs': | |
51 |
|
|
66 | dirs = {} | |
52 | for f in self._map: |
|
67 | for f,s in self._map.items(): | |
53 | if s |
|
68 | if s[0] != 'r': | |
54 |
|
|
69 | for base in _finddirs(f): | |
|
70 | dirs[base] = dirs.get(base, 0) + 1 | |||
|
71 | self._dirs = dirs | |||
55 | return self._dirs |
|
72 | return self._dirs | |
56 | elif name == '_ignore': |
|
73 | elif name == '_ignore': | |
57 | files = [self._join('.hgignore')] |
|
74 | files = [self._join('.hgignore')] | |
@@ -63,15 +80,55 class dirstate(object): | |||||
63 | elif name == '_slash': |
|
80 | elif name == '_slash': | |
64 | self._slash = self._ui.configbool('ui', 'slash') and os.sep != '/' |
|
81 | self._slash = self._ui.configbool('ui', 'slash') and os.sep != '/' | |
65 | return self._slash |
|
82 | return self._slash | |
|
83 | elif name == '_checklink': | |||
|
84 | self._checklink = util.checklink(self._root) | |||
|
85 | return self._checklink | |||
66 | elif name == '_checkexec': |
|
86 | elif name == '_checkexec': | |
67 | self._checkexec = util.checkexec(self._root) |
|
87 | self._checkexec = util.checkexec(self._root) | |
68 | return self._checkexec |
|
88 | return self._checkexec | |
|
89 | elif name == '_checkcase': | |||
|
90 | self._checkcase = not util.checkcase(self._join('.hg')) | |||
|
91 | return self._checkcase | |||
|
92 | elif name == 'normalize': | |||
|
93 | if self._checkcase: | |||
|
94 | self.normalize = self._normalize | |||
|
95 | else: | |||
|
96 | self.normalize = lambda x: x | |||
|
97 | return self.normalize | |||
69 | else: |
|
98 | else: | |
70 | raise AttributeError, name |
|
99 | raise AttributeError, name | |
71 |
|
100 | |||
72 | def _join(self, f): |
|
101 | def _join(self, f): | |
73 | return os.path.join(self._root, f) |
|
102 | return os.path.join(self._root, f) | |
74 |
|
103 | |||
|
104 | def flagfunc(self, fallback): | |||
|
105 | if self._checklink: | |||
|
106 | if self._checkexec: | |||
|
107 | def f(x): | |||
|
108 | p = os.path.join(self._root, x) | |||
|
109 | if os.path.islink(p): | |||
|
110 | return 'l' | |||
|
111 | if util.is_exec(p): | |||
|
112 | return 'x' | |||
|
113 | return '' | |||
|
114 | return f | |||
|
115 | def f(x): | |||
|
116 | if os.path.islink(os.path.join(self._root, x)): | |||
|
117 | return 'l' | |||
|
118 | if 'x' in fallback(x): | |||
|
119 | return 'x' | |||
|
120 | return '' | |||
|
121 | return f | |||
|
122 | if self._checkexec: | |||
|
123 | def f(x): | |||
|
124 | if 'l' in fallback(x): | |||
|
125 | return 'l' | |||
|
126 | if util.is_exec(os.path.join(self._root, x)): | |||
|
127 | return 'x' | |||
|
128 | return '' | |||
|
129 | return f | |||
|
130 | return fallback | |||
|
131 | ||||
75 | def getcwd(self): |
|
132 | def getcwd(self): | |
76 | cwd = os.getcwd() |
|
133 | cwd = os.getcwd() | |
77 | if cwd == self._root: return '' |
|
134 | if cwd == self._root: return '' | |
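Editorial note: flagfunc above returns a callable mapping a repo-relative path to 'l', 'x' or '', using the filesystem where symlinks and exec bits are supported and the supplied fallback otherwise. A condensed restatement of how workingctx._buildmanifest in this same change consumes it (a sketch, assuming repo is an open repository, man a parent manifest, copied the dirstate copy map, and modified/added the usual status lists):

cf = lambda x: man.flags(copied.get(x, x))   # fallback: flags recorded in the parent manifest
ff = repo.dirstate.flagfunc(cf)
for f in modified + added:
    man.set(f, ff(f))                        # 'l', 'x' or '' for each changed file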
@@ -106,9 +163,7 class dirstate(object): | |||||
106 | return key in self._map |
|
163 | return key in self._map | |
107 |
|
164 | |||
108 | def __iter__(self): |
|
165 | def __iter__(self): | |
109 |
|
|
166 | for x in util.sort(self._map): | |
110 | a.sort() |
|
|||
111 | for x in a: |
|
|||
112 | yield x |
|
167 | yield x | |
113 |
|
168 | |||
114 | def parents(self): |
|
169 | def parents(self): | |
@@ -161,7 +216,7 class dirstate(object): | |||||
161 | dmap[f] = e # we hold onto e[4] because making a subtuple is slow |
|
216 | dmap[f] = e # we hold onto e[4] because making a subtuple is slow | |
162 |
|
217 | |||
163 | def invalidate(self): |
|
218 | def invalidate(self): | |
164 | for a in "_map _copymap _branch _pl _dirs _ignore".split(): |
|
219 | for a in "_map _copymap _foldmap _branch _pl _dirs _ignore".split(): | |
165 | if a in self.__dict__: |
|
220 | if a in self.__dict__: | |
166 | delattr(self, a) |
|
221 | delattr(self, a) | |
167 | self._dirty = False |
|
222 | self._dirty = False | |
@@ -178,67 +233,39 class dirstate(object): | |||||
178 | def copies(self): |
|
233 | def copies(self): | |
179 | return self._copymap |
|
234 | return self._copymap | |
180 |
|
235 | |||
181 | def _ |
|
236 | def _droppath(self, f): | |
182 | c = path.rfind('/') |
|
237 | if self[f] not in "?r" and "_dirs" in self.__dict__: | |
183 | if c >= 0: |
|
|||
184 | dirs = self._dirs |
|
238 | dirs = self._dirs | |
185 | base = path[:c] |
|
239 | for base in _finddirs(f): | |
186 | if base not in dirs: |
|
|||
187 | self._incpath(base) |
|
|||
188 | dirs[base] = 1 |
|
|||
189 | else: |
|
|||
190 | dirs[base] += 1 |
|
|||
191 |
|
||||
192 | def _decpath(self, path): |
|
|||
193 | c = path.rfind('/') |
|
|||
194 | if c >= 0: |
|
|||
195 | base = path[:c] |
|
|||
196 | dirs = self._dirs |
|
|||
197 | if dirs[base] == 1: |
|
240 | if dirs[base] == 1: | |
198 | del dirs[base] |
|
241 | del dirs[base] | |
199 | self._decpath(base) |
|
|||
200 | else: |
|
242 | else: | |
201 | dirs[base] -= 1 |
|
243 | dirs[base] -= 1 | |
202 |
|
244 | |||
203 |
def _ |
|
245 | def _addpath(self, f, check=False): | |
|
246 | oldstate = self[f] | |||
|
247 | if check or oldstate == "r": | |||
204 | if '\r' in f or '\n' in f: |
|
248 | if '\r' in f or '\n' in f: | |
205 | raise util.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") |
|
249 | raise util.Abort( | |
206 | % f) |
|
250 | _("'\\n' and '\\r' disallowed in filenames: %r") % f) | |
207 | # shadows |
|
|||
208 | if f in self._dirs: |
|
251 | if f in self._dirs: | |
209 | raise util.Abort(_('directory %r already in dirstate') % f) |
|
252 | raise util.Abort(_('directory %r already in dirstate') % f) | |
210 | for c in strutil.rfindall(f, '/'): |
|
253 | # shadows | |
211 |
d |
|
254 | for d in _finddirs(f): | |
212 | if d in self._dirs: |
|
255 | if d in self._dirs: | |
213 | break |
|
256 | break | |
214 | if d in self._map and self[d] != 'r': |
|
257 | if d in self._map and self[d] != 'r': | |
215 | raise util.Abort(_('file %r in dirstate clashes with %r') % |
|
258 | raise util.Abort( | |
216 |
|
|
259 | _('file %r in dirstate clashes with %r') % (d, f)) | |
217 | self._incpath(f) |
|
260 | if oldstate in "?r" and "_dirs" in self.__dict__: | |
218 |
|
261 | dirs = self._dirs | ||
219 | def _changepath(self, f, newstate, relaxed=False): |
|
262 | for base in _finddirs(f): | |
220 | # handle upcoming path changes |
|
263 | dirs[base] = dirs.get(base, 0) + 1 | |
221 | oldstate = self[f] |
|
|||
222 | if oldstate not in "?r" and newstate in "?r": |
|
|||
223 | if "_dirs" in self.__dict__: |
|
|||
224 | self._decpath(f) |
|
|||
225 | return |
|
|||
226 | if oldstate in "?r" and newstate not in "?r": |
|
|||
227 | if relaxed and oldstate == '?': |
|
|||
228 | # XXX |
|
|||
229 | # in relaxed mode we assume the caller knows |
|
|||
230 | # what it is doing, workaround for updating |
|
|||
231 | # dir-to-file revisions |
|
|||
232 | if "_dirs" in self.__dict__: |
|
|||
233 | self._incpath(f) |
|
|||
234 | return |
|
|||
235 | self._incpathcheck(f) |
|
|||
236 | return |
|
|||
237 |
|
264 | |||
238 | def normal(self, f): |
|
265 | def normal(self, f): | |
239 | 'mark a file normal and clean' |
|
266 | 'mark a file normal and clean' | |
240 | self._dirty = True |
|
267 | self._dirty = True | |
241 |
self._ |
|
268 | self._addpath(f) | |
242 | s = os.lstat(self._join(f)) |
|
269 | s = os.lstat(self._join(f)) | |
243 | self._map[f] = ('n', s.st_mode, s.st_size, s.st_mtime, 0) |
|
270 | self._map[f] = ('n', s.st_mode, s.st_size, s.st_mtime, 0) | |
244 | if f in self._copymap: |
|
271 | if f in self._copymap: | |
@@ -262,7 +289,7 class dirstate(object): | |||||
262 | if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2: |
|
289 | if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2: | |
263 | return |
|
290 | return | |
264 | self._dirty = True |
|
291 | self._dirty = True | |
265 |
self._ |
|
292 | self._addpath(f) | |
266 | self._map[f] = ('n', 0, -1, -1, 0) |
|
293 | self._map[f] = ('n', 0, -1, -1, 0) | |
267 | if f in self._copymap: |
|
294 | if f in self._copymap: | |
268 | del self._copymap[f] |
|
295 | del self._copymap[f] | |
@@ -270,7 +297,7 class dirstate(object): | |||||
270 | def normaldirty(self, f): |
|
297 | def normaldirty(self, f): | |
271 | 'mark a file normal, but dirty' |
|
298 | 'mark a file normal, but dirty' | |
272 | self._dirty = True |
|
299 | self._dirty = True | |
273 |
self._ |
|
300 | self._addpath(f) | |
274 | self._map[f] = ('n', 0, -2, -1, 0) |
|
301 | self._map[f] = ('n', 0, -2, -1, 0) | |
275 | if f in self._copymap: |
|
302 | if f in self._copymap: | |
276 | del self._copymap[f] |
|
303 | del self._copymap[f] | |
@@ -278,7 +305,7 class dirstate(object): | |||||
278 | def add(self, f): |
|
305 | def add(self, f): | |
279 | 'mark a file added' |
|
306 | 'mark a file added' | |
280 | self._dirty = True |
|
307 | self._dirty = True | |
281 |
self._ |
|
308 | self._addpath(f, True) | |
282 | self._map[f] = ('a', 0, -1, -1, 0) |
|
309 | self._map[f] = ('a', 0, -1, -1, 0) | |
283 | if f in self._copymap: |
|
310 | if f in self._copymap: | |
284 | del self._copymap[f] |
|
311 | del self._copymap[f] | |
@@ -286,7 +313,7 class dirstate(object): | |||||
286 | def remove(self, f): |
|
313 | def remove(self, f): | |
287 | 'mark a file removed' |
|
314 | 'mark a file removed' | |
288 | self._dirty = True |
|
315 | self._dirty = True | |
289 |
self._ |
|
316 | self._droppath(f) | |
290 | size = 0 |
|
317 | size = 0 | |
291 | if self._pl[1] != nullid and f in self._map: |
|
318 | if self._pl[1] != nullid and f in self._map: | |
292 | entry = self._map[f] |
|
319 | entry = self._map[f] | |
@@ -302,7 +329,7 class dirstate(object): | |||||
302 | 'mark a file merged' |
|
329 | 'mark a file merged' | |
303 | self._dirty = True |
|
330 | self._dirty = True | |
304 | s = os.lstat(self._join(f)) |
|
331 | s = os.lstat(self._join(f)) | |
305 |
self._ |
|
332 | self._addpath(f) | |
306 | self._map[f] = ('m', s.st_mode, s.st_size, s.st_mtime, 0) |
|
333 | self._map[f] = ('m', s.st_mode, s.st_size, s.st_mtime, 0) | |
307 | if f in self._copymap: |
|
334 | if f in self._copymap: | |
308 | del self._copymap[f] |
|
335 | del self._copymap[f] | |
@@ -311,11 +338,18 class dirstate(object): | |||||
311 | 'forget a file' |
|
338 | 'forget a file' | |
312 | self._dirty = True |
|
339 | self._dirty = True | |
313 | try: |
|
340 | try: | |
314 |
self._ |
|
341 | self._droppath(f) | |
315 | del self._map[f] |
|
342 | del self._map[f] | |
316 | except KeyError: |
|
343 | except KeyError: | |
317 | self._ui.warn(_("not in dirstate: %s\n") % f) |
|
344 | self._ui.warn(_("not in dirstate: %s\n") % f) | |
318 |
|
345 | |||
|
346 | def _normalize(self, path): | |||
|
347 | if path not in self._foldmap: | |||
|
348 | if not os.path.exists(path): | |||
|
349 | return path | |||
|
350 | self._foldmap[path] = util.fspath(path, self._root) | |||
|
351 | return self._foldmap[path] | |||
|
352 | ||||
319 | def clear(self): |
|
353 | def clear(self): | |
320 | self._map = {} |
|
354 | self._map = {} | |
321 | if "_dirs" in self.__dict__: |
|
355 | if "_dirs" in self.__dict__: | |
@@ -327,7 +361,7 class dirstate(object): | |||||
327 | def rebuild(self, parent, files): |
|
361 | def rebuild(self, parent, files): | |
328 | self.clear() |
|
362 | self.clear() | |
329 | for f in files: |
|
363 | for f in files: | |
330 | if files. |
|
364 | if 'x' in files.flags(f): | |
331 | self._map[f] = ('n', 0777, -1, 0, 0) |
|
365 | self._map[f] = ('n', 0777, -1, 0, 0) | |
332 | else: |
|
366 | else: | |
333 | self._map[f] = ('n', 0666, -1, 0, 0) |
|
367 | self._map[f] = ('n', 0666, -1, 0, 0) | |
@@ -364,40 +398,33 class dirstate(object): | |||||
364 | st.rename() |
|
398 | st.rename() | |
365 | self._dirty = self._dirtypl = False |
|
399 | self._dirty = self._dirtypl = False | |
366 |
|
400 | |||
367 |
def _ |
|
401 | def _dirignore(self, f): | |
368 |
|
|
402 | if f == '.': | |
369 | unknown = [] |
|
403 | return False | |
370 |
|
404 | if self._ignore(f): | ||
371 | for x in files: |
|
405 | return True | |
372 | if x == '.': |
|
406 | for p in _finddirs(f): | |
373 |
|
|
407 | if self._ignore(p): | |
374 | if x not in self._map: |
|
408 | return True | |
375 | unknown.append(x) |
|
409 | return False | |
376 | else: |
|
|||
377 | ret[x] = self._map[x] |
|
|||
378 |
|
||||
379 | if not unknown: |
|
|||
380 | return ret |
|
|||
381 |
|
410 | |||
382 | b = self._map.keys() |
|
411 | def walk(self, match, unknown, ignored): | |
383 | b.sort() |
|
412 | ''' | |
384 | blen = len(b) |
|
413 | walk recursively through the directory tree, finding all files | |
|
414 | matched by the match function | |||
|
415 | ||||
|
416 | results are yielded in a tuple (filename, stat), where stat | |||
|
417 | and st is the stat result if the file was found in the directory. | |||
|
418 | ''' | |||
385 |
|
419 | |||
386 | for x in unknown: |
|
420 | def fwarn(f, msg): | |
387 | bs = bisect.bisect(b, "%s%s" % (x, '/')) |
|
421 | self._ui.warn('%s: %s\n' % (self.pathto(ff), msg)) | |
388 | while bs < blen: |
|
422 | return False | |
389 | s = b[bs] |
|
423 | badfn = fwarn | |
390 | if len(s) > len(x) and s.startswith(x): |
|
424 | if hasattr(match, 'bad'): | |
391 | ret[s] = self._map[s] |
|
425 | badfn = match.bad | |
392 | else: |
|
|||
393 | break |
|
|||
394 | bs += 1 |
|
|||
395 | return ret |
|
|||
396 |
|
426 | |||
397 | def _supported(self, f, mode, verbose=False): |
|
427 | def badtype(f, mode): | |
398 | if stat.S_ISREG(mode) or stat.S_ISLNK(mode): |
|
|||
399 | return True |
|
|||
400 | if verbose: |
|
|||
401 | kind = 'unknown' |
|
428 | kind = 'unknown' | |
402 | if stat.S_ISCHR(mode): kind = _('character device') |
|
429 | if stat.S_ISCHR(mode): kind = _('character device') | |
403 | elif stat.S_ISBLK(mode): kind = _('block device') |
|
430 | elif stat.S_ISBLK(mode): kind = _('block device') | |
@@ -406,173 +433,121 class dirstate(object): | |||||
406 | elif stat.S_ISDIR(mode): kind = _('directory') |
|
433 | elif stat.S_ISDIR(mode): kind = _('directory') | |
407 | self._ui.warn(_('%s: unsupported file type (type is %s)\n') |
|
434 | self._ui.warn(_('%s: unsupported file type (type is %s)\n') | |
408 | % (self.pathto(f), kind)) |
|
435 | % (self.pathto(f), kind)) | |
409 | return False |
|
|||
410 |
|
||||
411 | def _dirignore(self, f): |
|
|||
412 | if f == '.': |
|
|||
413 | return False |
|
|||
414 | if self._ignore(f): |
|
|||
415 | return True |
|
|||
416 | for c in strutil.findall(f, '/'): |
|
|||
417 | if self._ignore(f[:c]): |
|
|||
418 | return True |
|
|||
419 | return False |
|
|||
420 |
|
||||
421 | def walk(self, files=None, match=util.always, badmatch=None): |
|
|||
422 | # filter out the stat |
|
|||
423 | for src, f, st in self.statwalk(files, match, badmatch=badmatch): |
|
|||
424 | yield src, f |
|
|||
425 |
|
||||
426 | def statwalk(self, files=None, match=util.always, unknown=True, |
|
|||
427 | ignored=False, badmatch=None, directories=False): |
|
|||
428 | ''' |
|
|||
429 | walk recursively through the directory tree, finding all files |
|
|||
430 | matched by the match function |
|
|||
431 |
|
||||
432 | results are yielded in a tuple (src, filename, st), where src |
|
|||
433 | is one of: |
|
|||
434 | 'f' the file was found in the directory tree |
|
|||
435 | 'd' the file is a directory of the tree |
|
|||
436 | 'm' the file was only in the dirstate and not in the tree |
|
|||
437 | 'b' file was not found and matched badmatch |
|
|||
438 |
|
||||
439 | and st is the stat result if the file was found in the directory. |
|
|||
440 | ''' |
|
|||
441 |
|
||||
442 | # walk all files by default |
|
|||
443 | if not files: |
|
|||
444 | files = ['.'] |
|
|||
445 | dc = self._map.copy() |
|
|||
446 | else: |
|
|||
447 | files = util.unique(files) |
|
|||
448 | dc = self._filter(files) |
|
|||
449 |
|
||||
450 | def imatch(file_): |
|
|||
(mercurial/dirstate.py, continuation of the walk() rewrite. The collapsed side-by-side
rendering is reconstructed below as a unified diff: "-" lines are removed, "+" lines are
added, unprefixed lines are unchanged context, and "…" marks text cut off in the
original rendering.)

-            if file_ not in dc and self._ignore(file_):
-                return False
-            return match(file_)
-
         # TODO: don't walk unknown directories if unknown and ignored are False
         ignore = self._ignore
         dirignore = self._dirignore
         if ignored:
-            imatch = match
             ignore = util.never
             dirignore = util.never

-        # self._root may end with a path separator when self._root == '/'
-        common_prefix_len = len(self._root)
-        if not util.endswithsep(self._root):
-            common_prefix_len += 1
-
+        matchfn = match.matchfn
+        dmap = self._map
         normpath = util.normpath
+        normalize = self.normalize
         listdir = osutil.listdir
         lstat = os.lstat
         bisect_left = bisect.bisect_left
-        isdir = os.path.isdir
         pconvert = util.pconvert
-        supported = self._supported
-        _join = self._join
-        known = {'.hg': 1}
-
-        # recursion free walker, faster than os.walk.
-        def findfiles(s):
-            work = [s]
-            found = []
-            add = found.append
-            if directories:
-                add((normpath(s[common_prefix_len:]), 'd', lstat(s)))
-            [… old walker body: it popped directories off the work list, read
-             entries = listdir(top, stat=True), computed nd = normpath(top[common_prefix_len:]),
-             skipped nested repositories via names = [e[0] for e in entries],
-             hg = bisect_left(names, '.hg') and isdir(join(top, '.hg')), built
-             np = pconvert(join(nd, f)), avoided tripping over symlinks
-             (kind == stat.S_IFDIR), and collected add((np, 'f', st)) for supported
-             files and add((np, 'm', st)) for names in dc, finally found.sort()
-             and return found …]
-
-        # step one, find all files that match our criteria
-        files.sort()
-        [… old per-file loop: f = _join(ff); st = lstat(f); on OSError it scanned
-         the dirstate for fn == nf or fn.startswith(nf + '/'), warned with
-         self._ui.warn('%s: %s\n' % (self.pathto(ff), inst.strerror)) when
-         inst.errno != errno.ENOENT or not badmatch, or yielded 'b', ff, None for
-         badmatch; directories not in dirignore were expanded with findfiles(f)
-         and yielded as (src, f, st); other names were yielded as 'f' when
-         supported(ff, st.st_mode, verbose=True) or as 'm' when ff in dc, with
-         the known dict preventing duplicates …]
-
-        # step two run through anything left in the dc hash and yield
-        # if we haven't already seen it
-        ks = dc.keys()
-        ks.sort()
-        for k in ks:
-            if k in known:
-                continue
-            known[k] = 1
-            if imatch(k):
-                yield 'm', k, None
+        getkind = stat.S_IFMT
+        dirkind = stat.S_IFDIR
+        regkind = stat.S_IFREG
+        lnkkind = stat.S_IFLNK
+        join = self._join
+        work = []
+        wadd = work.append
+
+        files = util.unique(match.files())
+        if not files or '.' in files:
+            files = ['']
+        results = {'.hg': None}
+
+        # step 1: find all explicit files
+        for ff in util.sort(files):
+            nf = normalize(normpath(ff))
+            if nf in results:
+                continue
+
+            try:
+                st = lstat(join(nf))
+                kind = getkind(st.st_mode)
+                if kind == dirkind:
+                    if not dirignore(nf):
+                        wadd(nf)
+                elif kind == regkind or kind == lnkkind:
+                    results[nf] = st
+                else:
+                    badtype(ff, kind)
+                    if nf in dmap:
+                        results[nf] = None
+            except OSError, inst:
+                keep = False
+                prefix = nf + "/"
+                for fn in dmap:
+                    if nf == fn or fn.startswith(prefix):
+                        keep = True
+                        break
+                if not keep:
+                    if inst.errno != errno.ENOENT:
+                        fwarn(ff, inst.strerror)
+                    elif badfn(ff, inst.strerror):
+                        if (nf in dmap or not ignore(nf)) and matchfn(nf):
+                            results[nf] = None
+
+        # step 2: visit subdirectories
+        while work:
+            nd = work.pop()
+            if hasattr(match, 'dir'):
+                match.dir(nd)
+            entries = listdir(join(nd), stat=True)
+            if nd == '.':
+                nd = ''
+            else:
+                # do not recurse into a repo contained in this
+                # one. use bisect to find .hg directory so speed
+                # is good on big directory.
+                hg = bisect_left(entries, ('.hg'))
+                if hg < len(entries) and entries[hg][0] == '.hg' \
+                       and entries[hg][1] == dirkind:
+                    continue
+            for f, kind, st in entries:
+                nf = normalize(nd and (nd + "/" + f) or f)
+                if nf not in results:
+                    if kind == dirkind:
+                        if not ignore(nf):
+                            wadd(nf)
+                        if nf in dmap and matchfn(nf):
+                            results[nf] = None
+                    elif kind == regkind or kind == lnkkind:
+                        if nf in dmap:
+                            if matchfn(nf):
+                                results[nf] = st
+                        elif matchfn(nf) and not ignore(nf):
+                            results[nf] = st
+                    elif nf in dmap and matchfn(nf):
+                        results[nf] = None
+
+        # step 3: report unseen items in the dmap hash
+        visit = [f for f in dmap if f not in results and match(f)]
+        for nf in util.sort(visit):
+            results[nf] = None
+            try:
+                st = lstat(join(nf))
+                kind = getkind(st.st_mode)
+                if kind == regkind or kind == lnkkind:
+                    results[nf] = st
+            except OSError, inst:
+                if inst.errno not in (errno.ENOENT, errno.ENOTDIR):
+                    raise
+
+        del results['.hg']
+        return results
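The contract of the new walk() above is a single dict: repository-relative names map
to an os.lstat() result when the file exists on disk, or to None when the name is only
known from the dirstate. A minimal standalone sketch of consuming such a dict; the
names `walk_results`, `dmap` and `split_results` are illustrative, not Mercurial's API:

    import os

    def split_results(walk_results, dmap):
        # Files present on disk keep their stat result; names that exist only in
        # the dirstate map come back as None and are candidates for "deleted".
        on_disk, missing = {}, []
        for name, st in walk_results.items():
            if st is None and name in dmap:
                missing.append(name)
            elif st is not None:
                on_disk[name] = st
        return on_disk, missing

    demo = {'a.txt': os.lstat('.'), 'gone.txt': None}
    print(split_results(demo, dmap={'gone.txt': 'r'}))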
-    def status(self, …):
+    def status(self, match, ignored, clean, unknown):
+        listignored, listclean, listunknown = ignored, clean, unknown
         lookup, modified, added, unknown, ignored = [], [], [], [], []
         removed, deleted, clean = [], [], []

-        files = files or []
         _join = self._join
         lstat = os.lstat
         cmap = self._copymap
@@ -586,38 +561,20 @@ class dirstate(object):
         dadd = deleted.append
         cadd = clean.append

-        for … ignored=list_ignored):
-            if fn in dmap:
-                type_, mode, size, time, foo = dmap[fn]
-            else:
-                if (list_ignored or fn in files) and self._dirignore(fn):
-                    if list_ignored:
-                        iadd(fn)
-                elif list_unknown:
-                    uadd(fn)
-                continue
-            if src == 'm':
-                nonexistent = True
-                if not st:
-                    try:
-                        st = lstat(_join(fn))
-                    except OSError, inst:
-                        if inst.errno not in (errno.ENOENT, errno.ENOTDIR):
-                            raise
-                        st = None
-                # We need to re-check that it is a valid file
-                if st and self._supported(fn, st.st_mode):
-                    nonexistent = False
-            # XXX: what to do with file no longer present in the fs
-            # who are not removed in the dirstate ?
-            if nonexistent and type_ in "nma":
-                dadd(fn)
-                continue
-            # check the common case first
-            if type_ == 'n':
-                if not st:
-                    st = lstat(_join(fn))
+        for fn, st in self.walk(match, listunknown, listignored).iteritems():
+            if fn not in dmap:
+                if (listignored or match.exact(fn)) and self._dirignore(fn):
+                    if listignored:
+                        iadd(fn)
+                elif listunknown:
+                    uadd(fn)
+                continue
+
+            state, mode, size, time, foo = dmap[fn]
+
+            if not st and state in "nma":
+                dadd(fn)
+            elif state == 'n':
                 if (size >= 0 and
                     (size != st.st_size
                      or ((mode ^ st.st_mode) & 0100 and self._checkexec))
@@ -626,13 +583,13 @@ class dirstate(object):
                     madd(fn)
                 elif time != int(st.st_mtime):
                     ladd(fn)
-                elif list_clean:
+                elif listclean:
                     cadd(fn)
-            elif type_ == 'm':
+            elif state == 'm':
                 madd(fn)
-            elif type_ == 'a':
+            elif state == 'a':
                 aadd(fn)
-            elif type_ == 'r':
+            elif state == 'r':
                 radd(fn)

         return (lookup, modified, added, removed, deleted, unknown, ignored,
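The loop above classifies every walked file by its dirstate state character ('n'
normal, 'm' merged, 'a' added, 'r' removed) plus the on-disk stat. A toy sketch of
that dispatch, under illustrative names rather than the dirstate class itself:

    def classify(state, on_disk, changed, stale):
        # Rough version of the branching above; returns the status bucket a
        # file falls into given its dirstate state and what lstat() reported.
        if not on_disk and state in "nma":
            return 'deleted'
        if state == 'n':
            if changed:
                return 'modified'
            if stale:
                return 'lookup'   # mtime mismatch: contents must be re-read
            return 'clean'
        return {'m': 'modified', 'a': 'added', 'r': 'removed'}[state]

    print(classify('n', on_disk=True, changed=False, stale=True))   # lookup
    print(classify('a', on_disk=True, changed=False, stale=False))  # added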
(mercurial/filemerge.py)

@@ -5,7 +5,7 @@
 # This software may be used and distributed according to the terms
 # of the GNU General Public License, incorporated herein by reference.

-from node import nullrev
+from node import nullrev, short
 from i18n import _
 import util, os, tempfile, simplemerge, re, filecmp

@@ -63,8 +63,7 @@ def _picktool(repo, ui, path, binary, sy…
         if t not in tools:
             tools[t] = int(_toolstr(ui, t, "priority", "0"))
     names = tools.keys()
-    tools = [(-p,t) for t,p in tools.items()]
-    tools.sort()
+    tools = util.sort([(-p,t) for t,p in tools.items()])
     uimerge = ui.config("ui", "merge")
    if uimerge:
        if uimerge not in names:
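The one-liner keeps the behaviour of the two-line sort it replaces: negating the
priority makes an ordinary ascending tuple sort put the highest-priority merge tool
first, with ties broken alphabetically. A standalone illustration in plain Python:

    tools = {'kdiff3': 7, 'vimdiff': 7, 'internal:merge': 0}

    # Negate priority so ascending sort yields high-priority tools first;
    # equal priorities fall back to alphabetical order on the tool name.
    ranked = sorted((-p, t) for t, p in tools.items())
    print([t for _, t in ranked])   # ['kdiff3', 'vimdiff', 'internal:merge']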
@@ -101,13 +100,14 @@ def _matcheol(file, origfile):
     if newdata != data:
         open(file, "wb").write(newdata)

-def filemerge(repo, fw, fd, fo, wctx, mctx):
+def filemerge(repo, mynode, orig, fcd, fco, fca):
     """perform a 3-way merge in the working directory

-    fw = original filename in the working directory
-    fd = destination filename in the working directory
-    fo = filename in other parent
-    wctx, mctx = working and merge changecontexts
+    mynode = parent node before merge
+    orig = original local filename before merge
+    fco = other file context
+    fca = ancestor file context
+    fcd = local file context for current/destination file
     """

     def temp(prefix, ctx):
@@ -125,29 +125,27 @@ def filemerge(repo, fw, fd, fo, wctx, mc…
     except IOError:
         return False

-    fco = mctx.filectx(fo)
-    if not fco.cmp(wctx.filectx(fd).data()): # files identical?
+    if not fco.cmp(fcd.data()): # files identical?
         return None

     ui = repo.ui
-    fcm = wctx.filectx(fw)
-    fca = fcm.ancestor(fco) or repo.filectx(fw, fileid=nullrev)
-    binary = isbin(fcm) or isbin(fco) or isbin(fca)
-    symlink = fcm.islink() or fco.islink()
-    tool, toolpath = _picktool(repo, ui, fw, binary, symlink)
+    fd = fcd.path()
+    binary = isbin(fcd) or isbin(fco) or isbin(fca)
+    symlink = 'l' in fcd.flags() + fco.flags()
+    tool, toolpath = _picktool(repo, ui, fd, binary, symlink)
     ui.debug(_("picked tool '%s' for %s (binary %s symlink %s)\n") %
-             (tool, fw, binary, symlink))
+             (tool, fd, binary, symlink))

     if not tool:
         tool = "internal:local"
         if ui.prompt(_(" no tool found to merge %s\n"
-                       "keep (l)ocal or take (o)ther?") % fw,
+                       "keep (l)ocal or take (o)ther?") % fd,
                      _("[lo]"), _("l")) != _("l"):
             tool = "internal:other"
     if tool == "internal:local":
         return 0
     if tool == "internal:other":
-        repo.wwrite(fd, fco.data(), fco.…)
+        repo.wwrite(fd, fco.data(), fco.flags())
         return 0
     if tool == "internal:fail":
         return 1
@@ -160,11 +158,12 @@ def filemerge(repo, fw, fd, fo, wctx, mc…
     back = a + ".orig"
     util.copyfile(a, back)

-    if …:
-        repo.ui.status(_("merging %s and %s\n") % (…))
+    if orig != fco.path():
+        repo.ui.status(_("merging %s and %s to %s\n") % (orig, fco.path(), fd))
     else:
-        repo.ui.status(_("merging %s\n") % f…)
-    repo.ui.debug(_("my %s other %s ancestor %s\n") % (fcm, fco, fca))
+        repo.ui.status(_("merging %s\n") % fd)
+
+    repo.ui.debug(_("my %s other %s ancestor %s\n") % (fcd, fco, fca))

     # do we attempt to simplemerge first?
     if _toolbool(ui, tool, "premerge", not (binary or symlink)):
@@ -178,11 +177,11 @@ def filemerge(repo, fw, fd, fo, wctx, mc…
         util.copyfile(back, a) # restore from backup and try again

     env = dict(HG_FILE=fd,
-               HG_MY_NODE=s…,
-               HG_OTHER_NODE=str(…),
-               HG_MY_ISLINK=…,
-               HG_OTHER_ISLINK=fco.…,
-               HG_BASE_ISLINK=fca.…)
+               HG_MY_NODE=short(mynode),
+               HG_OTHER_NODE=str(fco.changectx()),
+               HG_MY_ISLINK='l' in fcd.flags(),
+               HG_OTHER_ISLINK='l' in fco.flags(),
+               HG_BASE_ISLINK='l' in fca.flags())

     if tool == "internal:merge":
         r = simplemerge.simplemerge(a, b, c, label=['local', 'other'])
@@ -196,7 +195,7 @@ def filemerge(repo, fw, fd, fo, wctx, mc…
     r = util.system(toolpath + ' ' + args, cwd=repo.root, environ=env)

     if not r and _toolbool(ui, tool, "checkconflicts"):
-        if re.match("^(<<<<<<< .*|=======|>>>>>>> .*)$", fc…):
+        if re.match("^(<<<<<<< .*|=======|>>>>>>> .*)$", fcd.data()):
             r = 1

     if not r and _toolbool(ui, tool, "checkchanged"):
(mercurial/hbisect.py)

@@ -30,12 +30,12 @@ def bisect(changelog, state):
     badrev = min([changelog.rev(n) for n in bad])
     goodrevs = [changelog.rev(n) for n in good]
     # build ancestors array
-    ancestors = [[]] * (changelog.count() + 1) # an extra for [-1]
+    ancestors = [[]] * (len(changelog) + 1) # an extra for [-1]

     # clear good revs from array
     for node in goodrevs:
         ancestors[node] = None
-    for rev in xrange(changelog.count(), -1, -1):
+    for rev in xrange(len(changelog), -1, -1):
         if ancestors[rev] is None:
             for prev in clparents(rev):
                 ancestors[prev] = None
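The array is allocated with one extra slot so that index -1 (Python's "last element")
can stand in for the null revision without special-casing. A tiny plain-Python
illustration of the idea, not Mercurial's code; note that the original uses
`[[]] * n`, which shares one list object across slots and is fine here because
entries are only ever reassigned, never mutated in place:

    num_revs = 5
    # One extra slot so ancestors[-1] (the null revision, rev -1) is addressable.
    ancestors = [[] for _ in range(num_revs + 1)]

    ancestors[-1] = None          # mark nullrev as "not a candidate"
    print(len(ancestors), ancestors[-1])   # 6 None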
(mercurial/help.py)

@@ -5,8 +5,8 @@
 # This software may be used and distributed according to the terms
 # of the GNU General Public License, incorporated herein by reference.

-helptable = {
-    "dates|Date Formats":
+helptable = (
+    ("dates|Date Formats",
     r'''
     Some commands allow the user to specify a date:
     backout, commit, import, tag: Specify the commit date.
@@ -21,7 +21,7 @@ helptable = {
    "13:18" (today assumed)
    "3:39" (3:39AM assumed)
    "3:39pm" (15:39)
-   "2006-12-6 13:18:29" (ISO 8601 format)
+   "2006-12-06 13:18:29" (ISO 8601 format)
    "2006-12-6 13:18"
    "2006-12-6"
    "12-6"
@@ -43,9 +43,55 @@ helptable = {
    ">{date}" - on or after a given date
    "{date} to {date}" - a date range, inclusive
    "-{days}" - within a given number of days of today
-    ''',
-
-    'environment|env|Environment Variables':
+    '''),
+
+    ("patterns|File Name Patterns",
+    r'''
+    Mercurial accepts several notations for identifying one or more
+    files at a time.
+
+    By default, Mercurial treats filenames as shell-style extended
+    glob patterns.
+
+    Alternate pattern notations must be specified explicitly.
+
+    To use a plain path name without any pattern matching, start a
+    name with "path:". These path names must match completely, from
+    the root of the current repository.
+
+    To use an extended glob, start a name with "glob:". Globs are
+    rooted at the current directory; a glob such as "*.c" will match
+    files ending in ".c" in the current directory only.
+
+    The supported glob syntax extensions are "**" to match any string
+    across path separators, and "{a,b}" to mean "a or b".
+
+    To use a Perl/Python regular expression, start a name with "re:".
+    Regexp pattern matching is anchored at the root of the repository.
+
+    Plain examples:
+
+    path:foo/bar   a name bar in a directory named foo in the root of
+                   the repository
+    path:path:name a file or directory named "path:name"
+
+    Glob examples:
+
+    glob:*.c       any name ending in ".c" in the current directory
+    *.c            any name ending in ".c" in the current directory
+    **.c           any name ending in ".c" in the current directory, or
+                   any subdirectory
+    foo/*.c        any name ending in ".c" in the directory foo
+    foo/**.c       any name ending in ".c" in the directory foo, or any
+                   subdirectory
+
+    Regexp examples:
+
+    re:.*\.c$      any name ending in ".c", anywhere in the repository
+
+    '''),
+
+    ('environment|env|Environment Variables',
     r'''
     HG::
         Path to the 'hg' executable, automatically passed when running hooks,
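The glob rules quoted above ("*.c" stays in the current directory, "**" crosses path
separators, "re:" patterns are anchored at the repository root) can be mimicked with
the standard library. A rough standalone sketch of the "*" versus "**" distinction
only; this is an illustration, not Mercurial's matcher (which also handles the
"path:", "re:" and "{a,b}" forms):

    import re

    def glob_to_regex(pat):
        # Rough translation of the extended-glob rules described above:
        # "*" matches within one path component, "**" also crosses "/".
        out = []
        i = 0
        while i < len(pat):
            if pat.startswith('**', i):
                out.append('.*')
                i += 2
            elif pat[i] == '*':
                out.append('[^/]*')
                i += 1
            else:
                out.append(re.escape(pat[i]))
                i += 1
        return re.compile(''.join(out) + '$')

    print(bool(glob_to_regex('*.c').match('foo.c')))      # True
    print(bool(glob_to_regex('*.c').match('sub/foo.c')))  # False: "*" stops at "/"
    print(bool(glob_to_regex('**.c').match('sub/foo.c'))) # True: "**" crosses "/"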
@@ -114,51 +160,57 @@ EDITOR::
    PYTHONPATH::
        This is used by Python to find imported modules and may need to be set
        appropriately if Mercurial is not installed system-wide.
-    ''',
+    '''),

-    "patterns|File Name Patterns": r'''
-    [… the old copy of the File Name Patterns help text, identical to the section
-     shown above (plain path/glob/regexp notations plus the path:, glob: and re:
-     examples), is removed here; it now lives earlier in the table …]
-    ''',
-}
+    ('revs|revisions|Specifying Single Revisions',
+    r'''
+    Mercurial accepts several notations for identifying individual
+    revisions.
+
+    A plain integer is treated as a revision number. Negative
+    integers are treated as offsets from the tip, with -1 denoting the
+    tip.
+
+    A 40-digit hexadecimal string is treated as a unique revision
+    identifier.
+
+    A hexadecimal string less than 40 characters long is treated as a
+    unique revision identifier, and referred to as a short-form
+    identifier. A short-form identifier is only valid if it is the
+    prefix of one full-length identifier.
+
+    Any other string is treated as a tag name, which is a symbolic
+    name associated with a revision identifier. Tag names may not
+    contain the ":" character.
+
+    The reserved name "tip" is a special tag that always identifies
+    the most recent revision.
+
+    The reserved name "null" indicates the null revision. This is the
+    revision of an empty repository, and the parent of revision 0.
+
+    The reserved name "." indicates the working directory parent. If
+    no working directory is checked out, it is equivalent to null.
+    If an uncommitted merge is in progress, "." is the revision of
+    the first parent.
+    '''),
+
+    ('mrevs|multirevs|Specifying Multiple Revisions',
+    r'''
+    When Mercurial accepts more than one revision, they may be
+    specified individually, or provided as a continuous range,
+    separated by the ":" character.
+
+    The syntax of range notation is [BEGIN]:[END], where BEGIN and END
+    are revision identifiers. Both BEGIN and END are optional. If
+    BEGIN is not specified, it defaults to revision number 0. If END
+    is not specified, it defaults to the tip. The range ":" thus
+    means "all revisions".
+
+    If BEGIN is greater than END, revisions are treated in reverse
+    order.
+
+    A range acts as a closed interval. This means that a range of 3:5
+    gives 3, 4 and 5. Similarly, a range of 4:2 gives 4, 3, and 2.
+    '''),
+)
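The closed-interval, possibly reversed range semantics in the new multirevs text are
simple to model. A standalone sketch using plain Python and a hypothetical helper
name (this is not Mercurial's revrange implementation):

    def revrange(begin, end):
        # Closed interval in either direction, as described above:
        # 3:5 -> [3, 4, 5], 4:2 -> [4, 3, 2].
        step = 1 if begin <= end else -1
        return list(range(begin, end + step, step))

    print(revrange(3, 5))   # [3, 4, 5]
    print(revrange(4, 2))   # [4, 3, 2]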
(mercurial/hg.py)

@@ -16,7 +16,7 @@ def _local(path):
     return (os.path.isfile(util.drop_scheme('file', path)) and
             bundlerepo or localrepo)

-def parseurl(url, revs):
+def parseurl(url, revs=[]):
    '''parse url#branch, returning url, branch + revs'''

    if '#' not in url:
@@ -69,6 +69,15 @@ def defaultdest(source):
    '''return default destination of clone if none is given'''
    return os.path.basename(os.path.normpath(source))

+def localpath(path):
+    if path.startswith('file://localhost/'):
+        return path[16:]
+    if path.startswith('file://'):
+        return path[7:]
+    if path.startswith('file:'):
+        return path[5:]
+    return path
+
 def clone(ui, source, dest=None, pull=False, rev=None, update=True,
           stream=False):
     """Make a copy of an existing repository.
@@ -100,7 +109,8 @@ def clone(ui, source, dest=None, pull=Fa…
     rev: revision to clone up to (implies pull=True)

     update: update working directory after clone completes, if
-    destination is local repository
+    destination is local repository (True means update to default rev,
+    anything else is treated as a revision)
     """

     if isinstance(source, str):
@@ -116,15 +126,6 @@ def clone(ui, source, dest=None, pull=Fa…
         dest = defaultdest(source)
         ui.status(_("destination directory: %s\n") % dest)

-    def localpath(path):
-        if path.startswith('file://localhost/'):
-            return path[16:]
-        if path.startswith('file://'):
-            return path[7:]
-        if path.startswith('file:'):
-            return path[5:]
-        return path
-
     dest = localpath(dest)
     source = localpath(source)

@@ -244,7 +245,9 @@ def clone(ui, source, dest=None, pull=Fa…

     if update:
         dest_repo.ui.status(_("updating working directory\n"))
-        if not checkout:
+        if update is not True:
+            checkout = update
+        elif not checkout:
             try:
                 checkout = dest_repo.lookup("default")
             except:
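This hunk and the docstring change above go together: update=True means "update to
the default revision", while any other truthy value is taken as the revision to check
out. A standalone sketch of that dispatch, with illustrative names:

    def pick_checkout(update, lookup_default):
        # Mirrors the branch above: True -> default head, other truthy
        # values -> treat the value itself as the requested revision.
        if not update:
            return None
        if update is not True:
            return update
        return lookup_default()

    print(pick_checkout(True, lambda: 'default-tip'))   # default-tip
    print(pick_checkout('1.0', lambda: 'default-tip'))  # 1.0
    print(pick_checkout(False, lambda: 'default-tip'))  # None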
@@ -271,15 +274,7 @@ def update(repo, node):
     stats = _merge.update(repo, node, False, False, None)
     _showstats(repo, stats)
     if stats[3]:
-        repo.ui.status(_("…"
-                         " locally modified files.\n"))
-        if stats[1]:
-            repo.ui.status(_("You can finish the partial merge using:\n"))
-        else:
-            repo.ui.status(_("You can redo the full merge using:\n"))
-        # len(pl)==1, otherwise _merge.update() would have raised util.Abort:
-        repo.ui.status(_(" hg update %s\n hg update %s\n")
-                       % (pl[0].rev(), repo.changectx(node).rev()))
+        repo.ui.status(_("use 'hg resolve' to retry unresolved file merges\n"))
     return stats[3] > 0

 def clean(repo, node, show_stats=True):
@@ -294,11 +289,7 @@ def merge(repo, node, force=None, remind…
     _showstats(repo, stats)
     if stats[3]:
         pl = repo.parents()
-        repo.ui.status(_("…"
-                         " you can redo the full merge using:\n"
-                         "  hg update -C %s\n"
-                         "  hg merge %s\n")
-                       % (pl[0].rev(), pl[1].rev()))
+        repo.ui.status(_("use 'hg resolve' to retry unresolved file merges\n"))
     elif remind:
         repo.ui.status(_("(branch merge, don't forget to commit)\n"))
     return stats[3] > 0
This diff has been collapsed as it changes many lines (730 lines changed).

(mercurial/hgweb/hgweb_mod.py — reconstructed as a unified diff, same conventions as above)

@@ -6,79 +6,22 @@
 # This software may be used and distributed according to the terms
 # of the GNU General Public License, incorporated herein by reference.

 import os, mimetypes
 from mercurial.node import hex, nullid
 from mercurial.repo import RepoError
-from mercurial import mdiff, ui, hg, util, …
+from mercurial import mdiff, ui, hg, util, patch, hook
 from mercurial import revlog, templater, templatefilters
-from common import get_mtime, style_map, paritygen, countgen, …
-from common import ErrorResponse
+from common import get_mtime, style_map, paritygen, countgen, ErrorResponse
 from common import HTTP_OK, HTTP_BAD_REQUEST, HTTP_NOT_FOUND, HTTP_SERVER_ERROR
 from request import wsgirequest
-import webcommands, protocol
+import webcommands, protocol, webutil

-shortcuts = {
-    'cl': [('cmd', ['changelog']), ('rev', None)],
-    'sl': [('cmd', ['shortlog']), ('rev', None)],
-    'cs': [('cmd', ['changeset']), ('node', None)],
-    'f': [('cmd', ['file']), ('filenode', None)],
-    'fl': [('cmd', ['filelog']), ('filenode', None)],
-    'fd': [('cmd', ['filediff']), ('node', None)],
-    'fa': [('cmd', ['annotate']), ('filenode', None)],
-    'mf': [('cmd', ['manifest']), ('manifest', None)],
-    'ca': [('cmd', ['archive']), ('node', None)],
-    'tags': [('cmd', ['tags'])],
-    'tip': [('cmd', ['changeset']), ('node', ['tip'])],
-    'static': [('cmd', ['static']), ('file', None)]
-}
-
-def _up(p):
-    if p[0] != "/":
-        p = "/" + p
-    if p[-1] == "/":
-        p = p[:-1]
-    up = os.path.dirname(p)
-    if up == "/":
-        return "/"
-    return up + "/"
-
-def revnavgen(pos, pagelen, limit, nodefunc):
-    def seq(factor, limit=None):
-        if limit:
-            yield limit
-            if limit >= 20 and limit <= 40:
-                yield 50
-        else:
-            yield 1 * factor
-            yield 3 * factor
-        for f in seq(factor * 10):
-            yield f
-
-    def nav(**map):
-        l = []
-        last = 0
-        for f in seq(1, pagelen):
-            if f < pagelen or f <= last:
-                continue
-            if f > limit:
-                break
-            last = f
-            if pos + f < limit:
-                l.append(("+%d" % f, hex(nodefunc(pos + f).node())))
-            if pos - f >= 0:
-                l.insert(0, ("-%d" % f, hex(nodefunc(pos - f).node())))
-
-        try:
-            yield {"label": "(0)", "node": hex(nodefunc('0').node())}
-
-            for label, node in l:
-                yield {"label": label, "node": node}
-
-            yield {"label": "tip", "node": "tip"}
-        except RepoError:
-            pass
-
-    return nav
+perms = {
+    'changegroup': 'pull',
+    'changegroupsubset': 'pull',
+    'unbundle': 'push',
+    'stream_out': 'pull',
+}

 class hgweb(object):
     def __init__(self, repo, name=None):
@@ -93,7 +36,6 @@ class hgweb(object):
         self.reponame = name
         self.archives = 'zip', 'gz', 'bz2'
         self.stripecount = 1
-        self._capabilities = None
         # a repo owner may set web.templates in .hg/hgrc to get any file
         # readable by the user running the CGI script
         self.templatepath = self.config("web", "templates",
@@ -125,18 +67,6 @@ class hgweb(object):
         self.maxfiles = int(self.config("web", "maxfiles", 10))
         self.allowpull = self.configbool("web", "allowpull", True)
         self.encoding = self.config("web", "encoding", util._encoding)
-        self._capabilities = None
-
-    def capabilities(self):
-        if self._capabilities is not None:
-            return self._capabilities
-        caps = ['lookup', 'changegroupsubset']
-        if self.configbool('server', 'uncompressed'):
-            caps.append('stream=%d' % self.repo.changelog.version)
-        if changegroup.bundlepriority:
-            caps.append('unbundle=%s' % ','.join(changegroup.bundlepriority))
-        self._capabilities = caps
-        return caps

     def run(self):
         if not os.environ.get('GATEWAY_INTERFACE', '').startswith("CGI/1."):
@@ -146,22 +76,22 @@ class hgweb(object):

     def __call__(self, env, respond):
         req = wsgirequest(env, respond)
-        self.run_wsgi(req)
-        return req
+        return self.run_wsgi(req)

     def run_wsgi(self, req):

         self.refresh()

-        # expand form shortcuts
+        # process this if it's a protocol request
+        # protocol bits don't need to create any URLs
+        # and the clients always use the old URL structure

-        for k in shortcuts.iterkeys():
-            if k in req.form:
-                for name, value in shortcuts[k]:
-                    if value is None:
-                        value = req.form[k]
-                    req.form[name] = value
-                del req.form[k]
+        cmd = req.form.get('cmd', [''])[0]
+        if cmd and cmd in protocol.__all__:
+            if cmd in perms and not self.check_perm(req, perms[cmd]):
+                return []
+            method = getattr(protocol, cmd)
+            return method(self.repo, req)

         # work with CGI variables to create coherent structure
         # use SCRIPT_NAME, PATH_INFO and QUERY_STRING as well as our REPO_NAME
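The new dispatch first consults the module-level perms table (added at the top of the
file) to decide whether the wire-protocol command needs 'pull' or 'push' rights before
calling its handler, and returns an empty WSGI body when permission is denied. A
standalone sketch of the same pattern with illustrative stand-in names, not hgweb's
real objects:

    perms = {'changegroup': 'pull', 'unbundle': 'push', 'stream_out': 'pull'}

    class Handlers(object):
        # Stand-ins for the functions in the protocol module.
        def changegroup(self, repo, req):
            return 'sending changegroup'
        def unbundle(self, repo, req):
            return 'applying bundle'

    def dispatch(cmd, allowed, handlers, repo=None, req=None):
        # Deny early when the command's required permission is not granted.
        if cmd in perms and perms[cmd] not in allowed:
            return []                  # empty response body, like the code above
        method = getattr(handlers, cmd)
        return method(repo, req)

    h = Handlers()
    print(dispatch('changegroup', {'pull'}, h))   # sending changegroup
    print(dispatch('unbundle', {'pull'}, h))      # [] (push not allowed)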
@@ -194,8 +124,10 @@ class hgweb(object):
             cmd = cmd[style+1:]

         # avoid accepting e.g. style parameter as command
-        if hasattr(webcommands, cmd)…
+        if hasattr(webcommands, cmd):
             req.form['cmd'] = [cmd]
+        else:
+            cmd = ''

         if args and args[0]:
             node = args.pop(0)
@@ -213,30 +145,13 @@ class hgweb(object):
                     req.form['node'] = [fn[:-len(ext)]]
                     req.form['type'] = [type_]

-        # process this if it's a protocol request
-
-        cmd = req.form.get('cmd', [''])[0]
-        if cmd in protocol.__all__:
-            method = getattr(protocol, cmd)
-            method(self, req)
-            return
-
         # process the web interface request

         try:

             tmpl = self.templater(req)
-            try:
-                ctype = tmpl('mimetype', encoding=self.encoding)
-                ctype = templater.stringify(ctype)
-            except KeyError:
-                # old templates with inline HTTP headers?
-                if 'mimetype' in tmpl:
-                    raise
-                header = tmpl('header', encoding=self.encoding)
-                header_file = cStringIO.StringIO(templater.stringify(header))
-                msg = mimetools.Message(header_file, 0)
-                ctype = msg['content-type']
+            ctype = tmpl('mimetype', encoding=self.encoding)
+            ctype = templater.stringify(ctype)

             if cmd == '':
                 req.form['cmd'] = [tmpl.cache['default']]
@@ -254,6 +169,7 @@ class hgweb(object):

             req.write(content)
             del tmpl
+            return []

         except revlog.LookupError, err:
             req.respond(HTTP_NOT_FOUND, ctype)
@@ -261,12 +177,15 @@ class hgweb(object):
             if 'manifest' not in msg:
                 msg = 'revision not found: %s' % err.name
             req.write(tmpl('error', error=msg))
+            return []
         except (RepoError, revlog.RevlogError), inst:
             req.respond(HTTP_SERVER_ERROR, ctype)
             req.write(tmpl('error', error=str(inst)))
+            return []
         except ErrorResponse, inst:
             req.respond(inst.code, ctype)
             req.write(tmpl('error', error=inst.message))
+            return []

     def templater(self, req):

@@ -291,13 +210,7 @@ class hgweb(object):
         # some functions for the templater

         def header(**map):
-            header = tmpl('header', encoding=self.encoding, **map)
-            if 'mimetype' not in tmpl:
-                # old template with inline HTTP headers
-                header_file = cStringIO.StringIO(templater.stringify(header))
-                msg = mimetools.Message(header_file, 0)
-                header = header_file.read()
-            yield header
+            yield tmpl('header', encoding=self.encoding, **map)

         def footer(**map):
             yield tmpl("footer", **map)
@@ -355,54 +268,6 @@ class hgweb(object):
         if len(files) > self.maxfiles:
             yield tmpl("fileellipses")

-    def siblings(self, siblings=[], hiderev=None, **args):
-        siblings = [s for s in siblings if s.node() != nullid]
-        if len(siblings) == 1 and siblings[0].rev() == hiderev:
-            return
-        for s in siblings:
-            d = {'node': hex(s.node()), 'rev': s.rev()}
-            if hasattr(s, 'path'):
-                d['file'] = s.path()
-            d.update(args)
-            yield d
-
-    def renamelink(self, fl, node):
-        r = fl.renamed(node)
-        if r:
-            return [dict(file=r[0], node=hex(r[1]))]
-        return []
-
-    def nodetagsdict(self, node):
-        return [{"name": i} for i in self.repo.nodetags(node)]
-
-    def nodebranchdict(self, ctx):
-        branches = []
-        branch = ctx.branch()
-        # If this is an empty repo, ctx.node() == nullid,
-        # ctx.branch() == 'default', but branchtags() is
-        # an empty dict. Using dict.get avoids a traceback.
-        if self.repo.branchtags().get(branch) == ctx.node():
-            branches.append({"name": branch})
-        return branches
-
-    def nodeinbranch(self, ctx):
-        branches = []
-        branch = ctx.branch()
-        if branch != 'default' and self.repo.branchtags().get(branch) != ctx.node():
-            branches.append({"name": branch})
-        return branches
-
-    def nodebranchnodefault(self, ctx):
-        branches = []
-        branch = ctx.branch()
-        if branch != 'default':
-            branches.append({"name": branch})
-        return branches
-
-    def showtag(self, tmpl, t1, node=nullid, **args):
-        for t in self.repo.nodetags(node):
-            yield tmpl(t1, tag=t, **args)
-
     def diff(self, tmpl, node1, node2, files):
         def filterfiles(filters, files):
             l = [x for x in files if x in filters]
@@ -443,8 +308,8 @@ class hgweb(object):
                        linenumber="% 8s" % lineno)

         r = self.repo
-        c1 = r.…
-        c2 = r.…
+        c1 = r[node1]
+        c2 = r[node2]
         date1 = util.datestr(c1.date())
         date2 = util.datestr(c2.date())

@@ -470,524 +335,45 @@ class hgweb(object):
                 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f, f,
                                               opts=diffopts), f, tn)

-    def changelog(self, tmpl, ctx, shortlog=False):
-    def search(self, tmpl, query):
-    def changeset(self, tmpl, ctx):
-    def filelog(self, tmpl, fctx):
-    def filerevision(self, tmpl, fctx):
-    def fileannotate(self, tmpl, fctx):
-    def manifest(self, tmpl, ctx, path):
-    def tags(self, tmpl):
-    def summary(self, tmpl):
-    def filediff(self, tmpl, fctx):
-    [… the bodies of these ten page-rendering methods — roughly 440 lines of
-     template-feeding code for the changelog/shortlog, search, changeset, file
-     log, file revision, annotate, manifest, tags, summary and filediff views —
-     are deleted in this hunk; only their signatures are reproduced here …]
-
     archive_specs = {
         'bz2': ('application/x-tar', 'tbz2', '.tar.bz2', None),
         'gz': ('application/x-tar', 'tgz', '.tar.gz', None),
         'zip': ('application/zip', 'zip', '.zip', None),
     }

-    def …(self, req, key, type_):
-        reponame = re.sub(r"\W+", "-", os.path.basename(self.reponame))
-        cnode = self.repo.lookup(key)
-        arch_version = key
-        if cnode == key or key == 'tip':
-            arch_version = short(cnode)
-        name = "%s-%s" % (reponame, arch_version)
-        mimetype, artype, extension, encoding = self.archive_specs[type_]
-        headers = [
-            ('Content-Type', mimetype),
-            ('Content-Disposition', 'attachment; filename=%s%s' %
-             (name, extension))
-        ]
-        if encoding:
-            headers.append(('Content-Encoding', encoding))
-        req.header(headers)
-        req.respond(HTTP_OK)
-        archival.archive(self.repo, req, cnode, artype, prefix=name)
+    def check_perm(self, req, op):
+        '''Check permission for operation based on request data (including
+        authentication info. Return true if op allowed, else false.'''

     # add tags to things
|
348 | def error(status, message): | |
943 | # tags -> list of changesets corresponding to tags |
|
349 | req.respond(status, protocol.HGTYPE) | |
944 | # find tag, changeset, file |
|
350 | req.write('0\n%s\n' % message) | |
945 |
|
351 | |||
946 | def cleanpath(self, path): |
|
352 | if op == 'pull': | |
947 | path = path.lstrip('/') |
|
353 | return self.allowpull | |
948 | return util.canonpath(self.repo.root, '', path) |
|
|||
949 |
|
354 | |||
950 | def changectx(self, req): |
|
355 | # enforce that you can only push using POST requests | |
951 | if 'node' in req.form: |
|
356 | if req.env['REQUEST_METHOD'] != 'POST': | |
952 | changeid = req.form['node'][0] |
|
357 | error('405 Method Not Allowed', 'push requires POST request') | |
953 | elif 'manifest' in req.form: |
|
358 | return False | |
954 | changeid = req.form['manifest'][0] |
|
|||
955 | else: |
|
|||
956 | changeid = self.repo.changelog.count() - 1 |
|
|||
957 |
|
||||
958 | try: |
|
|||
959 | ctx = self.repo.changectx(changeid) |
|
|||
960 | except RepoError: |
|
|||
961 | man = self.repo.manifest |
|
|||
962 | mn = man.lookup(changeid) |
|
|||
963 | ctx = self.repo.changectx(man.linkrev(mn)) |
|
|||
964 |
|
||||
965 | return ctx |
|
|||
966 |
|
359 | |||
967 | def filectx(self, req): |
|
360 | # require ssl by default for pushing, auth info cannot be sniffed | |
968 | path = self.cleanpath(req.form['file'][0]) |
|
361 | # and replayed | |
969 | if 'node' in req.form: |
|
362 | scheme = req.env.get('wsgi.url_scheme') | |
970 | changeid = req.form['node'][0] |
|
363 | if self.configbool('web', 'push_ssl', True) and scheme != 'https': | |
971 | else: |
|
364 | error(HTTP_OK, 'ssl required') | |
972 | changeid = req.form['filenode'][0] |
|
365 | return False | |
973 | try: |
|
|||
974 | ctx = self.repo.changectx(changeid) |
|
|||
975 | fctx = ctx.filectx(path) |
|
|||
976 | except RepoError: |
|
|||
977 | fctx = self.repo.filectx(path, fileid=changeid) |
|
|||
978 |
|
||||
979 | return fctx |
|
|||
980 |
|
||||
981 | def check_perm(self, req, op, default): |
|
|||
982 | '''check permission for operation based on user auth. |
|
|||
983 | return true if op allowed, else false. |
|
|||
984 | default is policy to use if no config given.''' |
|
|||
985 |
|
366 | |||
986 | user = req.env.get('REMOTE_USER') |
|
367 | user = req.env.get('REMOTE_USER') | |
987 |
|
368 | |||
988 |
deny = self.configlist('web', 'deny_' |
|
369 | deny = self.configlist('web', 'deny_push') | |
989 | if deny and (not user or deny == ['*'] or user in deny): |
|
370 | if deny and (not user or deny == ['*'] or user in deny): | |
|
371 | error('401 Unauthorized', 'push not authorized') | |||
990 | return False |
|
372 | return False | |
991 |
|
373 | |||
992 |
allow = self.configlist('web', 'allow_' |
|
374 | allow = self.configlist('web', 'allow_push') | |
993 |
re |
|
375 | result = allow and (allow == ['*'] or user in allow) | |
|
376 | if not result: | |||
|
377 | error('401 Unauthorized', 'push not authorized') | |||
|
378 | ||||
|
379 | return result |
@@ -6,7 +6,7 | |||||
6 | # This software may be used and distributed according to the terms |
|
6 | # This software may be used and distributed according to the terms | |
7 | # of the GNU General Public License, incorporated herein by reference. |
|
7 | # of the GNU General Public License, incorporated herein by reference. | |
8 |
|
8 | |||
9 | import os, mimetools, cStringIO |
|
9 | import os | |
10 | from mercurial.i18n import gettext as _ |
|
10 | from mercurial.i18n import gettext as _ | |
11 | from mercurial.repo import RepoError |
|
11 | from mercurial.repo import RepoError | |
12 | from mercurial import ui, hg, util, templater, templatefilters |
|
12 | from mercurial import ui, hg, util, templater, templatefilters | |
@@ -19,8 +19,8 from request import wsgirequest | |||||
19 | class hgwebdir(object): |
|
19 | class hgwebdir(object): | |
20 | def __init__(self, config, parentui=None): |
|
20 | def __init__(self, config, parentui=None): | |
21 | def cleannames(items): |
|
21 | def cleannames(items): | |
22 | return [(util.pconvert(name).strip('/'), path) |
|
22 | return util.sort([(util.pconvert(name).strip('/'), path) | |
23 | for name, path in items] |
|
23 | for name, path in items]) | |
24 |
|
24 | |||
25 | self.parentui = parentui or ui.ui(report_untrusted=False, |
|
25 | self.parentui = parentui or ui.ui(report_untrusted=False, | |
26 | interactive = False) |
|
26 | interactive = False) | |
@@ -34,7 +34,6 class hgwebdir(object): | |||||
34 | self.repos_sorted = ('', False) |
|
34 | self.repos_sorted = ('', False) | |
35 | elif isinstance(config, dict): |
|
35 | elif isinstance(config, dict): | |
36 | self.repos = cleannames(config.items()) |
|
36 | self.repos = cleannames(config.items()) | |
37 | self.repos.sort() |
|
|||
38 | else: |
|
37 | else: | |
39 | if isinstance(config, util.configparser): |
|
38 | if isinstance(config, util.configparser): | |
40 | cp = config |
|
39 | cp = config | |
@@ -71,8 +70,7 class hgwebdir(object): | |||||
71 |
|
70 | |||
72 | def __call__(self, env, respond): |
|
71 | def __call__(self, env, respond): | |
73 | req = wsgirequest(env, respond) |
|
72 | req = wsgirequest(env, respond) | |
74 | self.run_wsgi(req) |
|
73 | return self.run_wsgi(req) | |
75 | return req |
|
|||
76 |
|
74 | |||
77 | def run_wsgi(self, req): |
|
75 | def run_wsgi(self, req): | |
78 |
|
76 | |||
@@ -81,17 +79,8 class hgwebdir(object): | |||||
81 |
|
79 | |||
82 | virtual = req.env.get("PATH_INFO", "").strip('/') |
|
80 | virtual = req.env.get("PATH_INFO", "").strip('/') | |
83 | tmpl = self.templater(req) |
|
81 | tmpl = self.templater(req) | |
84 | try: |
|
|||
85 |
|
|
82 | ctype = tmpl('mimetype', encoding=util._encoding) | |
86 |
|
|
83 | ctype = templater.stringify(ctype) | |
87 | except KeyError: |
|
|||
88 | # old templates with inline HTTP headers? |
|
|||
89 | if 'mimetype' in tmpl: |
|
|||
90 | raise |
|
|||
91 | header = tmpl('header', encoding=util._encoding) |
|
|||
92 | header_file = cStringIO.StringIO(templater.stringify(header)) |
|
|||
93 | msg = mimetools.Message(header_file, 0) |
|
|||
94 | ctype = msg['content-type'] |
|
|||
95 |
|
84 | |||
96 | # a static file |
|
85 | # a static file | |
97 | if virtual.startswith('static/') or 'static' in req.form: |
|
86 | if virtual.startswith('static/') or 'static' in req.form: | |
@@ -101,13 +90,13 class hgwebdir(object): | |||||
101 | else: |
|
90 | else: | |
102 | fname = req.form['static'][0] |
|
91 | fname = req.form['static'][0] | |
103 | req.write(staticfile(static, fname, req)) |
|
92 | req.write(staticfile(static, fname, req)) | |
104 | return |
|
93 | return [] | |
105 |
|
94 | |||
106 | # top-level index |
|
95 | # top-level index | |
107 | elif not virtual: |
|
96 | elif not virtual: | |
108 | req.respond(HTTP_OK, ctype) |
|
97 | req.respond(HTTP_OK, ctype) | |
109 | req.write(self.makeindex(req, tmpl)) |
|
98 | req.write(self.makeindex(req, tmpl)) | |
110 | return |
|
99 | return [] | |
111 |
|
100 | |||
112 | # nested indexes and hgwebs |
|
101 | # nested indexes and hgwebs | |
113 |
|
102 | |||
@@ -118,8 +107,7 class hgwebdir(object): | |||||
118 | req.env['REPO_NAME'] = virtual |
|
107 | req.env['REPO_NAME'] = virtual | |
119 | try: |
|
108 | try: | |
120 | repo = hg.repository(self.parentui, real) |
|
109 | repo = hg.repository(self.parentui, real) | |
121 | hgweb(repo).run_wsgi(req) |
|
110 | return hgweb(repo).run_wsgi(req) | |
122 | return |
|
|||
123 | except IOError, inst: |
|
111 | except IOError, inst: | |
124 | msg = inst.strerror |
|
112 | msg = inst.strerror | |
125 | raise ErrorResponse(HTTP_SERVER_ERROR, msg) |
|
113 | raise ErrorResponse(HTTP_SERVER_ERROR, msg) | |
@@ -131,7 +119,7 class hgwebdir(object): | |||||
131 | if [r for r in repos if r.startswith(subdir)]: |
|
119 | if [r for r in repos if r.startswith(subdir)]: | |
132 | req.respond(HTTP_OK, ctype) |
|
120 | req.respond(HTTP_OK, ctype) | |
133 | req.write(self.makeindex(req, tmpl, subdir)) |
|
121 | req.write(self.makeindex(req, tmpl, subdir)) | |
134 | return |
|
122 | return [] | |
135 |
|
123 | |||
136 | up = virtual.rfind('/') |
|
124 | up = virtual.rfind('/') | |
137 | if up < 0: |
|
125 | if up < 0: | |
@@ -141,10 +129,12 class hgwebdir(object): | |||||
141 | # prefixes not found |
|
129 | # prefixes not found | |
142 | req.respond(HTTP_NOT_FOUND, ctype) |
|
130 | req.respond(HTTP_NOT_FOUND, ctype) | |
143 | req.write(tmpl("notfound", repo=virtual)) |
|
131 | req.write(tmpl("notfound", repo=virtual)) | |
|
132 | return [] | |||
144 |
|
133 | |||
145 | except ErrorResponse, err: |
|
134 | except ErrorResponse, err: | |
146 | req.respond(err.code, ctype) |
|
135 | req.respond(err.code, ctype) | |
147 | req.write(tmpl('error', error=err.message or '')) |
|
136 | req.write(tmpl('error', error=err.message or '')) | |
|
137 | return [] | |||
148 | finally: |
|
138 | finally: | |
149 | tmpl = None |
|
139 | tmpl = None | |
150 |
|
140 | |||
@@ -257,13 +247,7 class hgwebdir(object): | |||||
257 | def templater(self, req): |
|
247 | def templater(self, req): | |
258 |
|
248 | |||
259 | def header(**map): |
|
249 | def header(**map): | |
260 |
|
|
250 | yield tmpl('header', encoding=util._encoding, **map) | |
261 | if 'mimetype' not in tmpl: |
|
|||
262 | # old template with inline HTTP headers |
|
|||
263 | header_file = cStringIO.StringIO(templater.stringify(header)) |
|
|||
264 | msg = mimetools.Message(header_file, 0) |
|
|||
265 | header = header_file.read() |
|
|||
266 | yield header |
|
|||
267 |
|
251 | |||
268 | def footer(**map): |
|
252 | def footer(**map): | |
269 | yield tmpl("footer", **map) |
|
253 | yield tmpl("footer", **map) |
@@ -21,69 +21,65 from common import HTTP_OK, HTTP_NOT_FOU | |||||
21 |
|
21 | |||
22 | HGTYPE = 'application/mercurial-0.1' |
|
22 | HGTYPE = 'application/mercurial-0.1' | |
23 |
|
23 | |||
24 |
def lookup( |
|
24 | def lookup(repo, req): | |
25 | try: |
|
25 | try: | |
26 |
r = hex( |
|
26 | r = hex(repo.lookup(req.form['key'][0])) | |
27 | success = 1 |
|
27 | success = 1 | |
28 | except Exception,inst: |
|
28 | except Exception,inst: | |
29 | r = str(inst) |
|
29 | r = str(inst) | |
30 | success = 0 |
|
30 | success = 0 | |
31 | resp = "%s %s\n" % (success, r) |
|
31 | resp = "%s %s\n" % (success, r) | |
32 | req.respond(HTTP_OK, HGTYPE, length=len(resp)) |
|
32 | req.respond(HTTP_OK, HGTYPE, length=len(resp)) | |
33 | req.write(resp) |
|
33 | yield resp | |
34 |
|
34 | |||
35 |
def heads( |
|
35 | def heads(repo, req): | |
36 |
resp = " ".join(map(hex, |
|
36 | resp = " ".join(map(hex, repo.heads())) + "\n" | |
37 | req.respond(HTTP_OK, HGTYPE, length=len(resp)) |
|
37 | req.respond(HTTP_OK, HGTYPE, length=len(resp)) | |
38 | req.write(resp) |
|
38 | yield resp | |
39 |
|
39 | |||
40 |
def branches( |
|
40 | def branches(repo, req): | |
41 | nodes = [] |
|
41 | nodes = [] | |
42 | if 'nodes' in req.form: |
|
42 | if 'nodes' in req.form: | |
43 | nodes = map(bin, req.form['nodes'][0].split(" ")) |
|
43 | nodes = map(bin, req.form['nodes'][0].split(" ")) | |
44 | resp = cStringIO.StringIO() |
|
44 | resp = cStringIO.StringIO() | |
45 |
for b in |
|
45 | for b in repo.branches(nodes): | |
46 | resp.write(" ".join(map(hex, b)) + "\n") |
|
46 | resp.write(" ".join(map(hex, b)) + "\n") | |
47 | resp = resp.getvalue() |
|
47 | resp = resp.getvalue() | |
48 | req.respond(HTTP_OK, HGTYPE, length=len(resp)) |
|
48 | req.respond(HTTP_OK, HGTYPE, length=len(resp)) | |
49 | req.write(resp) |
|
49 | yield resp | |
50 |
|
50 | |||
51 |
def between( |
|
51 | def between(repo, req): | |
52 | if 'pairs' in req.form: |
|
52 | if 'pairs' in req.form: | |
53 | pairs = [map(bin, p.split("-")) |
|
53 | pairs = [map(bin, p.split("-")) | |
54 | for p in req.form['pairs'][0].split(" ")] |
|
54 | for p in req.form['pairs'][0].split(" ")] | |
55 | resp = cStringIO.StringIO() |
|
55 | resp = cStringIO.StringIO() | |
56 |
for b in |
|
56 | for b in repo.between(pairs): | |
57 | resp.write(" ".join(map(hex, b)) + "\n") |
|
57 | resp.write(" ".join(map(hex, b)) + "\n") | |
58 | resp = resp.getvalue() |
|
58 | resp = resp.getvalue() | |
59 | req.respond(HTTP_OK, HGTYPE, length=len(resp)) |
|
59 | req.respond(HTTP_OK, HGTYPE, length=len(resp)) | |
60 | req.write(resp) |
|
60 | yield resp | |
61 |
|
61 | |||
62 |
def changegroup( |
|
62 | def changegroup(repo, req): | |
63 | req.respond(HTTP_OK, HGTYPE) |
|
63 | req.respond(HTTP_OK, HGTYPE) | |
64 | nodes = [] |
|
64 | nodes = [] | |
65 | if not web.allowpull: |
|
|||
66 | return |
|
|||
67 |
|
65 | |||
68 | if 'roots' in req.form: |
|
66 | if 'roots' in req.form: | |
69 | nodes = map(bin, req.form['roots'][0].split(" ")) |
|
67 | nodes = map(bin, req.form['roots'][0].split(" ")) | |
70 |
|
68 | |||
71 | z = zlib.compressobj() |
|
69 | z = zlib.compressobj() | |
72 |
f = |
|
70 | f = repo.changegroup(nodes, 'serve') | |
73 | while 1: |
|
71 | while 1: | |
74 | chunk = f.read(4096) |
|
72 | chunk = f.read(4096) | |
75 | if not chunk: |
|
73 | if not chunk: | |
76 | break |
|
74 | break | |
77 |
|
|
75 | yield z.compress(chunk) | |
78 |
|
76 | |||
79 |
|
|
77 | yield z.flush() | |
80 |
|
78 | |||
81 |
def changegroupsubset( |
|
79 | def changegroupsubset(repo, req): | |
82 | req.respond(HTTP_OK, HGTYPE) |
|
80 | req.respond(HTTP_OK, HGTYPE) | |
83 | bases = [] |
|
81 | bases = [] | |
84 | heads = [] |
|
82 | heads = [] | |
85 | if not web.allowpull: |
|
|||
86 | return |
|
|||
87 |
|
83 | |||
88 | if 'bases' in req.form: |
|
84 | if 'bases' in req.form: | |
89 | bases = [bin(x) for x in req.form['bases'][0].split(' ')] |
|
85 | bases = [bin(x) for x in req.form['bases'][0].split(' ')] | |
@@ -91,67 +87,44 def changegroupsubset(web, req): | |||||
91 | heads = [bin(x) for x in req.form['heads'][0].split(' ')] |
|
87 | heads = [bin(x) for x in req.form['heads'][0].split(' ')] | |
92 |
|
88 | |||
93 | z = zlib.compressobj() |
|
89 | z = zlib.compressobj() | |
94 |
f = |
|
90 | f = repo.changegroupsubset(bases, heads, 'serve') | |
95 | while 1: |
|
91 | while 1: | |
96 | chunk = f.read(4096) |
|
92 | chunk = f.read(4096) | |
97 | if not chunk: |
|
93 | if not chunk: | |
98 | break |
|
94 | break | |
99 |
|
|
95 | yield z.compress(chunk) | |
100 |
|
96 | |||
101 |
|
|
97 | yield z.flush() | |
102 |
|
98 | |||
103 |
def capabilities( |
|
99 | def capabilities(repo, req): | |
104 | resp = ' '.join(web.capabilities()) |
|
100 | caps = ['lookup', 'changegroupsubset'] | |
105 | req.respond(HTTP_OK, HGTYPE, length=len(resp)) |
|
101 | if repo.ui.configbool('server', 'uncompressed', untrusted=True): | |
106 | req.write(resp) |
|
102 | caps.append('stream=%d' % repo.changelog.version) | |
|
103 | if changegroupmod.bundlepriority: | |||
|
104 | caps.append('unbundle=%s' % ','.join(changegroupmod.bundlepriority)) | |||
|
105 | rsp = ' '.join(caps) | |||
|
106 | req.respond(HTTP_OK, HGTYPE, length=len(rsp)) | |||
|
107 | yield rsp | |||
107 |
|
108 | |||
108 |
def unbundle( |
|
109 | def unbundle(repo, req): | |
|
110 | ||||
|
111 | errorfmt = '0\n%s\n' | |||
|
112 | proto = req.env.get('wsgi.url_scheme') or 'http' | |||
|
113 | their_heads = req.form['heads'][0].split(' ') | |||
109 |
|
114 | |||
110 | def bail(response, headers={}): |
|
115 | def check_heads(): | |
|
116 | heads = map(hex, repo.heads()) | |||
|
117 | return their_heads == [hex('force')] or their_heads == heads | |||
|
118 | ||||
|
119 | # fail early if possible | |||
|
120 | if not check_heads(): | |||
111 | length = int(req.env.get('CONTENT_LENGTH', 0)) |
|
121 | length = int(req.env.get('CONTENT_LENGTH', 0)) | |
112 | for s in util.filechunkiter(req, limit=length): |
|
122 | for s in util.filechunkiter(req, limit=length): | |
113 | # drain incoming bundle, else client will not see |
|
123 | # drain incoming bundle, else client will not see | |
114 | # response when run outside cgi script |
|
124 | # response when run outside cgi script | |
115 | pass |
|
125 | pass | |
116 |
|
126 | req.respond(HTTP_OK, HGTYPE) | ||
117 | status = headers.pop('status', HTTP_OK) |
|
127 | return errorfmt % 'unsynced changes', | |
118 | req.header(headers.items()) |
|
|||
119 | req.respond(status, HGTYPE) |
|
|||
120 | req.write('0\n') |
|
|||
121 | req.write(response) |
|
|||
122 |
|
||||
123 | # enforce that you can only unbundle with POST requests |
|
|||
124 | if req.env['REQUEST_METHOD'] != 'POST': |
|
|||
125 | headers = {'status': '405 Method Not Allowed'} |
|
|||
126 | bail('unbundle requires POST request\n', headers) |
|
|||
127 | return |
|
|||
128 |
|
||||
129 | # require ssl by default, auth info cannot be sniffed and |
|
|||
130 | # replayed |
|
|||
131 | ssl_req = web.configbool('web', 'push_ssl', True) |
|
|||
132 | if ssl_req: |
|
|||
133 | if req.env.get('wsgi.url_scheme') != 'https': |
|
|||
134 | bail('ssl required\n') |
|
|||
135 | return |
|
|||
136 | proto = 'https' |
|
|||
137 | else: |
|
|||
138 | proto = 'http' |
|
|||
139 |
|
||||
140 | # do not allow push unless explicitly allowed |
|
|||
141 | if not web.check_perm(req, 'push', False): |
|
|||
142 | bail('push not authorized\n', headers={'status': '401 Unauthorized'}) |
|
|||
143 | return |
|
|||
144 |
|
||||
145 | their_heads = req.form['heads'][0].split(' ') |
|
|||
146 |
|
||||
147 | def check_heads(): |
|
|||
148 | heads = map(hex, web.repo.heads()) |
|
|||
149 | return their_heads == [hex('force')] or their_heads == heads |
|
|||
150 |
|
||||
151 | # fail early if possible |
|
|||
152 | if not check_heads(): |
|
|||
153 | bail('unsynced changes\n') |
|
|||
154 | return |
|
|||
155 |
|
128 | |||
156 | req.respond(HTTP_OK, HGTYPE) |
|
129 | req.respond(HTTP_OK, HGTYPE) | |
157 |
|
130 | |||
@@ -166,12 +139,10 def unbundle(web, req): | |||||
166 | fp.write(s) |
|
139 | fp.write(s) | |
167 |
|
140 | |||
168 | try: |
|
141 | try: | |
169 |
lock = |
|
142 | lock = repo.lock() | |
170 | try: |
|
143 | try: | |
171 | if not check_heads(): |
|
144 | if not check_heads(): | |
172 | req.write('0\n') |
|
145 | return errorfmt % 'unsynced changes', | |
173 | req.write('unsynced changes\n') |
|
|||
174 | return |
|
|||
175 |
|
146 | |||
176 | fp.seek(0) |
|
147 | fp.seek(0) | |
177 | header = fp.read(6) |
|
148 | header = fp.read(6) | |
@@ -190,26 +161,23 def unbundle(web, req): | |||||
190 | url = 'remote:%s:%s' % (proto, |
|
161 | url = 'remote:%s:%s' % (proto, | |
191 | req.env.get('REMOTE_HOST', '')) |
|
162 | req.env.get('REMOTE_HOST', '')) | |
192 | try: |
|
163 | try: | |
193 |
ret = |
|
164 | ret = repo.addchangegroup(gen, 'serve', url) | |
194 | except util.Abort, inst: |
|
165 | except util.Abort, inst: | |
195 | sys.stdout.write("abort: %s\n" % inst) |
|
166 | sys.stdout.write("abort: %s\n" % inst) | |
196 | ret = 0 |
|
167 | ret = 0 | |
197 | finally: |
|
168 | finally: | |
198 | val = sys.stdout.getvalue() |
|
169 | val = sys.stdout.getvalue() | |
199 | sys.stdout, sys.stderr = oldio |
|
170 | sys.stdout, sys.stderr = oldio | |
200 |
re |
|
171 | return '%d\n%s' % (ret, val), | |
201 | req.write(val) |
|
|||
202 | finally: |
|
172 | finally: | |
203 | del lock |
|
173 | del lock | |
204 | except ValueError, inst: |
|
174 | except ValueError, inst: | |
205 | req.write('0\n') |
|
175 | return errorfmt % inst, | |
206 | req.write(str(inst) + '\n') |
|
|||
207 | except (OSError, IOError), inst: |
|
176 | except (OSError, IOError), inst: | |
208 | req.write('0\n') |
|
|||
209 | filename = getattr(inst, 'filename', '') |
|
177 | filename = getattr(inst, 'filename', '') | |
210 | # Don't send our filesystem layout to the client |
|
178 | # Don't send our filesystem layout to the client | |
211 |
if filename.startswith( |
|
179 | if filename.startswith(repo.root): | |
212 |
filename = filename[len( |
|
180 | filename = filename[len(repo.root)+1:] | |
213 | else: |
|
181 | else: | |
214 | filename = '' |
|
182 | filename = '' | |
215 | error = getattr(inst, 'strerror', 'Unknown error') |
|
183 | error = getattr(inst, 'strerror', 'Unknown error') | |
@@ -218,13 +186,12 def unbundle(web, req): | |||||
218 | else: |
|
186 | else: | |
219 | code = HTTP_SERVER_ERROR |
|
187 | code = HTTP_SERVER_ERROR | |
220 | req.respond(code) |
|
188 | req.respond(code) | |
221 |
re |
|
189 | return '0\n%s: %s\n' % (error, filename), | |
222 | finally: |
|
190 | finally: | |
223 | fp.close() |
|
191 | fp.close() | |
224 | os.unlink(tempname) |
|
192 | os.unlink(tempname) | |
225 |
|
193 | |||
226 |
def stream_out( |
|
194 | def stream_out(repo, req): | |
227 | if not web.allowpull: |
|
|||
228 | return |
|
|||
229 | req.respond(HTTP_OK, HGTYPE) |
|
195 | req.respond(HTTP_OK, HGTYPE) | |
230 |
streamclone.stream_out( |
|
196 | streamclone.stream_out(repo, req, untrusted=True) | |
|
197 | return [] |
@@ -9,6 +9,31 | |||||
9 | import socket, cgi, errno |
|
9 | import socket, cgi, errno | |
10 | from common import ErrorResponse, statusmessage |
|
10 | from common import ErrorResponse, statusmessage | |
11 |
|
11 | |||
|
12 | shortcuts = { | |||
|
13 | 'cl': [('cmd', ['changelog']), ('rev', None)], | |||
|
14 | 'sl': [('cmd', ['shortlog']), ('rev', None)], | |||
|
15 | 'cs': [('cmd', ['changeset']), ('node', None)], | |||
|
16 | 'f': [('cmd', ['file']), ('filenode', None)], | |||
|
17 | 'fl': [('cmd', ['filelog']), ('filenode', None)], | |||
|
18 | 'fd': [('cmd', ['filediff']), ('node', None)], | |||
|
19 | 'fa': [('cmd', ['annotate']), ('filenode', None)], | |||
|
20 | 'mf': [('cmd', ['manifest']), ('manifest', None)], | |||
|
21 | 'ca': [('cmd', ['archive']), ('node', None)], | |||
|
22 | 'tags': [('cmd', ['tags'])], | |||
|
23 | 'tip': [('cmd', ['changeset']), ('node', ['tip'])], | |||
|
24 | 'static': [('cmd', ['static']), ('file', None)] | |||
|
25 | } | |||
|
26 | ||||
|
27 | def expand(form): | |||
|
28 | for k in shortcuts.iterkeys(): | |||
|
29 | if k in form: | |||
|
30 | for name, value in shortcuts[k]: | |||
|
31 | if value is None: | |||
|
32 | value = form[k] | |||
|
33 | form[name] = value | |||
|
34 | del form[k] | |||
|
35 | return form | |||
|
36 | ||||
12 | class wsgirequest(object): |
|
37 | class wsgirequest(object): | |
13 | def __init__(self, wsgienv, start_response): |
|
38 | def __init__(self, wsgienv, start_response): | |
14 | version = wsgienv['wsgi.version'] |
|
39 | version = wsgienv['wsgi.version'] | |
@@ -21,7 +46,7 class wsgirequest(object): | |||||
21 | self.multiprocess = wsgienv['wsgi.multiprocess'] |
|
46 | self.multiprocess = wsgienv['wsgi.multiprocess'] | |
22 | self.run_once = wsgienv['wsgi.run_once'] |
|
47 | self.run_once = wsgienv['wsgi.run_once'] | |
23 | self.env = wsgienv |
|
48 | self.env = wsgienv | |
24 | self.form = cgi.parse(self.inp, self.env, keep_blank_values=1) |
|
49 | self.form = expand(cgi.parse(self.inp, self.env, keep_blank_values=1)) | |
25 | self._start_response = start_response |
|
50 | self._start_response = start_response | |
26 | self.server_write = None |
|
51 | self.server_write = None | |
27 | self.headers = [] |
|
52 | self.headers = [] |
@@ -122,7 +122,8 class _hgwebhandler(object, BaseHTTPServ | |||||
122 | self.saved_headers = [] |
|
122 | self.saved_headers = [] | |
123 | self.sent_headers = False |
|
123 | self.sent_headers = False | |
124 | self.length = None |
|
124 | self.length = None | |
125 | self.server.application(env, self._start_response) |
|
125 | for chunk in self.server.application(env, self._start_response): | |
|
126 | self._write(chunk) | |||
126 |
|
127 | |||
127 | def send_headers(self): |
|
128 | def send_headers(self): | |
128 | if not self.saved_status: |
|
129 | if not self.saved_status: | |
@@ -268,12 +269,7 def create_server(ui, repo): | |||||
268 |
|
269 | |||
269 | self.addr, self.port = self.socket.getsockname()[0:2] |
|
270 | self.addr, self.port = self.socket.getsockname()[0:2] | |
270 | self.prefix = prefix |
|
271 | self.prefix = prefix | |
271 |
|
||||
272 | self.fqaddr = socket.getfqdn(address) |
|
272 | self.fqaddr = socket.getfqdn(address) | |
273 | try: |
|
|||
274 | socket.getaddrbyhost(self.fqaddr) |
|
|||
275 | except: |
|
|||
276 | fqaddr = address |
|
|||
277 |
|
273 | |||
278 | class IPv6HTTPServer(MercurialHTTPServer): |
|
274 | class IPv6HTTPServer(MercurialHTTPServer): | |
279 | address_family = getattr(socket, 'AF_INET6', None) |
|
275 | address_family = getattr(socket, 'AF_INET6', None) |
This diff has been collapsed as it changes many lines, (537 lines changed) Show them Hide them | |||||
@@ -5,10 +5,15 | |||||
5 | # This software may be used and distributed according to the terms |
|
5 | # This software may be used and distributed according to the terms | |
6 | # of the GNU General Public License, incorporated herein by reference. |
|
6 | # of the GNU General Public License, incorporated herein by reference. | |
7 |
|
7 | |||
8 | import os, mimetypes |
|
8 | import os, mimetypes, re, cgi | |
9 | from mercurial import revlog, util |
|
9 | import webutil | |
|
10 | from mercurial import revlog, archival, templatefilters | |||
|
11 | from mercurial.node import short, hex, nullid | |||
|
12 | from mercurial.util import binary, datestr | |||
10 | from mercurial.repo import RepoError |
|
13 | from mercurial.repo import RepoError | |
11 |
from common import staticfile, |
|
14 | from common import paritygen, staticfile, get_contact, ErrorResponse | |
|
15 | from common import HTTP_OK, HTTP_NOT_FOUND | |||
|
16 | from mercurial import graphmod, util | |||
12 |
|
17 | |||
13 | # __all__ is populated with the allowed commands. Be sure to add to it if |
|
18 | # __all__ is populated with the allowed commands. Be sure to add to it if | |
14 | # you're adding a new command, or the new command won't work. |
|
19 | # you're adding a new command, or the new command won't work. | |
@@ -16,7 +21,7 from common import staticfile, ErrorResp | |||||
16 | __all__ = [ |
|
21 | __all__ = [ | |
17 | 'log', 'rawfile', 'file', 'changelog', 'shortlog', 'changeset', 'rev', |
|
22 | 'log', 'rawfile', 'file', 'changelog', 'shortlog', 'changeset', 'rev', | |
18 | 'manifest', 'tags', 'summary', 'filediff', 'diff', 'annotate', 'filelog', |
|
23 | 'manifest', 'tags', 'summary', 'filediff', 'diff', 'annotate', 'filelog', | |
19 | 'archive', 'static', |
|
24 | 'archive', 'static', 'graph', | |
20 | ] |
|
25 | ] | |
21 |
|
26 | |||
22 | def log(web, req, tmpl): |
|
27 | def log(web, req, tmpl): | |
@@ -26,17 +31,17 def log(web, req, tmpl): | |||||
26 | return changelog(web, req, tmpl) |
|
31 | return changelog(web, req, tmpl) | |
27 |
|
32 | |||
28 | def rawfile(web, req, tmpl): |
|
33 | def rawfile(web, req, tmpl): | |
29 | path = web.cleanpath(req.form.get('file', [''])[0]) |
|
34 | path = webutil.cleanpath(web.repo, req.form.get('file', [''])[0]) | |
30 | if not path: |
|
35 | if not path: | |
31 |
content = |
|
36 | content = manifest(web, req, tmpl) | |
32 | req.respond(HTTP_OK, web.ctype) |
|
37 | req.respond(HTTP_OK, web.ctype) | |
33 | return content |
|
38 | return content | |
34 |
|
39 | |||
35 | try: |
|
40 | try: | |
36 | fctx = web.filectx(req) |
|
41 | fctx = webutil.filectx(web.repo, req) | |
37 | except revlog.LookupError, inst: |
|
42 | except revlog.LookupError, inst: | |
38 | try: |
|
43 | try: | |
39 |
content = |
|
44 | content = manifest(web, req, tmpl) | |
40 | req.respond(HTTP_OK, web.ctype) |
|
45 | req.respond(HTTP_OK, web.ctype) | |
41 | return content |
|
46 | return content | |
42 | except ErrorResponse: |
|
47 | except ErrorResponse: | |
@@ -45,76 +50,514 def rawfile(web, req, tmpl): | |||||
45 | path = fctx.path() |
|
50 | path = fctx.path() | |
46 | text = fctx.data() |
|
51 | text = fctx.data() | |
47 | mt = mimetypes.guess_type(path)[0] |
|
52 | mt = mimetypes.guess_type(path)[0] | |
48 |
if mt is None or |
|
53 | if mt is None or binary(text): | |
49 | mt = mt or 'application/octet-stream' |
|
54 | mt = mt or 'application/octet-stream' | |
50 |
|
55 | |||
51 | req.respond(HTTP_OK, mt, path, len(text)) |
|
56 | req.respond(HTTP_OK, mt, path, len(text)) | |
52 | return [text] |
|
57 | return [text] | |
53 |
|
58 | |||
|
59 | def _filerevision(web, tmpl, fctx): | |||
|
60 | f = fctx.path() | |||
|
61 | text = fctx.data() | |||
|
62 | fl = fctx.filelog() | |||
|
63 | n = fctx.filenode() | |||
|
64 | parity = paritygen(web.stripecount) | |||
|
65 | ||||
|
66 | if binary(text): | |||
|
67 | mt = mimetypes.guess_type(f)[0] or 'application/octet-stream' | |||
|
68 | text = '(binary:%s)' % mt | |||
|
69 | ||||
|
70 | def lines(): | |||
|
71 | for lineno, t in enumerate(text.splitlines(1)): | |||
|
72 | yield {"line": t, | |||
|
73 | "lineid": "l%d" % (lineno + 1), | |||
|
74 | "linenumber": "% 6d" % (lineno + 1), | |||
|
75 | "parity": parity.next()} | |||
|
76 | ||||
|
77 | return tmpl("filerevision", | |||
|
78 | file=f, | |||
|
79 | path=webutil.up(f), | |||
|
80 | text=lines(), | |||
|
81 | rev=fctx.rev(), | |||
|
82 | node=hex(fctx.node()), | |||
|
83 | author=fctx.user(), | |||
|
84 | date=fctx.date(), | |||
|
85 | desc=fctx.description(), | |||
|
86 | branch=webutil.nodebranchnodefault(fctx), | |||
|
87 | parent=webutil.siblings(fctx.parents()), | |||
|
88 | child=webutil.siblings(fctx.children()), | |||
|
89 | rename=webutil.renamelink(fctx), | |||
|
90 | permissions=fctx.manifest().flags(f)) | |||
|
91 | ||||
54 | def file(web, req, tmpl): |
|
92 | def file(web, req, tmpl): | |
55 | path = web.cleanpath(req.form.get('file', [''])[0]) |
|
93 | path = webutil.cleanpath(web.repo, req.form.get('file', [''])[0]) | |
56 | if not path: |
|
94 | if not path: | |
57 | return web.manifest(tmpl, web.changectx(req), path) |
|
95 | return manifest(web, req, tmpl) | |
58 | try: |
|
96 | try: | |
59 |
return web |
|
97 | return _filerevision(web, tmpl, webutil.filectx(web.repo, req)) | |
60 | except revlog.LookupError, inst: |
|
98 | except revlog.LookupError, inst: | |
61 | try: |
|
99 | try: | |
62 |
return |
|
100 | return manifest(web, req, tmpl) | |
63 | except ErrorResponse: |
|
101 | except ErrorResponse: | |
64 | raise inst |
|
102 | raise inst | |
65 |
|
103 | |||
|
104 | def _search(web, tmpl, query): | |||
|
105 | ||||
|
106 | def changelist(**map): | |||
|
107 | cl = web.repo.changelog | |||
|
108 | count = 0 | |||
|
109 | qw = query.lower().split() | |||
|
110 | ||||
|
111 | def revgen(): | |||
|
112 | for i in xrange(len(cl) - 1, 0, -100): | |||
|
113 | l = [] | |||
|
114 | for j in xrange(max(0, i - 100), i + 1): | |||
|
115 | ctx = web.repo[j] | |||
|
116 | l.append(ctx) | |||
|
117 | l.reverse() | |||
|
118 | for e in l: | |||
|
119 | yield e | |||
|
120 | ||||
|
121 | for ctx in revgen(): | |||
|
122 | miss = 0 | |||
|
123 | for q in qw: | |||
|
124 | if not (q in ctx.user().lower() or | |||
|
125 | q in ctx.description().lower() or | |||
|
126 | q in " ".join(ctx.files()).lower()): | |||
|
127 | miss = 1 | |||
|
128 | break | |||
|
129 | if miss: | |||
|
130 | continue | |||
|
131 | ||||
|
132 | count += 1 | |||
|
133 | n = ctx.node() | |||
|
134 | showtags = webutil.showtag(web.repo, tmpl, 'changelogtag', n) | |||
|
135 | ||||
|
136 | yield tmpl('searchentry', | |||
|
137 | parity=parity.next(), | |||
|
138 | author=ctx.user(), | |||
|
139 | parent=webutil.siblings(ctx.parents()), | |||
|
140 | child=webutil.siblings(ctx.children()), | |||
|
141 | changelogtag=showtags, | |||
|
142 | desc=ctx.description(), | |||
|
143 | date=ctx.date(), | |||
|
144 | files=web.listfilediffs(tmpl, ctx.files(), n), | |||
|
145 | rev=ctx.rev(), | |||
|
146 | node=hex(n), | |||
|
147 | tags=webutil.nodetagsdict(web.repo, n), | |||
|
148 | inbranch=webutil.nodeinbranch(web.repo, ctx), | |||
|
149 | branches=webutil.nodebranchdict(web.repo, ctx)) | |||
|
150 | ||||
|
151 | if count >= web.maxchanges: | |||
|
152 | break | |||
|
153 | ||||
|
154 | cl = web.repo.changelog | |||
|
155 | parity = paritygen(web.stripecount) | |||
|
156 | ||||
|
157 | return tmpl('search', | |||
|
158 | query=query, | |||
|
159 | node=hex(cl.tip()), | |||
|
160 | entries=changelist, | |||
|
161 | archives=web.archivelist("tip")) | |||
|
162 | ||||
66 | def changelog(web, req, tmpl, shortlog = False): |
|
163 | def changelog(web, req, tmpl, shortlog = False): | |
67 | if 'node' in req.form: |
|
164 | if 'node' in req.form: | |
68 | ctx = web.changectx(req) |
|
165 | ctx = webutil.changectx(web.repo, req) | |
69 | else: |
|
166 | else: | |
70 | if 'rev' in req.form: |
|
167 | if 'rev' in req.form: | |
71 | hi = req.form['rev'][0] |
|
168 | hi = req.form['rev'][0] | |
72 | else: |
|
169 | else: | |
73 |
hi = web.repo |
|
170 | hi = len(web.repo) - 1 | |
74 | try: |
|
171 | try: | |
75 |
ctx = web.repo |
|
172 | ctx = web.repo[hi] | |
76 | except RepoError: |
|
173 | except RepoError: | |
77 |
return web |
|
174 | return _search(web, tmpl, hi) # XXX redirect to 404 page? | |
|
175 | ||||
|
176 | def changelist(limit=0, **map): | |||
|
177 | cl = web.repo.changelog | |||
|
178 | l = [] # build a list in forward order for efficiency | |||
|
179 | for i in xrange(start, end): | |||
|
180 | ctx = web.repo[i] | |||
|
181 | n = ctx.node() | |||
|
182 | showtags = webutil.showtag(web.repo, tmpl, 'changelogtag', n) | |||
|
183 | ||||
|
184 | l.insert(0, {"parity": parity.next(), | |||
|
185 | "author": ctx.user(), | |||
|
186 | "parent": webutil.siblings(ctx.parents(), i - 1), | |||
|
187 | "child": webutil.siblings(ctx.children(), i + 1), | |||
|
188 | "changelogtag": showtags, | |||
|
189 | "desc": ctx.description(), | |||
|
190 | "date": ctx.date(), | |||
|
191 | "files": web.listfilediffs(tmpl, ctx.files(), n), | |||
|
192 | "rev": i, | |||
|
193 | "node": hex(n), | |||
|
194 | "tags": webutil.nodetagsdict(web.repo, n), | |||
|
195 | "inbranch": webutil.nodeinbranch(web.repo, ctx), | |||
|
196 | "branches": webutil.nodebranchdict(web.repo, ctx) | |||
|
197 | }) | |||
78 |
|
198 | |||
79 | return web.changelog(tmpl, ctx, shortlog = shortlog) |
|
199 | if limit > 0: | |
|
200 | l = l[:limit] | |||
|
201 | ||||
|
202 | for e in l: | |||
|
203 | yield e | |||
|
204 | ||||
|
205 | maxchanges = shortlog and web.maxshortchanges or web.maxchanges | |||
|
206 | cl = web.repo.changelog | |||
|
207 | count = len(cl) | |||
|
208 | pos = ctx.rev() | |||
|
209 | start = max(0, pos - maxchanges + 1) | |||
|
210 | end = min(count, start + maxchanges) | |||
|
211 | pos = end - 1 | |||
|
212 | parity = paritygen(web.stripecount, offset=start-end) | |||
|
213 | ||||
|
214 | changenav = webutil.revnavgen(pos, maxchanges, count, web.repo.changectx) | |||
|
215 | ||||
|
216 | return tmpl(shortlog and 'shortlog' or 'changelog', | |||
|
217 | changenav=changenav, | |||
|
218 | node=hex(ctx.node()), | |||
|
219 | rev=pos, changesets=count, | |||
|
220 | entries=lambda **x: changelist(limit=0,**x), | |||
|
221 | latestentry=lambda **x: changelist(limit=1,**x), | |||
|
222 | archives=web.archivelist("tip")) | |||
80 |
|
223 | |||
81 | def shortlog(web, req, tmpl): |
|
224 | def shortlog(web, req, tmpl): | |
82 | return changelog(web, req, tmpl, shortlog = True) |
|
225 | return changelog(web, req, tmpl, shortlog = True) | |
83 |
|
226 | |||
84 | def changeset(web, req, tmpl): |
|
227 | def changeset(web, req, tmpl): | |
85 | return web.changeset(tmpl, web.changectx(req)) |
|
228 | ctx = webutil.changectx(web.repo, req) | |
|
229 | n = ctx.node() | |||
|
230 | showtags = webutil.showtag(web.repo, tmpl, 'changesettag', n) | |||
|
231 | parents = ctx.parents() | |||
|
232 | p1 = parents[0].node() | |||
|
233 | ||||
|
234 | files = [] | |||
|
235 | parity = paritygen(web.stripecount) | |||
|
236 | for f in ctx.files(): | |||
|
237 | files.append(tmpl("filenodelink", | |||
|
238 | node=hex(n), file=f, | |||
|
239 | parity=parity.next())) | |||
|
240 | ||||
|
241 | diffs = web.diff(tmpl, p1, n, None) | |||
|
242 | return tmpl('changeset', | |||
|
243 | diff=diffs, | |||
|
244 | rev=ctx.rev(), | |||
|
245 | node=hex(n), | |||
|
246 | parent=webutil.siblings(parents), | |||
|
247 | child=webutil.siblings(ctx.children()), | |||
|
248 | changesettag=showtags, | |||
|
249 | author=ctx.user(), | |||
|
250 | desc=ctx.description(), | |||
|
251 | date=ctx.date(), | |||
|
252 | files=files, | |||
|
253 | archives=web.archivelist(hex(n)), | |||
|
254 | tags=webutil.nodetagsdict(web.repo, n), | |||
|
255 | branch=webutil.nodebranchnodefault(ctx), | |||
|
256 | inbranch=webutil.nodeinbranch(web.repo, ctx), | |||
|
257 | branches=webutil.nodebranchdict(web.repo, ctx)) | |||
86 |
|
258 | |||
87 | rev = changeset |
|
259 | rev = changeset | |
88 |
|
260 | |||
89 | def manifest(web, req, tmpl): |
|
261 | def manifest(web, req, tmpl): | |
90 | return web.manifest(tmpl, web.changectx(req), |
|
262 | ctx = webutil.changectx(web.repo, req) | |
91 | web.cleanpath(req.form['path'][0])) |
|
263 | path = webutil.cleanpath(web.repo, req.form.get('file', [''])[0]) | |
|
264 | mf = ctx.manifest() | |||
|
265 | node = ctx.node() | |||
|
266 | ||||
|
267 | files = {} | |||
|
268 | parity = paritygen(web.stripecount) | |||
|
269 | ||||
|
270 | if path and path[-1] != "/": | |||
|
271 | path += "/" | |||
|
272 | l = len(path) | |||
|
273 | abspath = "/" + path | |||
|
274 | ||||
|
275 | for f, n in mf.items(): | |||
|
276 | if f[:l] != path: | |||
|
277 | continue | |||
|
278 | remain = f[l:] | |||
|
279 | if "/" in remain: | |||
|
280 | short = remain[:remain.index("/") + 1] # bleah | |||
|
281 | files[short] = (f, None) | |||
|
282 | else: | |||
|
283 | short = os.path.basename(remain) | |||
|
284 | files[short] = (f, n) | |||
|
285 | ||||
|
286 | if not files: | |||
|
287 | raise ErrorResponse(HTTP_NOT_FOUND, 'path not found: ' + path) | |||
|
288 | ||||
|
289 | def filelist(**map): | |||
|
290 | for f in util.sort(files): | |||
|
291 | full, fnode = files[f] | |||
|
292 | if not fnode: | |||
|
293 | continue | |||
|
294 | ||||
|
295 | fctx = ctx.filectx(full) | |||
|
296 | yield {"file": full, | |||
|
297 | "parity": parity.next(), | |||
|
298 | "basename": f, | |||
|
299 | "date": fctx.date(), | |||
|
300 | "size": fctx.size(), | |||
|
301 | "permissions": mf.flags(full)} | |||
|
302 | ||||
|
303 | def dirlist(**map): | |||
|
304 | for f in util.sort(files): | |||
|
305 | full, fnode = files[f] | |||
|
306 | if fnode: | |||
|
307 | continue | |||
|
308 | ||||
|
309 | yield {"parity": parity.next(), | |||
|
310 | "path": "%s%s" % (abspath, f), | |||
|
311 | "basename": f[:-1]} | |||
|
312 | ||||
|
313 | return tmpl("manifest", | |||
|
314 | rev=ctx.rev(), | |||
|
315 | node=hex(node), | |||
|
316 | path=abspath, | |||
|
317 | up=webutil.up(abspath), | |||
|
318 | upparity=parity.next(), | |||
|
319 | fentries=filelist, | |||
|
320 | dentries=dirlist, | |||
|
321 | archives=web.archivelist(hex(node)), | |||
|
322 | tags=webutil.nodetagsdict(web.repo, node), | |||
|
323 | inbranch=webutil.nodeinbranch(web.repo, ctx), | |||
|
324 | branches=webutil.nodebranchdict(web.repo, ctx)) | |||
92 |
|
325 | |||
93 | def tags(web, req, tmpl): |
|
326 | def tags(web, req, tmpl): | |
94 | return web.tags(tmpl) |
|
327 | i = web.repo.tagslist() | |
|
328 | i.reverse() | |||
|
329 | parity = paritygen(web.stripecount) | |||
|
330 | ||||
|
331 | def entries(notip=False,limit=0, **map): | |||
|
332 | count = 0 | |||
|
333 | for k, n in i: | |||
|
334 | if notip and k == "tip": | |||
|
335 | continue | |||
|
336 | if limit > 0 and count >= limit: | |||
|
337 | continue | |||
|
338 | count = count + 1 | |||
|
339 | yield {"parity": parity.next(), | |||
|
340 | "tag": k, | |||
|
341 | "date": web.repo[n].date(), | |||
|
342 | "node": hex(n)} | |||
|
343 | ||||
|
344 | return tmpl("tags", | |||
|
345 | node=hex(web.repo.changelog.tip()), | |||
|
346 | entries=lambda **x: entries(False,0, **x), | |||
|
347 | entriesnotip=lambda **x: entries(True,0, **x), | |||
|
348 | latestentry=lambda **x: entries(True,1, **x)) | |||
95 |
|
349 | |||
96 | def summary(web, req, tmpl): |
|
350 | def summary(web, req, tmpl): | |
97 | return web.summary(tmpl) |
|
351 | i = web.repo.tagslist() | |
|
352 | i.reverse() | |||
|
353 | ||||
|
354 | def tagentries(**map): | |||
|
355 | parity = paritygen(web.stripecount) | |||
|
356 | count = 0 | |||
|
357 | for k, n in i: | |||
|
358 | if k == "tip": # skip tip | |||
|
359 | continue | |||
|
360 | ||||
|
361 | count += 1 | |||
|
362 | if count > 10: # limit to 10 tags | |||
|
363 | break | |||
|
364 | ||||
|
365 | yield tmpl("tagentry", | |||
|
366 | parity=parity.next(), | |||
|
367 | tag=k, | |||
|
368 | node=hex(n), | |||
|
369 | date=web.repo[n].date()) | |||
|
370 | ||||
|
371 | def branches(**map): | |||
|
372 | parity = paritygen(web.stripecount) | |||
|
373 | ||||
|
374 | b = web.repo.branchtags() | |||
|
375 | l = [(-web.repo.changelog.rev(n), n, t) for t, n in b.items()] | |||
|
376 | for r,n,t in util.sort(l): | |||
|
377 | yield {'parity': parity.next(), | |||
|
378 | 'branch': t, | |||
|
379 | 'node': hex(n), | |||
|
380 | 'date': web.repo[n].date()} | |||
|
381 | ||||
|
382 | def changelist(**map): | |||
|
383 | parity = paritygen(web.stripecount, offset=start-end) | |||
|
384 | l = [] # build a list in forward order for efficiency | |||
|
385 | for i in xrange(start, end): | |||
|
386 | ctx = web.repo[i] | |||
|
387 | n = ctx.node() | |||
|
388 | hn = hex(n) | |||
|
389 | ||||
|
390 | l.insert(0, tmpl( | |||
|
391 | 'shortlogentry', | |||
|
392 | parity=parity.next(), | |||
|
393 | author=ctx.user(), | |||
|
394 | desc=ctx.description(), | |||
|
395 | date=ctx.date(), | |||
|
396 | rev=i, | |||
|
397 | node=hn, | |||
|
398 | tags=webutil.nodetagsdict(web.repo, n), | |||
|
399 | inbranch=webutil.nodeinbranch(web.repo, ctx), | |||
|
400 | branches=webutil.nodebranchdict(web.repo, ctx))) | |||
|
401 | ||||
|
402 | yield l | |||
|
403 | ||||
|
404 | cl = web.repo.changelog | |||
|
405 | count = len(cl) | |||
|
406 | start = max(0, count - web.maxchanges) | |||
|
407 | end = min(count, start + web.maxchanges) | |||
|
408 | ||||
|
409 | return tmpl("summary", | |||
|
410 | desc=web.config("web", "description", "unknown"), | |||
|
411 | owner=get_contact(web.config) or "unknown", | |||
|
412 | lastchange=cl.read(cl.tip())[2], | |||
|
413 | tags=tagentries, | |||
|
414 | branches=branches, | |||
|
415 | shortlog=changelist, | |||
|
416 | node=hex(cl.tip()), | |||
|
417 | archives=web.archivelist("tip")) | |||
98 |
|
418 | |||
99 | def filediff(web, req, tmpl): |
|
419 | def filediff(web, req, tmpl): | |
100 | return web.filediff(tmpl, web.filectx(req)) |
|
420 | fctx = webutil.filectx(web.repo, req) | |
|
421 | n = fctx.node() | |||
|
422 | path = fctx.path() | |||
|
423 | parents = fctx.parents() | |||
|
424 | p1 = parents and parents[0].node() or nullid | |||
|
425 | ||||
|
426 | diffs = web.diff(tmpl, p1, n, [path]) | |||
|
427 | return tmpl("filediff", | |||
|
428 | file=path, | |||
|
429 | node=hex(n), | |||
|
430 | rev=fctx.rev(), | |||
|
431 | date=fctx.date(), | |||
|
432 | desc=fctx.description(), | |||
|
433 | author=fctx.user(), | |||
|
434 | rename=webutil.renamelink(fctx), | |||
|
435 | branch=webutil.nodebranchnodefault(fctx), | |||
|
436 | parent=webutil.siblings(parents), | |||
|
437 | child=webutil.siblings(fctx.children()), | |||
|
438 | diff=diffs) | |||
101 |
|
439 | |||
102 | diff = filediff |
|
440 | diff = filediff | |
103 |
|
441 | |||
104 | def annotate(web, req, tmpl): |
|
442 | def annotate(web, req, tmpl): | |
105 | return web.fileannotate(tmpl, web.filectx(req)) |
|
443 | fctx = webutil.filectx(web.repo, req) | |
|
444 | f = fctx.path() | |||
|
445 | n = fctx.filenode() | |||
|
446 | fl = fctx.filelog() | |||
|
447 | parity = paritygen(web.stripecount) | |||
|
448 | ||||
|
449 | def annotate(**map): | |||
|
450 | last = None | |||
|
451 | if binary(fctx.data()): | |||
|
452 | mt = (mimetypes.guess_type(fctx.path())[0] | |||
|
453 | or 'application/octet-stream') | |||
|
454 | lines = enumerate([((fctx.filectx(fctx.filerev()), 1), | |||
|
455 | '(binary:%s)' % mt)]) | |||
|
456 | else: | |||
|
457 | lines = enumerate(fctx.annotate(follow=True, linenumber=True)) | |||
|
458 | for lineno, ((f, targetline), l) in lines: | |||
|
459 | fnode = f.filenode() | |||
|
460 | ||||
|
461 | if last != fnode: | |||
|
462 | last = fnode | |||
|
463 | ||||
|
464 | yield {"parity": parity.next(), | |||
|
465 | "node": hex(f.node()), | |||
|
466 | "rev": f.rev(), | |||
|
467 | "author": f.user(), | |||
|
468 | "desc": f.description(), | |||
|
469 | "file": f.path(), | |||
|
470 | "targetline": targetline, | |||
|
471 | "line": l, | |||
|
472 | "lineid": "l%d" % (lineno + 1), | |||
|
473 | "linenumber": "% 6d" % (lineno + 1)} | |||
|
474 | ||||
|
475 | return tmpl("fileannotate", | |||
|
476 | file=f, | |||
|
477 | annotate=annotate, | |||
|
478 | path=webutil.up(f), | |||
|
479 | rev=fctx.rev(), | |||
|
480 | node=hex(fctx.node()), | |||
|
481 | author=fctx.user(), | |||
|
482 | date=fctx.date(), | |||
|
483 | desc=fctx.description(), | |||
|
484 | rename=webutil.renamelink(fctx), | |||
|
485 | branch=webutil.nodebranchnodefault(fctx), | |||
|
486 | parent=webutil.siblings(fctx.parents()), | |||
|
487 | child=webutil.siblings(fctx.children()), | |||
|
488 | permissions=fctx.manifest().flags(f)) | |||
106 |
|
489 | |||
107 | def filelog(web, req, tmpl): |
|
490 | def filelog(web, req, tmpl): | |
108 | return web.filelog(tmpl, web.filectx(req)) |
|
491 | fctx = webutil.filectx(web.repo, req) | |
|
492 | f = fctx.path() | |||
|
493 | fl = fctx.filelog() | |||
|
494 | count = len(fl) | |||
|
495 | pagelen = web.maxshortchanges | |||
|
496 | pos = fctx.filerev() | |||
|
497 | start = max(0, pos - pagelen + 1) | |||
|
498 | end = min(count, start + pagelen) | |||
|
499 | pos = end - 1 | |||
|
500 | parity = paritygen(web.stripecount, offset=start-end) | |||
|
501 | ||||
|
502 | def entries(limit=0, **map): | |||
|
503 | l = [] | |||
|
504 | ||||
|
505 | for i in xrange(start, end): | |||
|
506 | ctx = fctx.filectx(i) | |||
|
507 | n = fl.node(i) | |||
|
508 | ||||
|
509 | l.insert(0, {"parity": parity.next(), | |||
|
510 | "filerev": i, | |||
|
511 | "file": f, | |||
|
512 | "node": hex(ctx.node()), | |||
|
513 | "author": ctx.user(), | |||
|
514 | "date": ctx.date(), | |||
|
515 | "rename": webutil.renamelink(fctx), | |||
|
516 | "parent": webutil.siblings(fctx.parents()), | |||
|
517 | "child": webutil.siblings(fctx.children()), | |||
|
518 | "desc": ctx.description()}) | |||
|
519 | ||||
|
520 | if limit > 0: | |||
|
521 | l = l[:limit] | |||
|
522 | ||||
|
523 | for e in l: | |||
|
524 | yield e | |||
|
525 | ||||
|
526 | nodefunc = lambda x: fctx.filectx(fileid=x) | |||
|
527 | nav = webutil.revnavgen(pos, pagelen, count, nodefunc) | |||
|
528 | return tmpl("filelog", file=f, node=hex(fctx.node()), nav=nav, | |||
|
529 | entries=lambda **x: entries(limit=0, **x), | |||
|
530 | latestentry=lambda **x: entries(limit=1, **x)) | |||
|
531 | ||||
109 |
|
532 | |||
110 | def archive(web, req, tmpl): |
|
533 | def archive(web, req, tmpl): | |
111 | type_ = req.form.get('type', [None])[0] |
|
534 | type_ = req.form.get('type', [None])[0] | |
112 | allowed = web.configlist("web", "allow_archive") |
|
535 | allowed = web.configlist("web", "allow_archive") | |
113 | if (type_ in web.archives and (type_ in allowed or |
|
536 | key = req.form['node'][0] | |
|
537 | ||||
|
538 | if not (type_ in web.archives and (type_ in allowed or | |||
114 | web.configbool("web", "allow" + type_, False))): |
|
539 | web.configbool("web", "allow" + type_, False))): | |
115 | web.archive(tmpl, req, req.form['node'][0], type_) |
|
540 | msg = 'Unsupported archive type: %s' % type_ | |
|
541 | raise ErrorResponse(HTTP_NOT_FOUND, msg) | |||
|
542 | ||||
|
543 | reponame = re.sub(r"\W+", "-", os.path.basename(web.reponame)) | |||
|
544 | cnode = web.repo.lookup(key) | |||
|
545 | arch_version = key | |||
|
546 | if cnode == key or key == 'tip': | |||
|
547 | arch_version = short(cnode) | |||
|
548 | name = "%s-%s" % (reponame, arch_version) | |||
|
549 | mimetype, artype, extension, encoding = web.archive_specs[type_] | |||
|
550 | headers = [ | |||
|
551 | ('Content-Type', mimetype), | |||
|
552 | ('Content-Disposition', 'attachment; filename=%s%s' % (name, extension)) | |||
|
553 | ] | |||
|
554 | if encoding: | |||
|
555 | headers.append(('Content-Encoding', encoding)) | |||
|
556 | req.header(headers) | |||
|
557 | req.respond(HTTP_OK) | |||
|
558 | archival.archive(web.repo, req, cnode, artype, prefix=name) | |||
116 |
|
|
559 | return [] | |
117 | raise ErrorResponse(HTTP_NOT_FOUND, 'unsupported archive type: %s' % type_) |
|
560 | ||
118 |
|
561 | |||
119 | def static(web, req, tmpl): |
|
562 | def static(web, req, tmpl): | |
120 | fname = req.form['file'][0] |
|
563 | fname = req.form['file'][0] | |
@@ -124,3 +567,39 def static(web, req, tmpl): | |||||
124 | os.path.join(web.templatepath, "static"), |
|
567 | os.path.join(web.templatepath, "static"), | |
125 | untrusted=False) |
|
568 | untrusted=False) | |
126 | return [staticfile(static, fname, req)] |
|
569 | return [staticfile(static, fname, req)] | |
|
570 | ||||
|
571 | def graph(web, req, tmpl): | |||
|
572 | rev = webutil.changectx(web.repo, req).rev() | |||
|
573 | bg_height = 39 | |||
|
574 | ||||
|
575 | max_rev = len(web.repo) - 1 | |||
|
576 | revcount = min(max_rev, int(req.form.get('revcount', [25])[0])) | |||
|
577 | revnode = web.repo.changelog.node(rev) | |||
|
578 | revnode_hex = hex(revnode) | |||
|
579 | uprev = min(max_rev, rev + revcount) | |||
|
580 | downrev = max(0, rev - revcount) | |||
|
581 | lessrev = max(0, rev - revcount / 2) | |||
|
582 | ||||
|
583 | maxchanges = web.maxshortchanges or web.maxchanges | |||
|
584 | count = len(web.repo) | |||
|
585 | changenav = webutil.revnavgen(rev, maxchanges, count, web.repo.changectx) | |||
|
586 | ||||
|
587 | tree = list(graphmod.graph(web.repo, rev, rev - revcount)) | |||
|
588 | canvasheight = (len(tree) + 1) * bg_height - 27; | |||
|
589 | ||||
|
590 | data = [] | |||
|
591 | for i, (ctx, vtx, edges) in enumerate(tree): | |||
|
592 | node = short(ctx.node()) | |||
|
593 | age = templatefilters.age(ctx.date()) | |||
|
594 | desc = templatefilters.firstline(ctx.description()) | |||
|
595 | desc = cgi.escape(desc) | |||
|
596 | user = cgi.escape(templatefilters.person(ctx.user())) | |||
|
597 | branch = ctx.branch() | |||
|
598 | branch = branch, web.repo.branchtags().get(branch) == ctx.node() | |||
|
599 | data.append((node, vtx, edges, desc, user, age, branch, ctx.tags())) | |||
|
600 | ||||
|
601 | return tmpl('graph', rev=rev, revcount=revcount, uprev=uprev, | |||
|
602 | lessrev=lessrev, revcountmore=revcount and 2 * revcount or 1, | |||
|
603 | revcountless=revcount / 2, downrev=downrev, | |||
|
604 | canvasheight=canvasheight, bg_height=bg_height, | |||
|
605 | jsdata=data, node=revnode_hex, changenav=changenav) |
@@ -96,10 +96,9 def hook(ui, repo, name, throw=False, ** | |||||
96 | oldstdout = os.dup(sys.__stdout__.fileno()) |
|
96 | oldstdout = os.dup(sys.__stdout__.fileno()) | |
97 | os.dup2(sys.__stderr__.fileno(), sys.__stdout__.fileno()) |
|
97 | os.dup2(sys.__stderr__.fileno(), sys.__stdout__.fileno()) | |
98 |
|
98 | |||
99 |
|
|
99 | for hname, cmd in util.sort(ui.configitems('hooks')): | |
100 |
|
|
100 | if hname.split('.')[0] != name or not cmd: | |
101 | hooks.sort() |
|
101 | continue | |
102 | for hname, cmd in hooks: |
|
|||
103 | if callable(cmd): |
|
102 | if callable(cmd): | |
104 | r = _pythonhook(ui, repo, name, hname, cmd, args, throw) or r |
|
103 | r = _pythonhook(ui, repo, name, hname, cmd, args, throw) or r | |
105 | elif cmd.startswith('python:'): |
|
104 | elif cmd.startswith('python:'): |
@@ -268,6 +268,7 class httprepository(repo.repository): | |||||
268 |
|
268 | |||
269 | # 1.0 here is the _protocol_ version |
|
269 | # 1.0 here is the _protocol_ version | |
270 | opener.addheaders = [('User-agent', 'mercurial/proto-1.0')] |
|
270 | opener.addheaders = [('User-agent', 'mercurial/proto-1.0')] | |
|
271 | opener.addheaders.append(('Accept', 'application/mercurial-0.1')) | |||
271 | urllib2.install_opener(opener) |
|
272 | urllib2.install_opener(opener) | |
272 |
|
273 | |||
273 | def url(self): |
|
274 | def url(self): |
@@ -19,6 +19,8 | |||||
19 |
|
19 | |||
20 | # Modified by Benoit Boissinot: |
|
20 | # Modified by Benoit Boissinot: | |
21 | # - fix for digest auth (inspired from urllib2.py @ Python v2.4) |
|
21 | # - fix for digest auth (inspired from urllib2.py @ Python v2.4) | |
|
22 | # Modified by Dirkjan Ochtman: | |||
|
23 | # - import md5 function from a local util module | |||
22 |
|
24 | |||
23 | """An HTTP handler for urllib2 that supports HTTP 1.1 and keepalive. |
|
25 | """An HTTP handler for urllib2 that supports HTTP 1.1 and keepalive. | |
24 |
|
26 | |||
@@ -450,7 +452,7 def error_handler(url): | |||||
450 | keepalive_handler.close_all() |
|
452 | keepalive_handler.close_all() | |
451 |
|
453 | |||
452 | def continuity(url): |
|
454 | def continuity(url): | |
453 | import md5 |
|
455 | from util import md5 | |
454 | format = '%25s: %s' |
|
456 | format = '%25s: %s' | |
455 |
|
457 | |||
456 | # first fetch the file with the normal http handler |
|
458 | # first fetch the file with the normal http handler |
@@ -9,8 +9,9 from node import bin, hex, nullid, nullr
 from i18n import _
 import repo, changegroup
 import changelog, dirstate, filelog, manifest, context, weakref
-import lock, transaction, stat, errno, ui
+import lock, transaction, stat, errno, ui, store
 import os, revlog, time, util, extensions, hook, inspect
+import match as match_

 class localrepository(repo.repository):
     capabilities = util.set(('lookup', 'changegroupsubset'))

@@ -59,30 +60,13 class localrepository(repo.repository):
             if r not in self.supported:
                 raise repo.RepoError(_("requirement '%s' not supported") % r)

-        # setup store
-        if "store" in requirements:
-            self.encodefn = util.encodefilename
-            self.decodefn = util.decodefilename
-            self.spath = os.path.join(self.path, "store")
-        else:
-            self.encodefn = lambda x: x
-            self.decodefn = lambda x: x
-            self.spath = self.path
+        self.store = store.store(requirements, self.path)

-        try:
-            # files in .hg/ will be created using this mode
-            mode = os.stat(self.spath).st_mode
-            # avoid some useless chmods
-            if (0777 & ~util._umask) == (0777 & mode):
-                mode = None
-        except OSError:
-            mode = None
-
-        self._createmode = mode
-        self.opener.createmode = mode
-        sopener = util.opener(self.spath)
-        sopener.createmode = mode
-        self.sopener = util.encodedopener(sopener, self.encodefn)
+        self.spath = self.store.path
+        self.sopener = self.store.opener
+        self.sjoin = self.store.join
+        self._createmode = self.store.createmode
+        self.opener.createmode = self.store.createmode

         self.ui = ui.ui(parentui=parentui)
         try:

@@ -116,6 +100,21 class localrepository(repo.repository):
         else:
             raise AttributeError, name

+    def __getitem__(self, changeid):
+        if changeid == None:
+            return context.workingctx(self)
+        return context.changectx(self, changeid)
+
+    def __nonzero__(self):
+        return True
+
+    def __len__(self):
+        return len(self.changelog)
+
+    def __iter__(self):
+        for i in xrange(len(self)):
+            yield i
+
     def url(self):
         return 'file:' + self.root

@@ -146,7 +145,11 class localrepository(repo.repository):
             if prevtags and prevtags[-1] != '\n':
                 fp.write('\n')
             for name in names:
+                m = munge and munge(name) or name
+                if self._tagstypecache and name in self._tagstypecache:
+                    old = self.tagscache.get(name, nullid)
+                    fp.write('%s %s\n' % (hex(old), m))
+                fp.write('%s %s\n' % (hex(node), m))
             fp.close()

             prevtags = ''

@@ -304,7 +307,6 class localrepository(repo.repository):
             self.tagscache[k] = n
+            self._tagstypecache[k] = tagtypes[k]
         self.tagscache['tip'] = self.changelog.tip()
-
         return self.tagscache

     def tagtype(self, tagname):

@@ -326,7 +328,7 class localrepository(repo.repository):
         last = {}
         ret = []
         for node in heads:
-            c = self
+            c = self[node]
             rev = c.rev()
             try:
                 fnode = c.filenode('.hgtags')

@@ -347,8 +349,7 class localrepository(repo.repository):
             except:
                 r = -2 # sort to the beginning of the list if unknown
             l.append((r, t, n))
-        l.sort()
-        return [(t, n) for r, t, n in l]
+        return [(t, n) for r, t, n in util.sort(l)]

     def nodetags(self, node):
         '''return the tags associated with a node'''

@@ -359,7 +360,7 class localrepository(repo.repository):
         return self.nodetagscache.get(node, [])

     def _branchtags(self, partial, lrev):
-        tiprev = self
+        tiprev = len(self) - 1
         if lrev != tiprev:
             self._updatebranchcache(partial, lrev+1, tiprev+1)
         self._writebranchcache(partial, self.changelog.tip(), tiprev)

@@ -404,8 +405,7 class localrepository(repo.repository):
         try:
             last, lrev = lines.pop(0).split(" ", 1)
             last, lrev = bin(last), int(lrev)
-            if not (lrev < self.changelog.count() and
-                    self.changelog.node(lrev) == last): # sanity check
+            if lrev >= len(self) or self[lrev].node() != last:
                 # invalidate the cache
                 raise ValueError('invalidating branch cache (tip differs)')
             for l in lines:

@@ -432,18 +432,13 class localrepository(repo.repository):

     def _updatebranchcache(self, partial, start, end):
         for r in xrange(start, end):
-            c = self
+            c = self[r]
             b = c.branch()
             partial[b] = c.node()

     def lookup(self, key):
         if key == '.':
-            if key == nullid:
-                raise repo.RepoError(_("no revision checked out"))
-            if second != nullid:
-                self.ui.warn(_("warning: working directory has two parents, "
-                               "tag '.' uses the first\n"))
+            return self.dirstate.parents()[0]
         elif key == 'null':
             return nullid
         n = self.changelog._match(key)
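Editor's note: the hunk at line 100 above makes the repository object behave like a container (repo[changeid], len(repo), iteration over revision numbers). The following is a minimal illustrative sketch of that protocol only; ToyRepo is a hypothetical stand-in, not Mercurial code, and the string contexts are placeholders for the context objects the patch actually returns.

# Python 2, matching the patched codebase.
class ToyRepo(object):
    def __init__(self, changesets):
        self._changesets = list(changesets)   # stand-in for the changelog
    def __getitem__(self, changeid):
        if changeid is None:
            return 'workingctx'               # patch returns context.workingctx(self)
        return self._changesets[changeid]     # patch returns context.changectx(self, changeid)
    def __len__(self):
        return len(self._changesets)
    def __iter__(self):
        for i in xrange(len(self)):           # yields revision numbers, as in the hunk
            yield i

repo = ToyRepo(['c0', 'c1', 'c2'])
assert len(repo) == 3
assert repo[1] == 'c1'
assert list(repo) == [0, 1, 2]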
@@ -469,36 +464,23 class localrepository(repo.repository):
     def join(self, f):
         return os.path.join(self.path, f)

-    def sjoin(self, f):
-        f = self.encodefn(f)
-        return os.path.join(self.spath, f)
-
     def wjoin(self, f):
         return os.path.join(self.root, f)

+    def rjoin(self, f):
+        return os.path.join(self.root, util.pconvert(f))
+
     def file(self, f):
         if f[0] == '/':
             f = f[1:]
         return filelog.filelog(self.sopener, f)

-    def changectx(self, changeid
-        return
-
-    def workingctx(self):
-        return context.workingctx(self)
+    def changectx(self, changeid):
+        return self[changeid]

     def parents(self, changeid=None):
-        '''
-        get list of changectxs for parents of changeid or working directory
-        '''
-        if changeid is None:
-            pl = self.dirstate.parents()
-        else:
-            n = self.changelog.lookup(changeid)
-            pl = self.changelog.parents(n)
-        if pl[1] == nullid:
-            return [self.changectx(pl[0])]
-        return [self.changectx(pl[0]), self.changectx(pl[1])]
+        '''get list of changectxs for parents of changeid'''
+        return self[changeid].parents()

     def filectx(self, path, changeid=None, fileid=None):
         """changeid can be a changeset revision, node, or tag.

@@ -676,19 +658,20 class localrepository(repo.repository):
             self._wlockref = weakref.ref(l)
         return l

-    def filecommit(self, f
+    def filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
         """
         commit an individual file as part of a larger transaction
         """

+        fn = fctx.path()
+        t = fctx.data()
         fl = self.file(fn)
         fp1 = manifest1.get(fn, nullid)
         fp2 = manifest2.get(fn, nullid)

         meta = {}
-        cf = self.dirstate.copied(fn)
-        if c
+        cp = fctx.renamed()
+        if cp and cp[0] != fn:
             # Mark the new revision of this file as a copy of another
             # file. This copy data will effectively act as a parent
             # of this new revision. If this is a merge, the first

@@ -708,6 +691,7 class localrepository(repo.repository):
             # \- 2 --- 4 as the merge base
             #

+            cf = cp[0]
             cr = manifest1.get(cf)
             nfp = fp2

@@ -721,18 +705,9 class localrepository(repo.repository):
             if not cr:
                 self.ui.debug(_(" %s: searching for copy revision for %s\n") %
                               (fn, cf))
-                p1 = self.dirstate.parents()[0]
-                rev = self.changelog.rev(p1)
-                seen = {-1:None}
-                visit = [rev]
-                while visit:
-                    for p in self.changelog.parentrevs(visit.pop(0)):
-                        if p not in seen:
-                            seen[p] = True
-                            visit.append(p)
-                            ctx = self.changectx(p)
-                            if cf in ctx:
-                                cr = ctx[cf].filenode()
+                for a in self['.'].ancestors():
+                    if cf in a:
+                        cr = a[cf].filenode()
+                        break

             self.ui.debug(_(" %s: copy %s:%s\n") % (fn, cf, hex(cr)))
@@ -761,66 +736,80 class localrepository(repo.repository):
                            p1=p1, p2=p2, extra=extra, empty_ok=True)

     def commit(self, files=None, text="", user=None, date=None,
-               match=
+               match=None, force=False, force_editor=False,
                p1=None, p2=None, extra={}, empty_ok=False):
-        wlock = lock =
-        valid = 0 # don't save the dirstate if this isn't set
+        wlock = lock = None
         if files:
             files = util.unique(files)
         try:
             wlock = self.wlock()
             lock = self.lock()
-            commit = []
-            remove = []
-            changed = []
             use_dirstate = (p1 is None) # not rawcommit
-            extra = extra.copy()
-
-            if use_dirstate:
-                if files:
-                    for f in files:
-                        s = self.dirstate[f]
-                        if s in 'nma':
-                            commit.append(f)
-                        elif s == 'r':
-                            remove.append(f)
-                        else:
-                            self.ui.warn(_("%s not tracked!\n") % f)
-                else:
-                    changes = self.status(match=match)[:5]
-                    modified, added, removed, deleted, unknown = changes
-                    commit = modified + added
-                    remove = removed
-            else:
-                commit = files

             if use_dirstate:
                 p1, p2 = self.dirstate.parents()
                 update_dirstate = True

                 if (not force and p2 != nullid and
-                    (files or match
+                    (match and (match.files() or match.anypats()))):
                     raise util.Abort(_('cannot partially commit a merge '
                                        '(do not specify files or patterns)'))
+
+                if files:
+                    modified, removed = [], []
+                    for f in files:
+                        s = self.dirstate[f]
+                        if s in 'nma':
+                            modified.append(f)
+                        elif s == 'r':
+                            removed.append(f)
+                        else:
+                            self.ui.warn(_("%s not tracked!\n") % f)
+                    changes = [modified, [], removed, [], []]
+                else:
+                    changes = self.status(match=match)
             else:
                 p1, p2 = p1, p2 or nullid
                 update_dirstate = (self.dirstate.parents()[0] == p1)
+                changes = [files, [], [], [], []]

+            wctx = context.workingctx(self, (p1, p2), text, user, date,
+                                      extra, changes)
+            return self._commitctx(wctx, force, force_editor, empty_ok,
+                                   use_dirstate, update_dirstate)
+        finally:
+            del lock, wlock
+
+    def commitctx(self, ctx):
+        wlock = lock = None
+        try:
+            wlock = self.wlock()
+            lock = self.lock()
+            return self._commitctx(ctx, force=True, force_editor=False,
+                                   empty_ok=True, use_dirstate=False,
+                                   update_dirstate=False)
+        finally:
+            del lock, wlock
+
+    def _commitctx(self, wctx, force=False, force_editor=False, empty_ok=False,
+                   use_dirstate=True, update_dirstate=True):
+        tr = None
+        valid = 0 # don't save the dirstate if this isn't set
+        try:
+            commit = util.sort(wctx.modified() + wctx.added())
+            remove = wctx.removed()
+            extra = wctx.extra().copy()
+            branchname = extra['branch']
+            user = wctx.user()
+            text = wctx.description()
+
+            p1, p2 = [p.node() for p in wctx.parents()]
             c1 = self.changelog.read(p1)
             c2 = self.changelog.read(p2)
             m1 = self.manifest.read(c1[0]).copy()
             m2 = self.manifest.read(c2[0])

             if use_dirstate:
-                branchname = self.workingctx().branch()
-                try:
-                    branchname = branchname.decode('UTF-8').encode('UTF-8')
-                except UnicodeDecodeError:
-                    raise util.Abort(_('branch name not in UTF-8!'))
-            else:
-                branchname = ""
-
-            if use_dirstate:
                 oldname = c1[5].get("branch") # stored in UTF-8
                 if (not commit and not remove and not force and p2 == nullid
                     and branchname == oldname):
@@ -838,26 +827,22 class localrepository(repo.repository):

             # check in files
             new = {}
-            linkrev = self.changelog.count()
-            commit.sort()
-            is_exec = util.execfunc(self.root, m1.execf)
-            is_link = util.linkfunc(self.root, m1.linkf)
+            changed = []
+            linkrev = len(self)
             for f in commit:
                 self.ui.note(f + "\n")
                 try:
-                    new[f] = self.filecommit(f, m1, m2, linkrev, trp, changed)
-                    new
-                    new_link = is_link(f)
+                    fctx = wctx.filectx(f)
+                    newflags = fctx.flags()
+                    new[f] = self.filecommit(fctx, m1, m2, linkrev, trp, changed)
                     if ((not changed or changed[-1] != f) and
                         m2.get(f) != new[f]):
                         # mention the file in the changelog if some
                         # flag changed, even if there was no content
                         # change.
-                        old_link = m1.linkf(f)
-                        if old_exec != new_exec or old_link != new_link:
+                        if m1.flags(f) != newflags:
                             changed.append(f)
-                    m1.set(f, new
+                    m1.set(f, newflags)
                     if use_dirstate:
                         self.dirstate.normal(f)

@@ -870,10 +855,9 class localrepository(repo.repository):

             # update manifest
             m1.update(new)
-            remove.sort()
             removed = []

-            for f in remove:
+            for f in util.sort(remove):
                 if f in m1:
                     del m1[f]
                     removed.append(f)

@@ -883,10 +867,6 class localrepository(repo.repository):
                                    (new, removed))

             # add changeset
-            new = new.keys()
-            new.sort()
-
-            user = user or self.ui.username()
             if (not empty_ok and not text) or force_editor:
                 edittext = []
                 if text:

@@ -911,9 +891,6 class localrepository(repo.repository):
                 text = self.ui.edit("\n".join(edittext), user)
                 os.chdir(olddir)

-            if branchname:
-                extra["branch"] = branchname
-
             lines = [line.rstrip() for line in text.rstrip().splitlines()]
             while lines and not lines[0]:
                 del lines[0]

@@ -922,7 +899,7 class localrepository(repo.repository):
             text = '\n'.join(lines)

             n = self.changelog.add(mn, changed + removed, text, trp, p1, p2,
-                                   user, date, extra)
+                                   user, wctx.date(), extra)
             self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
                       parent2=xp2)
             tr.close()
@@ -942,110 +919,55 class localrepository(repo.repository):
         finally:
             if not valid: # don't save our updated dirstate
                 self.dirstate.invalidate()
             del tr

-    def walk(self, node=None, files=[], match=util.always, badmatch=None):
+    def walk(self, match, node=None):
         '''
         walk recursively through the directory tree or a given
         changeset, finding all files matched by the match
         function
-
-        results are yielded in a tuple (src, filename), where src
-        is one of:
-        'f' the file was found in the directory tree
-        'm' the file was only in the dirstate and not in the tree
-        'b' file was not found and matched badmatch
         '''
+        return self[node].walk(match)

-        if node:
-            fdict = dict.fromkeys(files)
-            # for dirstate.walk, files=['.'] means "walk the whole tree".
-            # follow that here, too
-            fdict.pop('.', None)
-            mdict = self.manifest.read(self.changelog.read(node)[0])
-            mfiles = mdict.keys()
-            mfiles.sort()
-            for fn in mfiles:
-                for ffn in fdict:
-                    # match if the file is the exact name or a directory
-                    if ffn == fn or fn.startswith("%s/" % ffn):
-                        del fdict[ffn]
-                        break
-                if match(fn):
-                    yield 'm', fn
-            ffiles = fdict.keys()
-            ffiles.sort()
-            for fn in ffiles:
-                if badmatch and badmatch(fn):
-                    if match(fn):
-                        yield 'b', fn
-                else:
-                    self.ui.warn(_('%s: No such file in rev %s\n')
-                                 % (self.pathto(fn), short(node)))
-        else:
-            for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
-                yield src, fn
-
-    def status(self, node1=None, node2=None, files=[], match=util.always,
-               list_ignored=False, list_clean=False, list_unknown=True):
+    def status(self, node1='.', node2=None, match=None,
+               ignored=False, clean=False, unknown=False):
         """return status of files between two nodes or node and working directory

         If node1 is None, use the first dirstate parent instead.
         If node2 is None, compare node1 with working directory.
         """

-        def
-            t1 = self.wread(fn)
-            return self.file(fn).cmp(getnode(fn), t1)
-
-        def mfmatches(node):
-            change = self.changelog.read(node)
-            mf = self.manifest.read(change[0]).copy()
+        def mfmatches(ctx):
+            mf = ctx.manifest().copy()
             for fn in mf.keys():
                 if not match(fn):
                     del mf[fn]
             return mf

-        modified, added, removed, deleted, unknown = [], [], [], [], []
-        ignored, clean = [], []
-
-        if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
-            compareworking = True
-
-        if not compareworking:
-            # read the manifest from node1 before the manifest from node2,
-            # so that we'll hit the manifest cache if we're going through
-            # all the revisions in parent->child order.
-            mf1 = mfmatches(node1)
-
-        # are we comparing the working directory?
-        if not node2:
-            (lookup, modified, added, removed, deleted, unknown,
-             ignored, clean) = self.dirstate.status(files, match,
-                                                    list_ignored, list_clean,
-                                                    list_unknown)
-
-            # are we comparing working dir against its parent?
-            if compareworking:
-                if lookup:
-                    ctx = self.changectx()
-                    mexec = lambda f: 'x' in ctx.fileflags(f)
-                    mlink = lambda f: 'l' in ctx.fileflags(f)
-                    is_exec = util.execfunc(self.root, mexec)
-                    is_link = util.linkfunc(self.root, mlink)
-                    def flags(f):
-                        return is_link(f) and 'l' or is_exec(f) and 'x' or ''
-                    for f in lookup:
-                        if (f not in ctx or flags(f) != ctx.fileflags(f)
-                            or ctx[f].cmp(self.wread(f))):
-                    if list_clean:
-                        clean.append(f)
+        ctx1 = self[node1]
+        ctx2 = self[node2]
+        working = ctx2 == self[None]
+        parentworking = working and ctx1 == self['.']
+        match = match or match_.always(self.root, self.getcwd())
+        listignored, listclean, listunknown = ignored, clean, unknown

+        if working: # we need to scan the working dir
+            s = self.dirstate.status(match, listignored, listclean, listunknown)
+            cmp, modified, added, removed, deleted, unknown, ignored, clean = s
+
+            # check for any possibly clean files
+            if parentworking and cmp:
+                fixup = []
+                # do a full compare of any files that might have changed
+                for f in cmp:
+                    if (f not in ctx1 or ctx2.flags(f) != ctx1.flags(f)
+                        or ctx1[f].cmp(ctx2[f].data())):
+                        modified.append(f)
+                    else:
+                        fixup.append(f)
+
+                if listclean:
+                    clean += fixup
+
+                # update dirstate for files that are actually clean
+                if fixup:
@@ -1053,58 +975,47 class localrepository(repo.repository):
-                if wlock:
-                    for f in fixup:
-                        self.dirstate.normal(f)
-        else:
+                    try:
+                        try:
+                            wlock = self.wlock(False)
+                            for f in fixup:
+                                self.dirstate.normal(f)
+                        except lock.LockException:
+                            pass
+                    finally:
+                        del wlock
+
+        if not parentworking:
+            mf1 = mfmatches(ctx1)
+            if working:
                 # we are comparing working dir against non-parent
                 # generate a pseudo-manifest for the working dir
-                # XXX: create it in dirstate.py ?
-                mf2 = mfmatches(self.dirstate.parents()[0])
-                is_exec = util.execfunc(self.root, mf2.execf)
-                is_link = util.linkfunc(self.root, mf2.linkf)
-                for f in lookup + modified + added:
-                    mf2[f] = ""
-                    mf2.set(f, is_exec(f), is_link(f))
+                mf2 = mfmatches(self['.'])
+                for f in cmp + modified + added:
+                    mf2[f] = None
+                    mf2.set(f, ctx2.flags(f))
                 for f in removed:
                     if f in mf2:
                         del mf2[f]
-
             else:
                 # we are comparing two revisions
-                mf2 = mfmatches(node2)
-
-                if not compareworking:
-                    # flush lists from dirstate before comparing manifests
-                    modified, added, clean = [], [], []
+                deleted, unknown, ignored = [], [], []
+                mf2 = mfmatches(ctx2)

-            # make sure to sort the files so we talk to the disk in a
-            # reasonable order
-            mf2keys = mf2.keys()
-            mf2keys.sort()
-            getnode = lambda fn: mf1.get(fn, nullid)
-            for fn in mf2keys:
+            modified, added, clean = [], [], []
+            for fn in mf2:
                 if fn in mf1:
                     if (mf1.flags(fn) != mf2.flags(fn) or
                         (mf1[fn] != mf2[fn] and
-                         (mf2[fn]
+                         (mf2[fn] or ctx1[fn].cmp(ctx2[fn].data())))):
                         modified.append(fn)
-                    elif list
+                    elif listclean:
                         clean.append(fn)
                     del mf1[fn]
                 else:
                     added.append(fn)
-
             removed = mf1.keys()

-            # sort and return results:
-            for l in modified, added, removed, deleted, unknown, ignored, clean:
-                l.sort()
-            return (modified, added, removed, deleted, unknown, ignored, clean)
+        r = modified, added, removed, deleted, unknown, ignored, clean
+        [l.sort() for l in r]
+        return r

     def add(self, list):
         wlock = self.wlock()
@@ -1209,10 +1120,11 class localrepository(repo.repository):
         heads = self.changelog.heads(start)
         # sort the output in rev descending order
         heads = [(-self.changelog.rev(h), h) for h in heads]
-        heads.sort()
-        return [n for (r, n) in heads]
+        return [n for (r, n) in util.sort(heads)]

-    def branchheads(self, branch, start=None):
+    def branchheads(self, branch=None, start=None):
+        if branch is None:
+            branch = self[None].branch()
         branches = self.branchtags()
         if branch not in branches:
             return []

@@ -1250,7 +1162,7 class localrepository(repo.repository):
             if rev in ancestors:
                 ancestors.update(self.changelog.parentrevs(rev))
                 ancestors.remove(rev)
-            elif self
+            elif self[rev].branch() == branch:
                 heads.append(rev)
                 ancestors.update(self.changelog.parentrevs(rev))
         heads = [self.changelog.node(rev) for rev in heads]

@@ -1665,7 +1577,7 class localrepository(repo.repository):
         # Nor do we know which filenodes are missing.
         msng_filenode_set = {}

-        junk = mnfst.index[mnfst
+        junk = mnfst.index[len(mnfst) - 1] # Get around a bug in lazyindex
         junk = None

         # A changeset always belongs to itself, so the changenode lookup

@@ -1860,12 +1772,10 class localrepository(repo.repository):
                 add_extra_nodes(fname,
                                 msng_filenode_set.setdefault(fname, {}))
                 changedfiles[fname] = 1
-        changedfiles = changedfiles.keys()
-        changedfiles.sort()
         # Go through all our files in order sorted by name.
-        for fname in changedfiles:
+        for fname in util.sort(changedfiles):
             filerevlog = self.file(fname)
-            if filerevlog
+            if not len(filerevlog):
                 raise util.Abort(_("empty or missing revlog for %s") % fname)
             # Toss out the filenodes that the recipient isn't really
             # missing.
@@ -1916,10 +1826,10 class localrepository(repo.repository):
         def identity(x):
             return x

-        def gennodelst(
-            for r in
-                n =
-                if
+        def gennodelst(log):
+            for r in log:
+                n = log.node(r)
+                if log.linkrev(n) in revset:
                     yield n

         def changed_file_collector(changedfileset):

@@ -1941,17 +1851,15 class localrepository(repo.repository):
         for chnk in cl.group(nodes, identity,
                              changed_file_collector(changedfiles)):
             yield chnk
-        changedfiles = changedfiles.keys()
-        changedfiles.sort()

         mnfst = self.manifest
         nodeiter = gennodelst(mnfst)
         for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
             yield chnk

-        for fname in changedfiles:
+        for fname in util.sort(changedfiles):
             filerevlog = self.file(fname)
-            if filerevlog
+            if not len(filerevlog):
                 raise util.Abort(_("empty or missing revlog for %s") % fname)
             nodeiter = gennodelst(filerevlog)
             nodeiter = list(nodeiter)

@@ -1980,7 +1888,7 class localrepository(repo.repository):
         """
         def csmap(x):
             self.ui.debug(_("add changeset %s\n") % short(x))
-            return cl
+            return len(cl)

         def revmap(x):
             return cl.rev(x)

@@ -2003,11 +1911,11 class localrepository(repo.repository):
         trp = weakref.proxy(tr)
         # pull off the changeset group
         self.ui.status(_("adding changesets\n"))
-        cor = cl
+        cor = len(cl) - 1
         chunkiter = changegroup.chunkiter(source)
-        if cl.addgroup(chunkiter, csmap, trp
+        if cl.addgroup(chunkiter, csmap, trp) is None and not emptyok:
             raise util.Abort(_("received changelog group is empty"))
-        cnr = cl
+        cnr = len(cl) - 1
         changesets = cnr - cor

         # pull off the manifest group

@@ -2027,11 +1935,11 class localrepository(repo.repository):
                 break
             self.ui.debug(_("adding %s revisions\n") % f)
             fl = self.file(f)
-            o = fl
+            o = len(fl)
             chunkiter = changegroup.chunkiter(source)
             if fl.addgroup(chunkiter, revmap, trp) is None:
                 raise util.Abort(_("received file revlog group is empty"))
-            revisions += fl
+            revisions += len(fl) - o
             files += 1

         # make changelog see real files again
@@ -2139,6 +2047,25 class localrepository(repo.repository):
             return self.stream_in(remote)
         return self.pull(remote, heads)

+    def storefiles(self):
+        '''get all *.i and *.d files in the store
+
+        Returns (list of (filename, size), total_bytes)'''
+
+        lock = None
+        try:
+            self.ui.debug('scanning\n')
+            entries = []
+            total_bytes = 0
+            # get consistent snapshot of repo, lock during scan
+            lock = self.lock()
+            for name, size in self.store.walk():
+                entries.append((name, size))
+                total_bytes += size
+            return entries, total_bytes
+        finally:
+            del lock
+
 # used to avoid circular references so destructors work
 def aftertrans(files):
     renamefiles = [tuple(t) for t in files]
@@ -6,7 +6,8
 # of the GNU General Public License, incorporated herein by reference.

 from i18n import _
-import os, smtplib,
+import os, smtplib, socket
+import util

 def _smtp(ui):
     '''build an smtp connection and return a function to send mail'''

@@ -53,7 +54,7 def _sendmail(ui, sender, recipients, ms
     cmdline = '%s -f %s %s' % (program, util.email(sender),
                                ' '.join(map(util.email, recipients)))
     ui.note(_('sending mail: %s\n') % cmdline)
-    fp =
+    fp = util.popen(cmdline, 'w')
     fp.write(msg)
     ret = fp.close()
     if ret:
@@ -8,7 +8,7
 from node import bin, hex, nullid
 from revlog import revlog, RevlogError
 from i18n import _
-import array, struct, mdiff
+import array, struct, mdiff, parsers, util

 class manifestdict(dict):
     def __init__(self, mapping=None, flags=None):

@@ -18,16 +18,8 class manifestdict(dict):
         self._flags = flags
     def flags(self, f):
         return self._flags.get(f, "")
-    def
-        "test for executable in manifest flags"
-        return "x" in self.flags(f)
-    def linkf(self, f):
-        "test for symlink in manifest flags"
-        return "l" in self.flags(f)
-    def set(self, f, execf=False, linkf=False):
-        if linkf: self._flags[f] = "l"
-        elif execf: self._flags[f] = "x"
-        else: self._flags[f] = ""
+    def set(self, f, flags):
+        self._flags[f] = flags
     def copy(self):
         return manifestdict(dict.copy(self), dict.copy(self._flags))

@@ -39,14 +31,7 class manifest(revlog):

     def parse(self, lines):
         mfdict = manifestdict()
-        for l in lines.splitlines():
-            f, n = l.split('\0')
-            if len(n) > 40:
-                fdict[f] = n[40:]
-                mfdict[f] = bin(n[:40])
-            else:
-                mfdict[f] = bin(n)
+        parsers.parse_manifest(mfdict, mfdict._flags, lines)
         return mfdict

     def readdelta(self, node):

@@ -134,18 +119,16 class manifest(revlog):
        return "".join([struct.pack(">lll", d[0], d[1], len(d[2])) + d[2]
                        for d in x ])

-        def checkforbidden(
+        def checkforbidden(l):
+            for f in l:
                 if '\n' in f or '\r' in f:
                     raise RevlogError(_("'\\n' and '\\r' disallowed in filenames"))

         # if we're using the listcache, make sure it is valid and
         # parented by the same node we're diffing against
         if not (changed and self.listcache and p1 and self.mapcache[0] == p1):
-            files = map
-            files
-
-            for f in files:
-                checkforbidden(f)
+            files = util.sort(map)
+            checkforbidden(files)

             # if this is changed to support newlines in filenames,
             # be sure to check the templates/ dir again (especially *-raw.tmpl)

@@ -156,8 +139,7 class manifest(revlog):
         else:
             addlist = self.listcache

-            checkforbidden(f)
+            checkforbidden(changed[0])
             # combine the changed lists into one list for sorting
             work = [[x, 0] for x in changed[0]]
             work[len(work):] = [[x, 1] for x in changed[1]]
@@ -6,7 +6,7
 # of the GNU General Public License, incorporated herein by reference.

 from i18n import _
 import bdiff, mpatch, re, struct, util

 def splitnewlines(text):
     '''like str.splitlines, but only split on newlines.'''

@@ -78,10 +78,7 def unidiff(a, ad, b, bd, fn1, fn2, r=No
     epoch = util.datestr((0, 0))

     if not opts.text and (util.binary(a) or util.binary(b)):
-        def h(v):
-            # md5 is used instead of sha1 because md5 is supposedly faster
-            return md5.new(v).digest()
-        if a and b and len(a) == len(b) and h(a) == h(b):
+        if a and b and len(a) == len(b) and a == b:
             return ""
         l = ['Binary file %s has changed\n' % fn1]
     elif not a:
@@ -5,9 +5,70
 # This software may be used and distributed according to the terms
 # of the GNU General Public License, incorporated herein by reference.

-from node import nullid, nullrev
+from node import nullid, nullrev, hex, bin
 from i18n import _
-import errno, util, os, filemerge, copies
+import errno, util, os, filemerge, copies, shutil
+
+class mergestate(object):
+    '''track 3-way merge state of individual files'''
+    def __init__(self, repo):
+        self._repo = repo
+        self._read()
+    def reset(self, node):
+        self._state = {}
+        self._local = node
+        shutil.rmtree(self._repo.join("merge"), True)
+    def _read(self):
+        self._state = {}
+        try:
+            localnode = None
+            f = self._repo.opener("merge/state")
+            for i, l in enumerate(f):
+                if i == 0:
+                    localnode = l[:-1]
+                else:
+                    bits = l[:-1].split("\0")
+                    self._state[bits[0]] = bits[1:]
+            self._local = bin(localnode)
+        except IOError, err:
+            if err.errno != errno.ENOENT:
+                raise
+    def _write(self):
+        f = self._repo.opener("merge/state", "w")
+        f.write(hex(self._local) + "\n")
+        for d, v in self._state.items():
+            f.write("\0".join([d] + v) + "\n")
+    def add(self, fcl, fco, fca, fd, flags):
+        hash = util.sha1(fcl.path()).hexdigest()
+        self._repo.opener("merge/" + hash, "w").write(fcl.data())
+        self._state[fd] = ['u', hash, fcl.path(), fca.path(),
+                           hex(fca.filenode()), fco.path(), flags]
+        self._write()
+    def __contains__(self, dfile):
+        return dfile in self._state
+    def __getitem__(self, dfile):
+        return self._state[dfile][0]
+    def __iter__(self):
+        l = self._state.keys()
+        l.sort()
+        for f in l:
+            yield f
+    def mark(self, dfile, state):
+        self._state[dfile][0] = state
+        self._write()
+    def resolve(self, dfile, wctx, octx):
+        if self[dfile] == 'r':
+            return 0
+        state, hash, lfile, afile, anode, ofile, flags = self._state[dfile]
+        f = self._repo.opener("merge/" + hash)
+        self._repo.wwrite(dfile, f.read(), flags)
+        fcd = wctx[dfile]
+        fco = octx[ofile]
+        fca = self._repo.filectx(afile, fileid=anode)
+        r = filemerge.filemerge(self._repo, self._local, lfile, fcd, fco, fca)
+        if not r:
+            self.mark(dfile, 'r')
+        return r

 def _checkunknown(wctx, mctx):
     "check for collisions between unknown files and files in mctx"

@@ -197,19 +258,44 def manifestmerge(repo, p1, p2, pa, over

     return action

+def actioncmp(a1, a2):
+    m1 = a1[1]
+    m2 = a2[1]
+    if m1 == m2:
+        return cmp(a1, a2)
+    if m1 == 'r':
+        return -1
+    if m2 == 'r':
+        return 1
+    return cmp(a1, a2)
+
 def applyupdates(repo, action, wctx, mctx):
     "apply the merge action list to the working directory"

     updated, merged, removed, unresolved = 0, 0, 0, 0
-    action.sort()
-    # prescan for copy/renames
+    ms = mergestate(repo)
+    ms.reset(wctx.parents()[0].node())
+    moves = []
+    action.sort(actioncmp)
+
+    # prescan for merges
     for a in action:
         f, m = a[:2]
         if m == 'm': # merge
             f2, fd, flags, move = a[2:]
-            if f != fd:
-                repo.ui.debug(_("copying %s to %s\n") % (f, fd))
-                repo.wwrite(fd, repo.wread(f), flags)
+            repo.ui.debug(_("preserving %s for resolve of %s\n") % (f, fd))
+            fcl = wctx[f]
+            fco = mctx[f2]
+            fca = fcl.ancestor(fco) or repo.filectx(f, fileid=nullrev)
+            ms.add(fcl, fco, fca, fd, flags)
+            if f != fd and move:
+                moves.append(f)
+
+    # remove renamed files after safely stored
+    for f in moves:
+        if util.lexists(repo.wjoin(f)):
+            repo.ui.debug(_("removing %s\n") % f)
+            os.unlink(repo.wjoin(f))

     audit_path = util.path_auditor(repo.root)

@@ -229,7 +315,7 def applyupdates(repo, action, wctx, mct
             removed += 1
         elif m == "m": # merge
             f2, fd, flags, move = a[2:]
-            r =
+            r = ms.resolve(fd, wctx, mctx)
             if r > 0:
                 unresolved += 1
             else:

@@ -237,10 +323,6 def applyupdates(repo, action, wctx, mct
                     updated += 1
                 else:
                     merged += 1
-            util.set_flags(repo.wjoin(fd), flags)
-            if f != fd and move and util.lexists(repo.wjoin(f)):
-                repo.ui.debug(_("removing %s\n") % f)
-                os.unlink(repo.wjoin(f))
         elif m == "g": # get
             flags = a[2]
             repo.ui.note(_("getting %s\n") % f)

@@ -337,7 +419,7 def update(repo, node, branchmerge, forc

     wlock = repo.wlock()
     try:
-        wc = repo
+        wc = repo[None]
         if node is None:
             # tip of current branch
             try:

@@ -349,7 +431,7 def update(repo, node, branchmerge, forc
                 raise util.Abort(_("branch %s not found") % wc.branch())
         overwrite = force and not branchmerge
         pl = wc.parents()
-        p1, p2 = pl[0], repo
+        p1, p2 = pl[0], repo[node]
         pa = p1.ancestor(p2)
         fp1, fp2, xp1, xp2 = p1.node(), p2.node(), str(p1), str(p2)
         fastforward = False

@@ -388,7 +470,7 def update(repo, node, branchmerge, forc
         action = []
         if not force:
             _checkunknown(wc, p2)
-            if not util.check
+            if not util.checkcase(repo.path):
                 _checkcollision(p2)
             action += _forgetremoved(wc, p2, branchmerge)
             action += manifestmerge(repo, wc, p2, pa, overwrite, partial)
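Editor's note: the new actioncmp comparator above sorts removals ahead of every other merge action so renamed sources are handled before gets and merges run. A minimal sketch of that ordering, copied from the hunk and driven with toy data (the file names are hypothetical):

# Python 2 (cmp() and list.sort(cmp) as used in the patched code).
def actioncmp(a1, a2):
    m1, m2 = a1[1], a2[1]
    if m1 == m2:
        return cmp(a1, a2)
    if m1 == 'r':
        return -1
    if m2 == 'r':
        return 1
    return cmp(a1, a2)

actions = [('b.txt', 'm'), ('a.txt', 'g'), ('old.txt', 'r')]
actions.sort(actioncmp)
# removals first, then the remaining actions in tuple order
assert [m for f, m in actions] == ['r', 'g', 'm']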
@@ -8,8 +8,8 | |||||
8 |
|
8 | |||
9 | from i18n import _ |
|
9 | from i18n import _ | |
10 | from node import hex, nullid, short |
|
10 | from node import hex, nullid, short | |
11 |
import base85, cmdutil, mdiff, util, |
|
11 | import base85, cmdutil, mdiff, util, revlog, diffhelpers, copies | |
12 |
import cStringIO, email.Parser, os, popen2, re, |
|
12 | import cStringIO, email.Parser, os, popen2, re, errno | |
13 | import sys, tempfile, zlib |
|
13 | import sys, tempfile, zlib | |
14 |
|
14 | |||
15 | class PatchError(Exception): |
|
15 | class PatchError(Exception): | |
@@ -1094,8 +1094,7 def updatedir(ui, repo, patches): | |||||
1094 | repo.copy(src, dst) |
|
1094 | repo.copy(src, dst) | |
1095 | removes = removes.keys() |
|
1095 | removes = removes.keys() | |
1096 | if removes: |
|
1096 | if removes: | |
1097 |
remove |
|
1097 | repo.remove(util.sort(removes), True) | |
1098 | repo.remove(removes, True) |
|
|||
1099 | for f in patches: |
|
1098 | for f in patches: | |
1100 | ctype, gp = patches[f] |
|
1099 | ctype, gp = patches[f] | |
1101 | if gp and gp.mode: |
|
1100 | if gp and gp.mode: | |
@@ -1113,9 +1112,7 def updatedir(ui, repo, patches): | |||||
1113 | cmdutil.addremove(repo, cfiles) |
|
1112 | cmdutil.addremove(repo, cfiles) | |
1114 | files = patches.keys() |
|
1113 | files = patches.keys() | |
1115 | files.extend([r for r in removes if r not in files]) |
|
1114 | files.extend([r for r in removes if r not in files]) | |
1116 |
|
|
1115 | return util.sort(files) | |
1117 |
|
||||
1118 | return files |
|
|||
1119 |
|
1116 | |||
1120 | def b85diff(to, tn): |
|
1117 | def b85diff(to, tn): | |
1121 | '''print base85-encoded binary diff''' |
|
1118 | '''print base85-encoded binary diff''' | |
@@ -1123,7 +1120,7 def b85diff(to, tn): | |||||
1123 | if not text: |
|
1120 | if not text: | |
1124 | return '0' * 40 |
|
1121 | return '0' * 40 | |
1125 | l = len(text) |
|
1122 | l = len(text) | |
1126 |
s = |
|
1123 | s = util.sha1('blob %d\0' % l) | |
1127 | s.update(text) |
|
1124 | s.update(text) | |
1128 | return s.hexdigest() |
|
1125 | return s.hexdigest() | |
1129 |
|
1126 | |||
@@ -1155,7 +1152,7 def b85diff(to, tn): | |||||
1155 | ret.append('\n') |
|
1152 | ret.append('\n') | |
1156 | return ''.join(ret) |
|
1153 | return ''.join(ret) | |
1157 |
|
1154 | |||
1158 | def diff(repo, node1=None, node2=None, |
|
1155 | def diff(repo, node1=None, node2=None, match=None, | |
1159 | fp=None, changes=None, opts=None): |
|
1156 | fp=None, changes=None, opts=None): | |
1160 | '''print diff of changes to files between two nodes, or node and |
|
1157 | '''print diff of changes to files between two nodes, or node and | |
1161 | working directory. |
|
1158 | working directory. | |
@@ -1163,6 +1160,9 def diff(repo, node1=None, node2=None, f | |||||
1163 | if node1 is None, use first dirstate parent instead. |
|
1160 | if node1 is None, use first dirstate parent instead. | |
1164 | if node2 is None, compare node1 with working directory.''' |
|
1161 | if node2 is None, compare node1 with working directory.''' | |
1165 |
|
1162 | |||
|
1163 | if not match: | |||
|
1164 | match = cmdutil.matchall(repo) | |||
|
1165 | ||||
1166 | if opts is None: |
|
1166 | if opts is None: | |
1167 | opts = mdiff.defaultopts |
|
1167 | opts = mdiff.defaultopts | |
1168 | if fp is None: |
|
1168 | if fp is None: | |
@@ -1171,12 +1171,6 def diff(repo, node1=None, node2=None, f | |||||
1171 | if not node1: |
|
1171 | if not node1: | |
1172 | node1 = repo.dirstate.parents()[0] |
|
1172 | node1 = repo.dirstate.parents()[0] | |
1173 |
|
1173 | |||
1174 | ccache = {} |
|
|||
1175 | def getctx(r): |
|
|||
1176 | if r not in ccache: |
|
|||
1177 | ccache[r] = context.changectx(repo, r) |
|
|||
1178 | return ccache[r] |
|
|||
1179 |
|
||||
1180 | flcache = {} |
|
1174 | flcache = {} | |
1181 | def getfilectx(f, ctx): |
|
1175 | def getfilectx(f, ctx): | |
1182 | flctx = ctx.filectx(f, filelog=flcache.get(f)) |
|
1176 | flctx = ctx.filectx(f, filelog=flcache.get(f)) | |
@@ -1186,30 +1180,19 def diff(repo, node1=None, node2=None, f | |||||
1186 |
|
1180 | |||
1187 | # reading the data for node1 early allows it to play nicely |
|
1181 | # reading the data for node1 early allows it to play nicely | |
1188 | # with repo.status and the revlog cache. |
|
1182 | # with repo.status and the revlog cache. | |
1189 | ctx1 = context.changectx(repo, node1) |
|
1183 | ctx1 = repo[node1] | |
1190 | # force manifest reading |
|
1184 | # force manifest reading | |
1191 | man1 = ctx1.manifest() |
|
1185 | man1 = ctx1.manifest() | |
1192 | date1 = util.datestr(ctx1.date()) |
|
1186 | date1 = util.datestr(ctx1.date()) | |
1193 |
|
1187 | |||
1194 | if not changes: |
|
1188 | if not changes: | |
1195 | changes = repo.status(node1, node2, |
|
1189 | changes = repo.status(node1, node2, match=match) | |
1196 | modified, added, removed |
|
1190 | modified, added, removed = changes[:3] | |
1197 |
|
1191 | |||
1198 | if not modified and not added and not removed: |
|
1192 | if not modified and not added and not removed: | |
1199 | return |
|
1193 | return | |
1200 |
|
1194 | |||
1201 | if node2: |
|
1195 | ctx2 = repo[node2] | |
1202 | ctx2 = context.changectx(repo, node2) |
|
|||
1203 | execf2 = ctx2.manifest().execf |
|
|||
1204 | linkf2 = ctx2.manifest().linkf |
|
|||
1205 | else: |
|
|||
1206 | ctx2 = context.workingctx(repo) |
|
|||
1207 | execf2 = util.execfunc(repo.root, None) |
|
|||
1208 | linkf2 = util.linkfunc(repo.root, None) |
|
|||
1209 | if execf2 is None: |
|
|||
1210 | mc = ctx2.parents()[0].manifest().copy() |
|
|||
1211 | execf2 = mc.execf |
|
|||
1212 | linkf2 = mc.linkf |
|
|||
1213 |
|
1196 | |||
1214 | if repo.ui.quiet: |
|
1197 | if repo.ui.quiet: | |
1215 | r = None |
|
1198 | r = None | |
@@ -1218,15 +1201,14 def diff(repo, node1=None, node2=None, f | |||||
1218 | r = [hexfunc(node) for node in [node1, node2] if node] |
|
1201 | r = [hexfunc(node) for node in [node1, node2] if node] | |
1219 |
|
1202 | |||
1220 | if opts.git: |
|
1203 | if opts.git: | |
1221 | copy, diverge = copies.copies(repo, ctx1, ctx2, repo.changectx(nullid)) |
|
1204 | copy, diverge = copies.copies(repo, ctx1, ctx2, repo[nullid]) | |
1222 | for k, v in copy.items(): |
|
1205 | for k, v in copy.items(): | |
1223 | copy[v] = k |
|
1206 | copy[v] = k | |
1224 |
|
1207 | |||
1225 | all = modified + added + removed |
|
|||
1226 | all.sort() |
|
|||
1227 | gone = {} |
|
1208 | gone = {} | |
|
1209 | gitmode = {'l': '120000', 'x': '100755', '': '100644'} | |||
1228 |
|
1210 | |||
1229 | for f in all: |
|
1211 | for f in util.sort(modified + added + removed): | |
1230 | to = None |
|
1212 | to = None | |
1231 | tn = None |
|
1213 | tn = None | |
1232 | dodiff = True |
|
1214 | dodiff = True | |
@@ -1237,18 +1219,16 def diff(repo, node1=None, node2=None, f | |||||
1237 | tn = getfilectx(f, ctx2).data() |
|
1219 | tn = getfilectx(f, ctx2).data() | |
1238 | a, b = f, f |
|
1220 | a, b = f, f | |
1239 | if opts.git: |
|
1221 | if opts.git: | |
1240 | def gitmode(x, l): |
|
|||
1241 | return l and '120000' or (x and '100755' or '100644') |
|
|||
1242 | def addmodehdr(header, omode, nmode): |
|
1222 | def addmodehdr(header, omode, nmode): | |
1243 | if omode != nmode: |
|
1223 | if omode != nmode: | |
1244 | header.append('old mode %s\n' % omode) |
|
1224 | header.append('old mode %s\n' % omode) | |
1245 | header.append('new mode %s\n' % nmode) |
|
1225 | header.append('new mode %s\n' % nmode) | |
1246 |
|
1226 | |||
1247 | if f in added: |
|
1227 | if f in added: | |
1248 | mode = gitmode(execf2(f), linkf2(f)) |
|
1228 | mode = gitmode[ctx2.flags(f)] | |
1249 | if f in copy: |
|
1229 | if f in copy: | |
1250 | a = copy[f] |
|
1230 | a = copy[f] | |
1251 | omode = gitmode(man1.execf(a), man1.linkf(a)) |
|
1231 | omode = gitmode[man1.flags(a)] | |
1252 | addmodehdr(header, omode, mode) |
|
1232 | addmodehdr(header, omode, mode) | |
1253 | if a in removed and a not in gone: |
|
1233 | if a in removed and a not in gone: | |
1254 | op = 'rename' |
|
1234 | op = 'rename' | |
@@ -1267,11 +1247,11 def diff(repo, node1=None, node2=None, f | |||||
1267 | if f in copy and copy[f] in added and copy[copy[f]] == f: |
|
1247 | if f in copy and copy[f] in added and copy[copy[f]] == f: | |
1268 | dodiff = False |
|
1248 | dodiff = False | |
1269 | else: |
|
1249 | else: | |
1270 | mode = gitmode(man1.execf(f), man1.linkf(f)) |
|
1250 | header.append('deleted file mode %s\n' % | |
1271 | header.append('deleted file mode %s\n' % mode) |
|
1251 | gitmode[man1.flags(f)]) | |
1272 | else: |
|
1252 | else: | |
1273 |
omode = gitmode |
|
1253 | omode = gitmode[man1.flags(f)] | |
1274 | nmode = gitmode(execf2(f), linkf2(f)) |
|
1254 | nmode = gitmode[ctx2.flags(f)] | |
1275 | addmodehdr(header, omode, nmode) |
|
1255 | addmodehdr(header, omode, nmode) | |
1276 | if util.binary(to) or util.binary(tn): |
|
1256 | if util.binary(to) or util.binary(tn): | |
1277 | dodiff = 'binary' |
|
1257 | dodiff = 'binary' | |
@@ -1297,7 +1277,7 def export(repo, revs, template='hg-%h.p | |||||
1297 | revwidth = max([len(str(rev)) for rev in revs]) |
|
1277 | revwidth = max([len(str(rev)) for rev in revs]) | |
1298 |
|
1278 | |||
1299 | def single(rev, seqno, fp): |
|
1279 | def single(rev, seqno, fp): | |
1300 | ctx = repo.changectx(rev) |
|
1280 | ctx = repo[rev] | |
1301 | node = ctx.node() |
|
1281 | node = ctx.node() | |
1302 | parents = [p.node() for p in ctx.parents() if p] |
|
1282 | parents = [p.node() for p in ctx.parents() if p] | |
1303 | branch = ctx.branch() |
|
1283 | branch = ctx.branch() |
@@ -23,8 +23,8 def _collectfiles(repo, striprev): | |||||
23 | """find out the filelogs affected by the strip""" |
|
23 | """find out the filelogs affected by the strip""" | |
24 | files = {} |
|
24 | files = {} | |
25 |
|
25 | |||
26 | for x in xrange(striprev, repo.changelog.count()): |
|
26 | for x in xrange(striprev, len(repo)): | |
27 | for name in repo.changectx(x).files(): |
|
27 | for name in repo[x].files(): | |
28 | if name in files: |
|
28 | if name in files: | |
29 | continue |
|
29 | continue | |
30 | files[name] = 1 |
|
30 | files[name] = 1 | |
@@ -37,7 +37,7 def _collectextranodes(repo, files, link | |||||
37 | """return the nodes that have to be saved before the strip""" |
|
37 | """return the nodes that have to be saved before the strip""" | |
38 | def collectone(revlog): |
|
38 | def collectone(revlog): | |
39 | extra = [] |
|
39 | extra = [] | |
40 | startrev = count = revlog.count() |
|
40 | startrev = count = len(revlog) | |
41 | # find the truncation point of the revlog |
|
41 | # find the truncation point of the revlog | |
42 | for i in xrange(0, count): |
|
42 | for i in xrange(0, count): | |
43 | node = revlog.node(i) |
|
43 | node = revlog.node(i) | |
@@ -72,7 +72,6 def _collectextranodes(repo, files, link | |||||
72 | def strip(ui, repo, node, backup="all"): |
|
72 | def strip(ui, repo, node, backup="all"): | |
73 | cl = repo.changelog |
|
73 | cl = repo.changelog | |
74 | # TODO delete the undo files, and handle undo of merge sets |
|
74 | # TODO delete the undo files, and handle undo of merge sets | |
75 | pp = cl.parents(node) |
|
|||
76 | striprev = cl.rev(node) |
|
75 | striprev = cl.rev(node) | |
77 |
|
76 | |||
78 | # Some revisions with rev > striprev may not be descendants of striprev. |
|
77 | # Some revisions with rev > striprev may not be descendants of striprev. | |
@@ -85,7 +84,7 def strip(ui, repo, node, backup="all"): | |||||
85 | tostrip = {striprev: 1} |
|
84 | tostrip = {striprev: 1} | |
86 | saveheads = {} |
|
85 | saveheads = {} | |
87 | savebases = [] |
|
86 | savebases = [] | |
88 | for r in xrange(striprev + 1, cl.count()): |
|
87 | for r in xrange(striprev + 1, len(cl)): | |
89 | parents = cl.parentrevs(r) |
|
88 | parents = cl.parentrevs(r) | |
90 | if parents[0] in tostrip or parents[1] in tostrip: |
|
89 | if parents[0] in tostrip or parents[1] in tostrip: | |
91 | # r is a descendant of striprev |
|
90 | # r is a descendant of striprev |
@@ -40,3 +40,9 class repository(object): | |||||
40 |
|
40 | |||
41 | def cancopy(self): |
|
41 | def cancopy(self): | |
42 | return self.local() |
|
42 | return self.local() | |
|
43 | ||||
|
44 | def rjoin(self, path): | |||
|
45 | url = self.url() | |||
|
46 | if url.endswith('/'): | |||
|
47 | return url + path | |||
|
48 | return url + '/' + path |
@@ -13,13 +13,13 of the GNU General Public License, incor | |||||
13 | from node import bin, hex, nullid, nullrev, short |
|
13 | from node import bin, hex, nullid, nullrev, short | |
14 | from i18n import _ |
|
14 | from i18n import _ | |
15 | import changegroup, errno, ancestor, mdiff |
|
15 | import changegroup, errno, ancestor, mdiff | |
16 | import sha, struct, util, zlib |
|
16 | import struct, util, zlib | |
17 |
|
17 | |||
18 | _pack = struct.pack |
|
18 | _pack = struct.pack | |
19 | _unpack = struct.unpack |
|
19 | _unpack = struct.unpack | |
20 | _compress = zlib.compress |
|
20 | _compress = zlib.compress | |
21 | _decompress = zlib.decompress |
|
21 | _decompress = zlib.decompress | |
22 | _sha = |
|
22 | _sha = util.sha1 | |
23 |
|
23 | |||
24 | # revlog flags |
|
24 | # revlog flags | |
25 | REVLOGV0 = 0 |
|
25 | REVLOGV0 = 0 | |
@@ -32,13 +32,16 REVLOG_DEFAULT_VERSION = REVLOG_DEFAULT_ | |||||
32 | class RevlogError(Exception): |
|
32 | class RevlogError(Exception): | |
33 | pass |
|
33 | pass | |
34 |
|
34 | |||
35 | class LookupError(RevlogError): |
|
35 | class LookupError(RevlogError, KeyError): | |
36 | def __init__(self, name, index, message): |
|
36 | def __init__(self, name, index, message): | |
37 | self.name = name |
|
37 | self.name = name | |
38 | if isinstance(name, str) and len(name) == 20: |
|
38 | if isinstance(name, str) and len(name) == 20: | |
39 | name = short(name) |
|
39 | name = short(name) | |
40 | RevlogError.__init__(self, _('%s@%s: %s') % (index, name, message)) |
|
40 | RevlogError.__init__(self, _('%s@%s: %s') % (index, name, message)) | |
41 |
|
41 | |||
|
42 | def __str__(self): | |||
|
43 | return RevlogError.__str__(self) | |||
|
44 | ||||
42 | def getoffset(q): |
|
45 | def getoffset(q): | |
43 | return int(q >> 16) |
|
46 | return int(q >> 16) | |
44 |
|
47 | |||
@@ -512,9 +515,11 class revlog(object): | |||||
512 |
|
515 | |||
513 | def tip(self): |
|
516 | def tip(self): | |
514 | return self.node(len(self.index) - 2) |
|
517 | return self.node(len(self.index) - 2) | |
515 | def count(self): |
|
518 | def __len__(self): | |
516 | return len(self.index) - 1 |
|
519 | return len(self.index) - 1 | |
517 |
|
520 | def __iter__(self): | ||
|
521 | for i in xrange(len(self)): | |||
|
522 | yield i | |||
518 | def rev(self, node): |
|
523 | def rev(self, node): | |
519 | try: |
|
524 | try: | |
520 | return self.nodemap[node] |
|
525 | return self.nodemap[node] | |
@@ -591,6 +596,27 class revlog(object): | |||||
591 | visit.append(p) |
|
596 | visit.append(p) | |
592 | return reachable |
|
597 | return reachable | |
593 |
|
598 | |||
|
599 | def ancestors(self, *revs): | |||
|
600 | 'Generate the ancestors of revs using a breadth-first visit' | |||
|
601 | visit = list(revs) | |||
|
602 | seen = util.set([nullrev]) | |||
|
603 | while visit: | |||
|
604 | for parent in self.parentrevs(visit.pop(0)): | |||
|
605 | if parent not in seen: | |||
|
606 | visit.append(parent) | |||
|
607 | seen.add(parent) | |||
|
608 | yield parent | |||
|
609 | ||||
|
610 | def descendants(self, *revs): | |||
|
611 | 'Generate the descendants of revs in topological order' | |||
|
612 | seen = util.set(revs) | |||
|
613 | for i in xrange(min(revs) + 1, len(self)): | |||
|
614 | for x in self.parentrevs(i): | |||
|
615 | if x != nullrev and x in seen: | |||
|
616 | seen.add(i) | |||
|
617 | yield i | |||
|
618 | break | |||
|
619 | ||||
594 | def nodesbetween(self, roots=None, heads=None): |
|
620 | def nodesbetween(self, roots=None, heads=None): | |
595 | """Return a tuple containing three elements. Elements 1 and 2 contain |
|
621 | """Return a tuple containing three elements. Elements 1 and 2 contain | |
596 | a final list bases and heads after all the unreachable ones have been |
|
622 | a final list bases and heads after all the unreachable ones have been | |
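For illustration only: a minimal standalone sketch of the breadth-first walk performed by the ancestors() generator added above, run against a hand-built parent table instead of a real revlog (the revision numbers and parents below are made up).

    nullrev = -1
    parentrevs = {0: (nullrev, nullrev), 1: (0, nullrev), 2: (0, nullrev), 3: (1, 2)}

    def ancestors(*revs):
        # breadth-first: pop the oldest pending rev, yield any unseen parent
        visit = list(revs)
        seen = set([nullrev])
        while visit:
            for parent in parentrevs[visit.pop(0)]:
                if parent not in seen:
                    visit.append(parent)
                    seen.add(parent)
                    yield parent

    print(list(ancestors(3)))     # [1, 2, 0]
    print(list(ancestors(1, 2)))  # [0]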
@@ -617,12 +643,11 class revlog(object): | |||||
617 | lowestrev = nullrev |
|
643 | lowestrev = nullrev | |
618 | if (lowestrev == nullrev) and (heads is None): |
|
644 | if (lowestrev == nullrev) and (heads is None): | |
619 | # We want _all_ the nodes! |
|
645 | # We want _all_ the nodes! | |
620 | return ([self.node(r) for r in xrange(self.count())], |
|
646 | return ([self.node(r) for r in self], [nullid], list(self.heads())) | |
621 | [nullid], list(self.heads())) |
|
|||
622 | if heads is None: |
|
647 | if heads is None: | |
623 | # All nodes are ancestors, so the latest ancestor is the last |
|
648 | # All nodes are ancestors, so the latest ancestor is the last | |
624 | # node. |
|
649 | # node. | |
625 | highestrev = self.count() - 1 |
|
650 | highestrev = len(self) - 1 | |
626 | # Set ancestors to None to signal that every node is an ancestor. |
|
651 | # Set ancestors to None to signal that every node is an ancestor. | |
627 | ancestors = None |
|
652 | ancestors = None | |
628 | # Set heads to an empty dictionary for later discovery of heads |
|
653 | # Set heads to an empty dictionary for later discovery of heads | |
@@ -751,15 +776,15 class revlog(object): | |||||
751 | as if they had no children |
|
776 | as if they had no children | |
752 | """ |
|
777 | """ | |
753 | if start is None and stop is None: |
|
778 | if start is None and stop is None: | |
754 | count = self.count() |
|
779 | count = len(self) | |
755 | if not count: |
|
780 | if not count: | |
756 | return [nullid] |
|
781 | return [nullid] | |
757 | ishead = [1] * (count + 1) |
|
782 | ishead = [1] * (count + 1) | |
758 | index = self.index |
|
783 | index = self.index | |
759 | for r in xrange(count): |
|
784 | for r in self: | |
760 | e = index[r] |
|
785 | e = index[r] | |
761 | ishead[e[5]] = ishead[e[6]] = 0 |
|
786 | ishead[e[5]] = ishead[e[6]] = 0 | |
762 | return [self.node(r) for r in xrange(count) if ishead[r]] |
|
787 | return [self.node(r) for r in self if ishead[r]] | |
763 |
|
788 | |||
764 | if start is None: |
|
789 | if start is None: | |
765 | start = nullid |
|
790 | start = nullid | |
@@ -771,7 +796,7 class revlog(object): | |||||
771 | heads = {startrev: 1} |
|
796 | heads = {startrev: 1} | |
772 |
|
797 | |||
773 | parentrevs = self.parentrevs |
|
798 | parentrevs = self.parentrevs | |
774 | for r in xrange(startrev + 1, self.count()): |
|
799 | for r in xrange(startrev + 1, len(self)): | |
775 | for p in parentrevs(r): |
|
800 | for p in parentrevs(r): | |
776 | if p in reachable: |
|
801 | if p in reachable: | |
777 | if r not in stoprevs: |
|
802 | if r not in stoprevs: | |
@@ -786,7 +811,7 class revlog(object): | |||||
786 | """find the children of a given node""" |
|
811 | """find the children of a given node""" | |
787 | c = [] |
|
812 | c = [] | |
788 | p = self.rev(node) |
|
813 | p = self.rev(node) | |
789 | for r in range(p + 1, self.count()): |
|
814 | for r in range(p + 1, len(self)): | |
790 | prevs = [pr for pr in self.parentrevs(r) if pr != nullrev] |
|
815 | prevs = [pr for pr in self.parentrevs(r) if pr != nullrev] | |
791 | if prevs: |
|
816 | if prevs: | |
792 | for pr in prevs: |
|
817 | for pr in prevs: | |
@@ -815,8 +840,8 class revlog(object): | |||||
815 | if str(rev) != id: |
|
840 | if str(rev) != id: | |
816 | raise ValueError |
|
841 | raise ValueError | |
817 | if rev < 0: |
|
842 | if rev < 0: | |
818 | rev = self.count() + rev |
|
843 | rev = len(self) + rev | |
819 | if rev < 0 or rev >= self.count(): |
|
844 | if rev < 0 or rev >= len(self): | |
820 | raise ValueError |
|
845 | raise ValueError | |
821 | return self.node(rev) |
|
846 | return self.node(rev) | |
822 | except (ValueError, OverflowError): |
|
847 | except (ValueError, OverflowError): | |
@@ -979,7 +1004,7 class revlog(object): | |||||
979 | df = self.opener(self.datafile, 'w') |
|
1004 | df = self.opener(self.datafile, 'w') | |
980 | try: |
|
1005 | try: | |
981 | calc = self._io.size |
|
1006 | calc = self._io.size | |
982 | for r in xrange(self.count()): |
|
1007 | for r in self: | |
983 | start = self.start(r) + (r + 1) * calc |
|
1008 | start = self.start(r) + (r + 1) * calc | |
984 | length = self.length(r) |
|
1009 | length = self.length(r) | |
985 | fp.seek(start) |
|
1010 | fp.seek(start) | |
@@ -992,7 +1017,7 class revlog(object): | |||||
992 | fp = self.opener(self.indexfile, 'w', atomictemp=True) |
|
1017 | fp = self.opener(self.indexfile, 'w', atomictemp=True) | |
993 | self.version &= ~(REVLOGNGINLINEDATA) |
|
1018 | self.version &= ~(REVLOGNGINLINEDATA) | |
994 | self._inline = False |
|
1019 | self._inline = False | |
995 | for i in xrange(self.count()): |
|
1020 | for i in self: | |
996 | e = self._io.packentry(self.index[i], self.node, self.version, i) |
|
1021 | e = self._io.packentry(self.index[i], self.node, self.version, i) | |
997 | fp.write(e) |
|
1022 | fp.write(e) | |
998 |
|
1023 | |||
@@ -1028,7 +1053,7 class revlog(object): | |||||
1028 | if node in self.nodemap: |
|
1053 | if node in self.nodemap: | |
1029 | return node |
|
1054 | return node | |
1030 |
|
1055 | |||
1031 | curr = self.count() |
|
1056 | curr = len(self) | |
1032 | prev = curr - 1 |
|
1057 | prev = curr - 1 | |
1033 | base = self.base(prev) |
|
1058 | base = self.base(prev) | |
1034 | offset = self.end(prev) |
|
1059 | offset = self.end(prev) | |
@@ -1133,7 +1158,7 class revlog(object): | |||||
1133 |
|
1158 | |||
1134 | yield changegroup.closechunk() |
|
1159 | yield changegroup.closechunk() | |
1135 |
|
1160 | |||
1136 | def addgroup(self, revs, linkmapper, transaction, unique=0): |
|
1161 | def addgroup(self, revs, linkmapper, transaction): | |
1137 | """ |
|
1162 | """ | |
1138 | add a delta group |
|
1163 | add a delta group | |
1139 |
|
1164 | |||
@@ -1143,7 +1168,7 class revlog(object): | |||||
1143 | """ |
|
1168 | """ | |
1144 |
|
1169 | |||
1145 | #track the base of the current delta log |
|
1170 | #track the base of the current delta log | |
1146 | r = self.count() |
|
1171 | r = len(self) | |
1147 | t = r - 1 |
|
1172 | t = r - 1 | |
1148 | node = None |
|
1173 | node = None | |
1149 |
|
1174 | |||
@@ -1170,8 +1195,6 class revlog(object): | |||||
1170 | link = linkmapper(cs) |
|
1195 | link = linkmapper(cs) | |
1171 | if node in self.nodemap: |
|
1196 | if node in self.nodemap: | |
1172 | # this can happen if two branches make the same change |
|
1197 | # this can happen if two branches make the same change | |
1173 | # if unique: |
|
|||
1174 | # raise RevlogError(_("already have %s") % hex(node[:4])) |
|
|||
1175 | chain = node |
|
1198 | chain = node | |
1176 | continue |
|
1199 | continue | |
1177 | delta = buffer(chunk, 80) |
|
1200 | delta = buffer(chunk, 80) | |
@@ -1264,13 +1287,13 class revlog(object): | |||||
1264 | trust that the caller has saved the revisions that shouldn't be |
|
1287 | trust that the caller has saved the revisions that shouldn't be | |
1265 | removed and that it'll readd them after this truncation. |
|
1288 | removed and that it'll readd them after this truncation. | |
1266 | """ |
|
1289 | """ | |
1267 | if self.count() == 0: |
|
1290 | if len(self) == 0: | |
1268 | return |
|
1291 | return | |
1269 |
|
1292 | |||
1270 | if isinstance(self.index, lazyindex): |
|
1293 | if isinstance(self.index, lazyindex): | |
1271 | self._loadindexmap() |
|
1294 | self._loadindexmap() | |
1272 |
|
1295 | |||
1273 |
for rev in |
|
1296 | for rev in self: | |
1274 | if self.index[rev][4] >= minlink: |
|
1297 | if self.index[rev][4] >= minlink: | |
1275 | break |
|
1298 | break | |
1276 | else: |
|
1299 | else: | |
@@ -1291,15 +1314,15 class revlog(object): | |||||
1291 | # then reset internal state in memory to forget those revisions |
|
1314 | # then reset internal state in memory to forget those revisions | |
1292 | self._cache = None |
|
1315 | self._cache = None | |
1293 | self._chunkcache = None |
|
1316 | self._chunkcache = None | |
1294 | for x in xrange(rev, self.count()): |
|
1317 | for x in xrange(rev, len(self)): | |
1295 | del self.nodemap[self.node(x)] |
|
1318 | del self.nodemap[self.node(x)] | |
1296 |
|
1319 | |||
1297 | del self.index[rev:-1] |
|
1320 | del self.index[rev:-1] | |
1298 |
|
1321 | |||
1299 | def checksize(self): |
|
1322 | def checksize(self): | |
1300 | expected = 0 |
|
1323 | expected = 0 | |
1301 | if self.count(): |
|
1324 | if len(self): | |
1302 | expected = max(0, self.end(self.count() - 1)) |
|
1325 | expected = max(0, self.end(len(self) - 1)) | |
1303 |
|
1326 | |||
1304 | try: |
|
1327 | try: | |
1305 | f = self.opener(self.datafile) |
|
1328 | f = self.opener(self.datafile) | |
@@ -1320,10 +1343,10 class revlog(object): | |||||
1320 | di = actual - (i * s) |
|
1343 | di = actual - (i * s) | |
1321 | if self._inline: |
|
1344 | if self._inline: | |
1322 | databytes = 0 |
|
1345 | databytes = 0 | |
1323 | for r in xrange(self.count()): |
|
1346 | for r in self: | |
1324 | databytes += max(0, self.length(r)) |
|
1347 | databytes += max(0, self.length(r)) | |
1325 | dd = 0 |
|
1348 | dd = 0 | |
1326 | di = actual - self.count() * s - databytes |
|
1349 | di = actual - len(self) * s - databytes | |
1327 | except IOError, inst: |
|
1350 | except IOError, inst: | |
1328 | if inst.errno != errno.ENOENT: |
|
1351 | if inst.errno != errno.ENOENT: | |
1329 | raise |
|
1352 | raise |
@@ -9,7 +9,7 | |||||
9 |
|
9 | |||
10 | from i18n import _ |
|
10 | from i18n import _ | |
11 | import changelog, httprangereader |
|
11 | import changelog, httprangereader | |
12 | import repo, localrepo, manifest, util |
|
12 | import repo, localrepo, manifest, util, store | |
13 | import urllib, urllib2, errno |
|
13 | import urllib, urllib2, errno | |
14 |
|
14 | |||
15 | class rangereader(httprangereader.httprangereader): |
|
15 | class rangereader(httprangereader.httprangereader): | |
@@ -55,14 +55,13 class statichttprepository(localrepo.loc | |||||
55 |
|
55 | |||
56 | # setup store |
|
56 | # setup store | |
57 | if "store" in requirements: |
|
57 | if "store" in requirements: | |
58 | self.encodefn = util.encodefilename |
|
|||
59 | self.decodefn = util.decodefilename |
|
|||
60 | self.spath = self.path + "/store" |
|
58 | self.spath = self.path + "/store" | |
61 | else: |
|
59 | else: | |
62 | self.encodefn = lambda x: x |
|
|||
63 | self.decodefn = lambda x: x |
|
|||
64 | self.spath = self.path |
|
60 | self.spath = self.path | |
65 | self.sopener = util.encodedopener(opener(self.spath), self.encodefn) |
|
61 | self.encodefn = store.encodefn(requirements) | |
|
62 | so = opener(self.spath) | |||
|
63 | self.sopener = lambda path, *args, **kw: so( | |||
|
64 | self.encodefn(path), *args, **kw) | |||
66 |
|
65 | |||
67 | self.manifest = manifest.manifest(self.sopener) |
|
66 | self.manifest = manifest.manifest(self.sopener) | |
68 | self.changelog = changelog.changelog(self.sopener) |
|
67 | self.changelog = changelog.changelog(self.sopener) |
@@ -5,41 +5,12 | |||||
5 | # This software may be used and distributed according to the terms |
|
5 | # This software may be used and distributed according to the terms | |
6 | # of the GNU General Public License, incorporated herein by reference. |
|
6 | # of the GNU General Public License, incorporated herein by reference. | |
7 |
|
7 | |||
8 | import |
|
8 | import util, lock | |
9 |
|
9 | |||
10 | # if server supports streaming clone, it advertises "stream" |
|
10 | # if server supports streaming clone, it advertises "stream" | |
11 | # capability with value that is version+flags of repo it is serving. |
|
11 | # capability with value that is version+flags of repo it is serving. | |
12 | # client only streams if it can read that repo format. |
|
12 | # client only streams if it can read that repo format. | |
13 |
|
13 | |||
14 | def walkrepo(root): |
|
|||
15 | '''iterate over metadata files in repository. |
|
|||
16 | walk in natural (sorted) order. |
|
|||
17 | yields 2-tuples: name of .d or .i file, size of file.''' |
|
|||
18 |
|
||||
19 | strip_count = len(root) + len(os.sep) |
|
|||
20 | def walk(path, recurse): |
|
|||
21 | for e, kind, st in osutil.listdir(path, stat=True): |
|
|||
22 | pe = os.path.join(path, e) |
|
|||
23 | if kind == stat.S_IFDIR: |
|
|||
24 | if recurse: |
|
|||
25 | for x in walk(pe, True): |
|
|||
26 | yield x |
|
|||
27 | else: |
|
|||
28 | if kind != stat.S_IFREG or len(e) < 2: |
|
|||
29 | continue |
|
|||
30 | sfx = e[-2:] |
|
|||
31 | if sfx in ('.d', '.i'): |
|
|||
32 | yield pe[strip_count:], st.st_size |
|
|||
33 | # write file data first |
|
|||
34 | for x in walk(os.path.join(root, 'data'), True): |
|
|||
35 | yield x |
|
|||
36 | # write manifest before changelog |
|
|||
37 | meta = list(walk(root, False)) |
|
|||
38 | meta.sort() |
|
|||
39 | meta.reverse() |
|
|||
40 | for x in meta: |
|
|||
41 | yield x |
|
|||
42 |
|
||||
43 | # stream file format is simple. |
|
14 | # stream file format is simple. | |
44 | # |
|
15 | # | |
45 | # server writes out line that says how many files, how many total |
|
16 | # server writes out line that says how many files, how many total | |
@@ -60,28 +31,14 def stream_out(repo, fileobj, untrusted= | |||||
60 | fileobj.write('1\n') |
|
31 | fileobj.write('1\n') | |
61 | return |
|
32 | return | |
62 |
|
33 | |||
63 | # get consistent snapshot of repo. lock during scan so lock not |
|
|||
64 | # needed while we stream, and commits can happen. |
|
|||
65 | repolock = None |
|
|||
66 | try: |
|
34 | try: | |
67 | try: |
|
35 | entries, total_bytes = repo.storefiles() | |
68 | repolock = repo.lock() |
|
|||
69 |
|
|
36 | except (lock.LockHeld, lock.LockUnavailable), inst: | |
70 |
|
|
37 | repo.ui.warn('locking the repository failed: %s\n' % (inst,)) | |
71 |
|
|
38 | fileobj.write('2\n') | |
72 |
|
|
39 | return | |
73 |
|
40 | |||
74 |
|
|
41 | fileobj.write('0\n') | |
75 | repo.ui.debug('scanning\n') |
|
|||
76 | entries = [] |
|
|||
77 | total_bytes = 0 |
|
|||
78 | for name, size in walkrepo(repo.spath): |
|
|||
79 | name = repo.decodefn(util.pconvert(name)) |
|
|||
80 | entries.append((name, size)) |
|
|||
81 | total_bytes += size |
|
|||
82 | finally: |
|
|||
83 | del repolock |
|
|||
84 |
|
||||
85 | repo.ui.debug('%d files, %d bytes to transfer\n' % |
|
42 | repo.ui.debug('%d files, %d bytes to transfer\n' % | |
86 | (len(entries), total_bytes)) |
|
43 | (len(entries), total_bytes)) | |
87 | fileobj.write('%d %d\n' % (len(entries), total_bytes)) |
|
44 | fileobj.write('%d %d\n' % (len(entries), total_bytes)) |
@@ -122,6 +122,36 def xmlescape(text): | |||||
122 | .replace("'", ''')) # ' invalid in HTML |
|
122 | .replace("'", ''')) # ' invalid in HTML | |
123 | return re.sub('[\x00-\x08\x0B\x0C\x0E-\x1F]', ' ', text) |
|
123 | return re.sub('[\x00-\x08\x0B\x0C\x0E-\x1F]', ' ', text) | |
124 |
|
124 | |||
|
125 | _escapes = [ | |||
|
126 | ('\\', '\\\\'), ('"', '\\"'), ('\t', '\\t'), ('\n', '\\n'), | |||
|
127 | ('\r', '\\r'), ('\f', '\\f'), ('\b', '\\b'), | |||
|
128 | ] | |||
|
129 | ||||
|
130 | def json(obj): | |||
|
131 | if obj is None or obj is False or obj is True: | |||
|
132 | return {None: 'null', False: 'false', True: 'true'}[obj] | |||
|
133 | elif isinstance(obj, int) or isinstance(obj, float): | |||
|
134 | return str(obj) | |||
|
135 | elif isinstance(obj, str): | |||
|
136 | for k, v in _escapes: | |||
|
137 | obj = obj.replace(k, v) | |||
|
138 | return '"%s"' % obj | |||
|
139 | elif isinstance(obj, unicode): | |||
|
140 | return json(obj.encode('utf-8')) | |||
|
141 | elif hasattr(obj, 'keys'): | |||
|
142 | out = [] | |||
|
143 | for k, v in obj.iteritems(): | |||
|
144 | s = '%s: %s' % (json(k), json(v)) | |||
|
145 | out.append(s) | |||
|
146 | return '{' + ', '.join(out) + '}' | |||
|
147 | elif hasattr(obj, '__iter__'): | |||
|
148 | out = [] | |||
|
149 | for i in obj: | |||
|
150 | out.append(json(i)) | |||
|
151 | return '[' + ', '.join(out) + ']' | |||
|
152 | else: | |||
|
153 | raise TypeError('cannot encode type %s' % obj.__class__.__name__) | |||
|
154 | ||||
125 | filters = { |
|
155 | filters = { | |
126 | "addbreaks": nl2br, |
|
156 | "addbreaks": nl2br, | |
127 | "basename": os.path.basename, |
|
157 | "basename": os.path.basename, | |
@@ -150,5 +180,5 filters = { | |||||
150 | "user": lambda x: util.shortuser(x), |
|
180 | "user": lambda x: util.shortuser(x), | |
151 | "stringescape": lambda x: x.encode('string_escape'), |
|
181 | "stringescape": lambda x: x.encode('string_escape'), | |
152 | "xmlescape": xmlescape, |
|
182 | "xmlescape": xmlescape, | |
|
183 | "json": json, | |||
153 | } |
|
184 | } | |
154 |
|
@@ -81,18 +81,18 class templater(object): | |||||
81 | def __contains__(self, key): |
|
81 | def __contains__(self, key): | |
82 | return key in self.cache or key in self.map |
|
82 | return key in self.cache or key in self.map | |
83 |
|
83 | |||
84 | def __call__(self, t, **map): |
|
84 | def _template(self, t): | |
85 | '''perform expansion. |
|
85 | '''Get the template for the given template name. Use a local cache.''' | |
86 | t is name of map element to expand. |
|
|||
87 | map is added elements to use during expansion.''' |
|
|||
88 | if not t in self.cache: |
|
86 | if not t in self.cache: | |
89 | try: |
|
87 | try: | |
90 | self.cache[t] = file(self.map[t]).read() |
|
88 | self.cache[t] = file(self.map[t]).read() | |
91 | except IOError, inst: |
|
89 | except IOError, inst: | |
92 | raise IOError(inst.args[0], _('template file %s: %s') % |
|
90 | raise IOError(inst.args[0], _('template file %s: %s') % | |
93 | (self.map[t], inst.args[1])) |
|
91 | (self.map[t], inst.args[1])) | |
94 | tmpl = self.cache[t] |
|
92 | return self.cache[t] | |
95 |
|
93 | |||
|
94 | def _process(self, tmpl, map): | |||
|
95 | '''Render a template. Returns a generator.''' | |||
96 | while tmpl: |
|
96 | while tmpl: | |
97 | m = self.template_re.search(tmpl) |
|
97 | m = self.template_re.search(tmpl) | |
98 | if not m: |
|
98 | if not m: | |
@@ -114,18 +114,39 class templater(object): | |||||
114 | v = v(**map) |
|
114 | v = v(**map) | |
115 | if format: |
|
115 | if format: | |
116 | if not hasattr(v, '__iter__'): |
|
116 | if not hasattr(v, '__iter__'): | |
117 | raise SyntaxError(_("Error expanding '%s%s'") |
|
117 | raise SyntaxError(_("Error expanding '%s%%%s'") | |
118 | % (key, format)) |
|
118 | % (key, format)) | |
119 | lm = map.copy() |
|
119 | lm = map.copy() | |
120 | for i in v: |
|
120 | for i in v: | |
121 | lm.update(i) |
|
121 | lm.update(i) | |
122 |
|
|
122 | t = self._template(format) | |
|
123 | yield self._process(t, lm) | |||
123 | else: |
|
124 | else: | |
124 | if fl: |
|
125 | if fl: | |
125 | for f in fl.split("|")[1:]: |
|
126 | for f in fl.split("|")[1:]: | |
126 | v = self.filters[f](v) |
|
127 | v = self.filters[f](v) | |
127 | yield v |
|
128 | yield v | |
128 |
|
129 | |||
|
130 | def __call__(self, t, **map): | |||
|
131 | '''Perform expansion. t is name of map element to expand. map contains | |||
|
132 | added elements for use during expansion. Is a generator.''' | |||
|
133 | tmpl = self._template(t) | |||
|
134 | iters = [self._process(tmpl, map)] | |||
|
135 | while iters: | |||
|
136 | try: | |||
|
137 | item = iters[0].next() | |||
|
138 | except StopIteration: | |||
|
139 | iters.pop(0) | |||
|
140 | continue | |||
|
141 | if isinstance(item, str): | |||
|
142 | yield item | |||
|
143 | elif item is None: | |||
|
144 | yield '' | |||
|
145 | elif hasattr(item, '__iter__'): | |||
|
146 | iters.insert(0, iter(item)) | |||
|
147 | else: | |||
|
148 | yield str(item) | |||
|
149 | ||||
129 | def templatepath(name=None): |
|
150 | def templatepath(name=None): | |
130 | '''return location of template file or directory (if no name). |
|
151 | '''return location of template file or directory (if no name). | |
131 | returns None if not found.''' |
|
152 | returns None if not found.''' |
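A standalone Python 2 sketch of the iterator-flattening loop that the reworked __call__ above relies on: nested iterables are walked with an explicit stack of iterators, plain strings pass straight through, None becomes the empty string, and anything else is stringified (the sample input is made up).

    def flatten(item):
        iters = [iter([item])]
        while iters:
            try:
                item = iters[0].next()        # Python 2 iterator protocol
            except StopIteration:
                iters.pop(0)
                continue
            if isinstance(item, str):
                yield item
            elif item is None:
                yield ''
            elif hasattr(item, '__iter__'):
                iters.insert(0, iter(item))   # descend into the nested iterable
            else:
                yield str(item)

    print(''.join(flatten(['a', ['b', None, [1, 'c']], 'd'])))   # ab1cd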
@@ -96,9 +96,13 def rollback(opener, file): | |||||
96 | files = {} |
|
96 | files = {} | |
97 | for l in open(file).readlines(): |
|
97 | for l in open(file).readlines(): | |
98 | f, o = l.split('\0') |
|
98 | f, o = l.split('\0') | |
99 | files[f] = o |
|
99 | files[f] = int(o) | |
100 | for f in files: |
|
100 | for f in files: | |
101 | o = files[f] |
|
101 | o = files[f] | |
|
102 | if o: | |||
102 | opener(f, "a").truncate(int(o)) |
|
103 | opener(f, "a").truncate(int(o)) | |
|
104 | else: | |||
|
105 | fn = opener(f).name | |||
|
106 | os.unlink(fn) | |||
103 | os.unlink(file) |
|
107 | os.unlink(file) | |
104 |
|
108 |
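To make the new zero-offset case concrete, a small made-up walk-through of the journal format that rollback() parses above: each line is "<file>\0<offset>", a positive offset truncates the file back to that size, and an offset of 0 now deletes a file the transaction created.

    journal = "data/a.i\x00612\ndata/new.i\x000\n"   # fabricated journal contents
    for line in journal.splitlines():
        f, o = line.split('\x00')
        if int(o):
            print('%s: truncate back to %s bytes' % (f, o))
        else:
            print('%s: unlink, it did not exist before the transaction' % f)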
@@ -312,15 +312,11 class ui(object): | |||||
312 | items = self._configitems(section, untrusted=untrusted, abort=True) |
|
312 | items = self._configitems(section, untrusted=untrusted, abort=True) | |
313 | if self.debugflag and not untrusted and self.ucdata: |
|
313 | if self.debugflag and not untrusted and self.ucdata: | |
314 | uitems = self._configitems(section, untrusted=True, abort=False) |
|
314 | uitems = self._configitems(section, untrusted=True, abort=False) | |
315 | keys = uitems.keys() |
|
315 | for k in util.sort(uitems): | |
316 | keys.sort() |
|
|||
317 | for k in keys: |
|
|||
318 | if uitems[k] != items.get(k): |
|
316 | if uitems[k] != items.get(k): | |
319 | self.warn(_("Ignoring untrusted configuration option " |
|
317 | self.warn(_("Ignoring untrusted configuration option " | |
320 | "%s.%s = %s\n") % (section, k, uitems[k])) |
|
318 | "%s.%s = %s\n") % (section, k, uitems[k])) | |
321 | x = items.items() |
|
319 | return util.sort(items.items()) | |
322 | x.sort() |
|
|||
323 | return x |
|
|||
324 |
|
320 | |||
325 | def walkconfig(self, untrusted=False): |
|
321 | def walkconfig(self, untrusted=False): | |
326 | cdata = self._get_cdata(untrusted) |
|
322 | cdata = self._get_cdata(untrusted) | |
@@ -335,14 +331,16 class ui(object): | |||||
335 |
|
331 | |||
336 | Searched in this order: $HGUSER, [ui] section of hgrcs, $EMAIL |
|
332 | Searched in this order: $HGUSER, [ui] section of hgrcs, $EMAIL | |
337 | and stop searching if one of these is set. |
|
333 | and stop searching if one of these is set. | |
338 | If not found, use ($LOGNAME or $USER or $LNAME or |
|
334 | If not found and ui.askusername is True, ask the user, else use | |
339 | $USERNAME) +"@full.hostname". |
|
335 | ($LOGNAME or $USER or $LNAME or $USERNAME) + "@full.hostname". | |
340 | """ |
|
336 | """ | |
341 | user = os.environ.get("HGUSER") |
|
337 | user = os.environ.get("HGUSER") | |
342 | if user is None: |
|
338 | if user is None: | |
343 | user = self.config("ui", "username") |
|
339 | user = self.config("ui", "username") | |
344 | if user is None: |
|
340 | if user is None: | |
345 | user = os.environ.get("EMAIL") |
|
341 | user = os.environ.get("EMAIL") | |
|
342 | if user is None and self.configbool("ui", "askusername"): | |||
|
343 | user = self.prompt(_("Enter a commit username:"), default=None) | |||
346 | if user is None: |
|
344 | if user is None: | |
347 | try: |
|
345 | try: | |
348 | user = '%s@%s' % (util.getuser(), socket.getfqdn()) |
|
346 | user = '%s@%s' % (util.getuser(), socket.getfqdn()) |
@@ -15,7 +15,9 platform-specific details from the core. | |||||
15 | from i18n import _ |
|
15 | from i18n import _ | |
16 | import cStringIO, errno, getpass, re, shutil, sys, tempfile |
|
16 | import cStringIO, errno, getpass, re, shutil, sys, tempfile | |
17 | import os, stat, threading, time, calendar, ConfigParser, locale, glob, osutil |
|
17 | import os, stat, threading, time, calendar, ConfigParser, locale, glob, osutil | |
18 | import urlparse |
|
18 | import imp, urlparse | |
|
19 | ||||
|
20 | # Python compatibility | |||
19 |
|
21 | |||
20 | try: |
|
22 | try: | |
21 | set = set |
|
23 | set = set | |
@@ -23,6 +25,30 try: | |||||
23 | except NameError: |
|
25 | except NameError: | |
24 | from sets import Set as set, ImmutableSet as frozenset |
|
26 | from sets import Set as set, ImmutableSet as frozenset | |
25 |
|
27 | |||
|
28 | _md5 = None | |||
|
29 | def md5(s): | |||
|
30 | global _md5 | |||
|
31 | if _md5 is None: | |||
|
32 | try: | |||
|
33 | import hashlib | |||
|
34 | _md5 = hashlib.md5 | |||
|
35 | except ImportError: | |||
|
36 | import md5 | |||
|
37 | _md5 = md5.md5 | |||
|
38 | return _md5(s) | |||
|
39 | ||||
|
40 | _sha1 = None | |||
|
41 | def sha1(s): | |||
|
42 | global _sha1 | |||
|
43 | if _sha1 is None: | |||
|
44 | try: | |||
|
45 | import hashlib | |||
|
46 | _sha1 = hashlib.sha1 | |||
|
47 | except ImportError: | |||
|
48 | import sha | |||
|
49 | _sha1 = sha.sha | |||
|
50 | return _sha1(s) | |||
|
51 | ||||
26 | try: |
|
52 | try: | |
27 | _encoding = os.environ.get("HGENCODING") |
|
53 | _encoding = os.environ.get("HGENCODING") | |
28 | if sys.platform == 'darwin' and not _encoding: |
|
54 | if sys.platform == 'darwin' and not _encoding: | |
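The md5()/sha1() helpers above defer the choice between hashlib (Python 2.5+) and the legacy md5/sha modules until first use, caching the resolved constructor in a module global. A self-contained sketch of the same pattern, with one example call:

    _sha1 = None
    def sha1(s=''):
        global _sha1
        if _sha1 is None:               # resolve the implementation lazily
            try:
                import hashlib
                _sha1 = hashlib.sha1
            except ImportError:
                import sha              # pre-2.5 fallback
                _sha1 = sha.sha
        return _sha1(s)

    print(sha1(b'hello').hexdigest())   # aaf4c61ddcc5e8a2dabede0f3b482cd9aea9434d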
@@ -217,8 +243,8 def filter(s, cmd): | |||||
217 | return pipefilter(s, cmd) |
|
243 | return pipefilter(s, cmd) | |
218 |
|
244 | |||
219 | def binary(s): |
|
245 | def binary(s): | |
220 |
"""return true if a string is binary data |
|
246 | """return true if a string is binary data""" | |
221 |
if s and '\0' in s |
|
247 | if s and '\0' in s: | |
222 | return True |
|
248 | return True | |
223 | return False |
|
249 | return False | |
224 |
|
250 | |||
@@ -226,6 +252,12 def unique(g): | |||||
226 | """return the uniq elements of iterable g""" |
|
252 | """return the uniq elements of iterable g""" | |
227 | return dict.fromkeys(g).keys() |
|
253 | return dict.fromkeys(g).keys() | |
228 |
|
254 | |||
|
255 | def sort(l): | |||
|
256 | if not isinstance(l, list): | |||
|
257 | l = list(l) | |||
|
258 | l.sort() | |||
|
259 | return l | |||
|
260 | ||||
229 | class Abort(Exception): |
|
261 | class Abort(Exception): | |
230 | """Raised if a command needs to print an error and exit.""" |
|
262 | """Raised if a command needs to print an error and exit.""" | |
231 |
|
263 | |||
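The small sort() helper above copies any non-list iterable into a list, sorts it in place, and returns it, which is why callers elsewhere in this diff can write util.sort(...) around dicts and generators. A trivial illustration:

    def sort(l):
        if not isinstance(l, list):
            l = list(l)
        l.sort()
        return l

    print(sort({'b': 1, 'a': 2}))           # ['a', 'b']  (iterating a dict yields its keys)
    print(sort(x * x for x in (3, 1, 2)))   # [1, 4, 9]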
@@ -251,12 +283,12 def expand_glob(pats): | |||||
251 | ret.append(p) |
|
283 | ret.append(p) | |
252 | return ret |
|
284 | return ret | |
253 |
|
285 | |||
254 | def patkind(name, dflt_pat='glob'): |
|
286 | def patkind(name, default): | |
255 | """Split a string into an optional pattern kind prefix and the |
|
287 | """Split a string into an optional pattern kind prefix and the | |
256 | actual pattern.""" |
|
288 | actual pattern.""" | |
257 | for prefix in 're', 'glob', 'path', 'relglob', 'relpath', 'relre': |
|
289 | for prefix in 're', 'glob', 'path', 'relglob', 'relpath', 'relre': | |
258 | if name.startswith(prefix + ':'): return name.split(':', 1) |
|
290 | if name.startswith(prefix + ':'): return name.split(':', 1) | |
259 | return dflt_pat, name |
|
291 | return default, name | |
260 |
|
292 | |||
261 | def globre(pat, head='^', tail='$'): |
|
293 | def globre(pat, head='^', tail='$'): | |
262 | "convert a glob pattern into a regexp" |
|
294 | "convert a glob pattern into a regexp" | |
@@ -386,17 +418,7 def canonpath(root, cwd, myname): | |||||
386 |
|
418 | |||
387 | raise Abort('%s not under root' % myname) |
|
419 | raise Abort('%s not under root' % myname) | |
388 |
|
420 | |||
389 | def matcher(canonroot, cwd='', names=[], inc=[], exc=[], src=None): |
|
421 | def matcher(canonroot, cwd='', names=[], inc=[], exc=[], src=None, dflt_pat='glob'): | |
390 | return _matcher(canonroot, cwd, names, inc, exc, 'glob', src) |
|
|||
391 |
|
||||
392 | def cmdmatcher(canonroot, cwd='', names=[], inc=[], exc=[], src=None, |
|
|||
393 | globbed=False, default=None): |
|
|||
394 | default = default or 'relpath' |
|
|||
395 | if default == 'relpath' and not globbed: |
|
|||
396 | names = expand_glob(names) |
|
|||
397 | return _matcher(canonroot, cwd, names, inc, exc, default, src) |
|
|||
398 |
|
||||
399 | def _matcher(canonroot, cwd, names, inc, exc, dflt_pat, src): |
|
|||
400 | """build a function to match a set of file patterns |
|
422 | """build a function to match a set of file patterns | |
401 |
|
423 | |||
402 | arguments: |
|
424 | arguments: | |
@@ -537,13 +559,29 def _matcher(canonroot, cwd, names, inc, | |||||
537 |
|
559 | |||
538 | _hgexecutable = None |
|
560 | _hgexecutable = None | |
539 |
|
561 | |||
|
562 | def main_is_frozen(): | |||
|
563 | """return True if we are a frozen executable. | |||
|
564 | ||||
|
565 | The code supports py2exe (most common, Windows only) and tools/freeze | |||
|
566 | (portable, not much used). | |||
|
567 | """ | |||
|
568 | return (hasattr(sys, "frozen") or # new py2exe | |||
|
569 | hasattr(sys, "importers") or # old py2exe | |||
|
570 | imp.is_frozen("__main__")) # tools/freeze | |||
|
571 | ||||
540 | def hgexecutable(): |
|
572 | def hgexecutable(): | |
541 | """return location of the 'hg' executable. |
|
573 | """return location of the 'hg' executable. | |
542 |
|
574 | |||
543 | Defaults to $HG or 'hg' in the search path. |
|
575 | Defaults to $HG or 'hg' in the search path. | |
544 | """ |
|
576 | """ | |
545 | if _hgexecutable is None: |
|
577 | if _hgexecutable is None: | |
546 | set_hgexecutable(os.environ.get('HG') or find_exe('hg', 'hg')) |
|
578 | hg = os.environ.get('HG') | |
|
579 | if hg: | |||
|
580 | set_hgexecutable(hg) | |||
|
581 | elif main_is_frozen(): | |||
|
582 | set_hgexecutable(sys.executable) | |||
|
583 | else: | |||
|
584 | set_hgexecutable(find_exe('hg', 'hg')) | |||
547 | return _hgexecutable |
|
585 | return _hgexecutable | |
548 |
|
586 | |||
549 | def set_hgexecutable(path): |
|
587 | def set_hgexecutable(path): | |
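main_is_frozen() above probes for a py2exe or tools/freeze bundle so that hgexecutable() can fall back to sys.executable. The same check works as a standalone snippet; under a plain interpreter it simply reports False.

    import imp, sys

    def main_is_frozen():
        return (hasattr(sys, "frozen") or      # new py2exe
                hasattr(sys, "importers") or   # old py2exe
                imp.is_frozen("__main__"))     # tools/freeze

    print(main_is_frozen())   # False when run from a normal Python install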
@@ -807,7 +845,7 def groupname(gid=None): | |||||
807 |
|
845 | |||
808 | # File system features |
|
846 | # File system features | |
809 |
|
847 | |||
810 | def checkfolding(path): |
|
848 | def checkcase(path): | |
811 | """ |
|
849 | """ | |
812 | Check whether the given path is on a case-sensitive filesystem |
|
850 | Check whether the given path is on a case-sensitive filesystem | |
813 |
|
851 | |||
@@ -827,6 +865,53 def checkfolding(path): | |||||
827 | except: |
|
865 | except: | |
828 | return True |
|
866 | return True | |
829 |
|
867 | |||
|
868 | _fspathcache = {} | |||
|
869 | def fspath(name, root): | |||
|
870 | '''Get name in the case stored in the filesystem | |||
|
871 | ||||
|
872 | The name is either relative to root, or it is an absolute path starting | |||
|
873 | with root. Note that this function is unnecessary, and should not be | |||
|
874 | called, for case-sensitive filesystems (simply because it's expensive). | |||
|
875 | ''' | |||
|
876 | # If name is absolute, make it relative | |||
|
877 | if name.lower().startswith(root.lower()): | |||
|
878 | l = len(root) | |||
|
879 | if name[l] == os.sep or name[l] == os.altsep: | |||
|
880 | l = l + 1 | |||
|
881 | name = name[l:] | |||
|
882 | ||||
|
883 | if not os.path.exists(os.path.join(root, name)): | |||
|
884 | return None | |||
|
885 | ||||
|
886 | seps = os.sep | |||
|
887 | if os.altsep: | |||
|
888 | seps = seps + os.altsep | |||
|
889 | # Protect backslashes. This gets silly very quickly. | |||
|
890 | seps.replace('\\','\\\\') | |||
|
891 | pattern = re.compile(r'([^%s]+)|([%s]+)' % (seps, seps)) | |||
|
892 | dir = os.path.normcase(os.path.normpath(root)) | |||
|
893 | result = [] | |||
|
894 | for part, sep in pattern.findall(name): | |||
|
895 | if sep: | |||
|
896 | result.append(sep) | |||
|
897 | continue | |||
|
898 | ||||
|
899 | if dir not in _fspathcache: | |||
|
900 | _fspathcache[dir] = os.listdir(dir) | |||
|
901 | contents = _fspathcache[dir] | |||
|
902 | ||||
|
903 | lpart = part.lower() | |||
|
904 | for n in contents: | |||
|
905 | if n.lower() == lpart: | |||
|
906 | result.append(n) | |||
|
907 | break | |||
|
908 | else: | |||
|
909 | # Cannot happen, as the file exists! | |||
|
910 | result.append(part) | |||
|
911 | dir = os.path.join(dir, lpart) | |||
|
912 | ||||
|
913 | return ''.join(result) | |||
|
914 | ||||
830 | def checkexec(path): |
|
915 | def checkexec(path): | |
831 | """ |
|
916 | """ | |
832 | Check whether the given path is on a filesystem with UNIX-like exec flags |
|
917 | Check whether the given path is on a filesystem with UNIX-like exec flags | |
@@ -854,12 +939,6 def checkexec(path): | |||||
854 | return False |
|
939 | return False | |
855 | return not (new_file_has_exec or exec_flags_cannot_flip) |
|
940 | return not (new_file_has_exec or exec_flags_cannot_flip) | |
856 |
|
941 | |||
857 | def execfunc(path, fallback): |
|
|||
858 | '''return an is_exec() function with default to fallback''' |
|
|||
859 | if checkexec(path): |
|
|||
860 | return lambda x: is_exec(os.path.join(path, x)) |
|
|||
861 | return fallback |
|
|||
862 |
|
||||
863 | def checklink(path): |
|
942 | def checklink(path): | |
864 | """check whether the given path is on a symlink-capable filesystem""" |
|
943 | """check whether the given path is on a symlink-capable filesystem""" | |
865 | # mktemp is not racy because symlink creation will fail if the |
|
944 | # mktemp is not racy because symlink creation will fail if the | |
@@ -872,12 +951,6 def checklink(path): | |||||
872 | except (OSError, AttributeError): |
|
951 | except (OSError, AttributeError): | |
873 | return False |
|
952 | return False | |
874 |
|
953 | |||
875 | def linkfunc(path, fallback): |
|
|||
876 | '''return an is_link() function with default to fallback''' |
|
|||
877 | if checklink(path): |
|
|||
878 | return lambda x: os.path.islink(os.path.join(path, x)) |
|
|||
879 | return fallback |
|
|||
880 |
|
||||
881 | _umask = os.umask(0) |
|
954 | _umask = os.umask(0) | |
882 | os.umask(_umask) |
|
955 | os.umask(_umask) | |
883 |
|
956 | |||
@@ -1044,12 +1117,12 if os.name == 'nt': | |||||
1044 | # through the current COMSPEC. cmd.exe suppress enclosing quotes. |
|
1117 | # through the current COMSPEC. cmd.exe suppress enclosing quotes. | |
1045 | return '"' + cmd + '"' |
|
1118 | return '"' + cmd + '"' | |
1046 |
|
1119 | |||
1047 | def popen(command): |
|
1120 | def popen(command, mode='r'): | |
1048 | # Work around "popen spawned process may not write to stdout |
|
1121 | # Work around "popen spawned process may not write to stdout | |
1049 | # under windows" |
|
1122 | # under windows" | |
1050 | # http://bugs.python.org/issue1366 |
|
1123 | # http://bugs.python.org/issue1366 | |
1051 | command += " 2> %s" % nulldev |
|
1124 | command += " 2> %s" % nulldev | |
1052 | return os.popen(quotecommand(command)) |
|
1125 | return os.popen(quotecommand(command), mode) | |
1053 |
|
1126 | |||
1054 | def explain_exit(code): |
|
1127 | def explain_exit(code): | |
1055 | return _("exited with status %d") % code, code |
|
1128 | return _("exited with status %d") % code, code | |
@@ -1210,8 +1283,8 else: | |||||
1210 | def quotecommand(cmd): |
|
1283 | def quotecommand(cmd): | |
1211 | return cmd |
|
1284 | return cmd | |
1212 |
|
1285 | |||
1213 | def popen(command): |
|
1286 | def popen(command, mode='r'): | |
1214 | return os.popen(command) |
|
1287 | return os.popen(command, mode) | |
1215 |
|
1288 | |||
1216 | def testpid(pid): |
|
1289 | def testpid(pid): | |
1217 | '''return False if pid dead, True if running or not sure''' |
|
1290 | '''return False if pid dead, True if running or not sure''' | |
@@ -1272,39 +1345,6 def find_exe(name, default=None): | |||||
1272 | return name |
|
1345 | return name | |
1273 | return find_in_path(name, os.environ.get('PATH', ''), default=default) |
|
1346 | return find_in_path(name, os.environ.get('PATH', ''), default=default) | |
1274 |
|
1347 | |||
1275 | def _buildencodefun(): |
|
|||
1276 | e = '_' |
|
|||
1277 | win_reserved = [ord(x) for x in '\\:*?"<>|'] |
|
|||
1278 | cmap = dict([ (chr(x), chr(x)) for x in xrange(127) ]) |
|
|||
1279 | for x in (range(32) + range(126, 256) + win_reserved): |
|
|||
1280 | cmap[chr(x)] = "~%02x" % x |
|
|||
1281 | for x in range(ord("A"), ord("Z")+1) + [ord(e)]: |
|
|||
1282 | cmap[chr(x)] = e + chr(x).lower() |
|
|||
1283 | dmap = {} |
|
|||
1284 | for k, v in cmap.iteritems(): |
|
|||
1285 | dmap[v] = k |
|
|||
1286 | def decode(s): |
|
|||
1287 | i = 0 |
|
|||
1288 | while i < len(s): |
|
|||
1289 | for l in xrange(1, 4): |
|
|||
1290 | try: |
|
|||
1291 | yield dmap[s[i:i+l]] |
|
|||
1292 | i += l |
|
|||
1293 | break |
|
|||
1294 | except KeyError: |
|
|||
1295 | pass |
|
|||
1296 | else: |
|
|||
1297 | raise KeyError |
|
|||
1298 | return (lambda s: "".join([cmap[c] for c in s]), |
|
|||
1299 | lambda s: "".join(list(decode(s)))) |
|
|||
1300 |
|
||||
1301 | encodefilename, decodefilename = _buildencodefun() |
|
|||
1302 |
|
||||
1303 | def encodedopener(openerfn, fn): |
|
|||
1304 | def o(path, *args, **kw): |
|
|||
1305 | return openerfn(fn(path), *args, **kw) |
|
|||
1306 | return o |
|
|||
1307 |
|
||||
1308 | def mktempcopy(name, emptyok=False, createmode=None): |
|
1348 | def mktempcopy(name, emptyok=False, createmode=None): | |
1309 | """Create a temporary file with the same contents from name |
|
1349 | """Create a temporary file with the same contents from name | |
1310 |
|
1350 |
@@ -7,7 +7,7 | |||||
7 |
|
7 | |||
8 | from node import nullid, short |
|
8 | from node import nullid, short | |
9 | from i18n import _ |
|
9 | from i18n import _ | |
10 | import revlog |
|
10 | import revlog, util | |
11 |
|
11 | |||
12 | def verify(repo): |
|
12 | def verify(repo): | |
13 | lock = repo.lock() |
|
13 | lock = repo.lock() | |
@@ -17,265 +17,201 def verify(repo): | |||||
17 | del lock |
|
17 | del lock | |
18 |
|
18 | |||
19 | def _verify(repo): |
|
19 | def _verify(repo): | |
|
20 | mflinkrevs = {} | |||
20 | filelinkrevs = {} |
|
21 | filelinkrevs = {} | |
21 | filenodes = {} |
|
22 | filenodes = {} | |
22 |
|
|
23 | revisions = 0 | |
23 | firstbad = [None] |
|
24 | badrevs = {} | |
24 | errors = [0] |
|
25 | errors = [0] | |
25 | warnings = [0] |
|
26 | warnings = [0] | |
26 | neededmanifests = {} |
|
27 | ui = repo.ui | |
|
28 | cl = repo.changelog | |||
|
29 | mf = repo.manifest | |||
27 |
|
30 | |||
28 | def err(linkrev, msg, filename=None): |
|
31 | def err(linkrev, msg, filename=None): | |
29 | if linkrev != None: |
|
32 | if linkrev != None: | |
30 |
|
|
33 | badrevs[linkrev] = True | |
31 | firstbad[0] = min(firstbad[0], linkrev) |
|
|||
32 |
|
|
34 | else: | |
33 |
|
|
35 | linkrev = '?' | |
34 | else: |
|
|||
35 | linkrev = "?" |
|
|||
36 | msg = "%s: %s" % (linkrev, msg) |
|
36 | msg = "%s: %s" % (linkrev, msg) | |
37 | if filename: |
|
37 | if filename: | |
38 | msg = "%s@%s" % (filename, msg) |
|
38 | msg = "%s@%s" % (filename, msg) | |
39 |
|
|
39 | ui.warn(" " + msg + "\n") | |
40 | errors[0] += 1 |
|
40 | errors[0] += 1 | |
41 |
|
41 | |||
|
42 | def exc(linkrev, msg, inst, filename=None): | |||
|
43 | if isinstance(inst, KeyboardInterrupt): | |||
|
44 | ui.warn(_("interrupted")) | |||
|
45 | raise | |||
|
46 | err(linkrev, "%s: %s" % (msg, inst), filename) | |||
|
47 | ||||
42 | def warn(msg): |
|
48 | def warn(msg): | |
43 |
|
|
49 | ui.warn(msg + "\n") | |
44 | warnings[0] += 1 |
|
50 | warnings[0] += 1 | |
45 |
|
51 | |||
46 | def checksize(obj, name): |
|
52 | def checklog(obj, name): | |
|
53 | if not len(obj) and (havecl or havemf): | |||
|
54 | err(0, _("empty or missing %s") % name) | |||
|
55 | return | |||
|
56 | ||||
47 | d = obj.checksize() |
|
57 | d = obj.checksize() | |
48 | if d[0]: |
|
58 | if d[0]: | |
49 | err(None, _("data length off by %d bytes") % d[0], name) |
|
59 | err(None, _("data length off by %d bytes") % d[0], name) | |
50 | if d[1]: |
|
60 | if d[1]: | |
51 | err(None, _("index contains %d extra bytes") % d[1], name) |
|
61 | err(None, _("index contains %d extra bytes") % d[1], name) | |
52 |
|
62 | |||
53 | def checkversion(obj, name): |
|
|||
54 | if obj.version != revlog.REVLOGV0: |
|
63 | if obj.version != revlog.REVLOGV0: | |
55 | if not revlogv1: |
|
64 | if not revlogv1: | |
56 | warn(_("warning: `%s' uses revlog format 1") % name) |
|
65 | warn(_("warning: `%s' uses revlog format 1") % name) | |
57 | elif revlogv1: |
|
66 | elif revlogv1: | |
58 | warn(_("warning: `%s' uses revlog format 0") % name) |
|
67 | warn(_("warning: `%s' uses revlog format 0") % name) | |
59 |
|
68 | |||
60 | revlogv1 = repo.changelog.version != revlog.REVLOGV0 |
|
69 | def checkentry(obj, i, node, seen, linkrevs, f): | |
61 | if repo.ui.verbose or not revlogv1: |
|
70 | lr = obj.linkrev(node) | |
62 | repo.ui.status(_("repository uses revlog format %d\n") % |
|
71 | if lr < 0 or (havecl and lr not in linkrevs): | |
|
72 | t = "unexpected" | |||
|
73 | if lr < 0 or lr >= len(cl): | |||
|
74 | t = "nonexistent" | |||
|
75 | err(None, _("rev %d point to %s changeset %d") % (i, t, lr), f) | |||
|
76 | if linkrevs: | |||
|
77 | warn(_(" (expected %s)") % " ".join(map(str,linkrevs))) | |||
|
78 | lr = None # can't be trusted | |||
|
79 | ||||
|
80 | try: | |||
|
81 | p1, p2 = obj.parents(node) | |||
|
82 | if p1 not in seen and p1 != nullid: | |||
|
83 | err(lr, _("unknown parent 1 %s of %s") % | |||
|
84 | (short(p1), short(n)), f) | |||
|
85 | if p2 not in seen and p2 != nullid: | |||
|
86 | err(lr, _("unknown parent 2 %s of %s") % | |||
|
87 | (short(p2), short(p1)), f) | |||
|
88 | except Exception, inst: | |||
|
89 | exc(lr, _("checking parents of %s") % short(node), inst, f) | |||
|
90 | ||||
|
91 | if node in seen: | |||
|
92 | err(lr, _("duplicate revision %d (%d)") % (i, seen[n]), f) | |||
|
93 | seen[n] = i | |||
|
94 | return lr | |||
|
95 | ||||
|
96 | revlogv1 = cl.version != revlog.REVLOGV0 | |||
|
97 | if ui.verbose or not revlogv1: | |||
|
98 | ui.status(_("repository uses revlog format %d\n") % | |||
63 | (revlogv1 and 1 or 0)) |
|
99 | (revlogv1 and 1 or 0)) | |
64 |
|
100 | |||
65 |
havecl = |
|
101 | havecl = len(cl) > 0 | |
66 | seen = {} |
|
102 | havemf = len(mf) > 0 | |
67 | repo.ui.status(_("checking changesets\n")) |
|
|||
68 | if repo.changelog.count() == 0 and repo.manifest.count() > 1: |
|
|||
69 | havecl = 0 |
|
|||
70 | err(0, _("empty or missing 00changelog.i")) |
|
|||
71 | else: |
|
|||
72 | checksize(repo.changelog, "changelog") |
|
|||
73 |
|
103 | |||
74 | for i in xrange(repo.changelog.count()): |
|
104 | ui.status(_("checking changesets\n")) | |
75 | changesets += 1 |
|
105 | seen = {} | |
76 | n = repo.changelog.node(i) |
|
106 | checklog(cl, "changelog") | |
77 | l = repo.changelog.linkrev(n) |
|
107 | for i in repo: | |
78 |
|
|
108 | n = cl.node(i) | |
79 | err(i, _("incorrect link (%d) for changeset") %(l)) |
|
109 | checkentry(cl, i, n, seen, [i], "changelog") | |
80 | if n in seen: |
|
|||
81 | err(i, _("duplicates changeset at revision %d") % seen[n]) |
|
|||
82 | seen[n] = i |
|
|||
83 |
|
110 | |||
84 | for p in repo.changelog.parents(n): |
|
|||
85 | if p not in repo.changelog.nodemap: |
|
|||
86 | err(i, _("changeset has unknown parent %s") % short(p)) |
|
|||
87 | try: |
|
111 | try: | |
88 | changes = repo.changelog.read(n) |
|
112 | changes = cl.read(n) | |
89 | except KeyboardInterrupt: |
|
113 | mflinkrevs.setdefault(changes[0], []).append(i) | |
90 | repo.ui.warn(_("interrupted")) |
|
|||
91 | raise |
|
|||
92 | except Exception, inst: |
|
|||
93 | err(i, _("unpacking changeset: %s") % inst) |
|
|||
94 | continue |
|
|||
95 |
|
||||
96 | if changes[0] not in neededmanifests: |
|
|||
97 | neededmanifests[changes[0]] = i |
|
|||
98 |
|
||||
99 | for f in changes[3]: |
|
114 | for f in changes[3]: | |
100 | filelinkrevs.setdefault(f, []).append(i) |
|
115 | filelinkrevs.setdefault(f, []).append(i) | |
101 |
|
116 | except Exception, inst: | ||
102 | seen = {} |
|
117 | exc(i, _("unpacking changeset %s") % short(n), inst) | |
103 | repo.ui.status(_("checking manifests\n")) |
|
|||
104 | if repo.changelog.count() > 0 and repo.manifest.count() == 0: |
|
|||
105 | havemf = 0 |
|
|||
106 | err(0, _("empty or missing 00manifest.i")) |
|
|||
107 | else: |
|
|||
108 | checkversion(repo.manifest, "manifest") |
|
|||
109 | checksize(repo.manifest, "manifest") |
|
|||
110 |
|
||||
111 | for i in xrange(repo.manifest.count()): |
|
|||
112 | n = repo.manifest.node(i) |
|
|||
113 | l = repo.manifest.linkrev(n) |
|
|||
114 |
|
118 | |||
115 | if l < 0 or (havecl and l >= repo.changelog.count()): |
|
119 | ui.status(_("checking manifests\n")) | |
116 | err(None, _("bad link (%d) at manifest revision %d") % (l, i)) |
|
120 | seen = {} | |
117 |
|
121 | checklog(mf, "manifest") | ||
118 | if n in neededmanifests: |
|
122 | for i in mf: | |
119 | del neededmanifests[n] |
|
123 | n = mf.node(i) | |
120 |
|
124 | lr = checkentry(mf, i, n, seen, mflinkrevs.get(n, []), "manifest") | ||
121 |
if n in |
|
125 | if n in mflinkrevs: | |
122 | err(l, _("duplicates manifest from %d") % seen[n]) |
|
126 | del mflinkrevs[n] | |
123 |
|
||||
124 | seen[n] = l |
|
|||
125 |
|
||||
126 | for p in repo.manifest.parents(n): |
|
|||
127 | if p not in repo.manifest.nodemap: |
|
|||
128 | err(l, _("manifest has unknown parent %s") % short(p)) |
|
|||
129 |
|
127 | |||
130 | try: |
|
128 | try: | |
131 |
for f, fn in |
|
129 | for f, fn in mf.readdelta(n).iteritems(): | |
|
130 | if not f: | |||
|
131 | err(lr, _("file without name in manifest")) | |||
|
132 | elif f != "/dev/null": | |||
132 | fns = filenodes.setdefault(f, {}) |
|
133 | fns = filenodes.setdefault(f, {}) | |
133 | if fn not in fns: |
|
134 | if fn not in fns: | |
134 | fns[fn] = n |
|
135 | fns[fn] = n | |
135 | except KeyboardInterrupt: |
|
|||
136 | repo.ui.warn(_("interrupted")) |
|
|||
137 | raise |
|
|||
138 | except Exception, inst: |
|
136 | except Exception, inst: | |
139 |
e |
|
137 | exc(lr, _("reading manifest delta %s") % short(n), inst) | |
140 | continue |
|
|||
141 |
|
||||
142 | repo.ui.status(_("crosschecking files in changesets and manifests\n")) |
|
|||
143 |
|
138 | |||
144 | if havemf > 0: |
|
139 | ui.status(_("crosschecking files in changesets and manifests\n")) | |
145 | nm = [(c, m) for m, c in neededmanifests.items()] |
|
|||
146 | nm.sort() |
|
|||
147 | for c, m in nm: |
|
|||
148 | err(c, _("changeset refers to unknown manifest %s") % short(m)) |
|
|||
149 | del neededmanifests, nm |
|
|||
150 |
|
||||
151 | if havecl: |
|
|||
152 | fl = filenodes.keys() |
|
|||
153 | fl.sort() |
|
|||
154 | for f in fl: |
|
|||
155 | if f not in filelinkrevs: |
|
|||
156 | lrs = [repo.manifest.linkrev(n) for n in filenodes[f]] |
|
|||
157 | lrs.sort() |
|
|||
158 | err(lrs[0], _("in manifest but not in changeset"), f) |
|
|||
159 | del fl |
|
|||
160 |
|
140 | |||
161 | if havemf: |
|
141 | if havemf: | |
162 | fl = filelinkrevs.keys() |
|
142 | for c, m in util.sort([(c, m) for m in mflinkrevs for c in mflinkrevs[m]]): | |
163 | fl.sort() |
|
143 | err(c, _("changeset refers to unknown manifest %s") % short(m)) | |
164 | for f in fl: |
|
144 | del mflinkrevs | |
|
145 | ||||
|
146 | for f in util.sort(filelinkrevs): | |||
165 | if f not in filenodes: |
|
147 | if f not in filenodes: | |
166 | lr = filelinkrevs[f][0] |
|
148 | lr = filelinkrevs[f][0] | |
167 | err(lr, _("in changeset but not in manifest"), f) |
|
149 | err(lr, _("in changeset but not in manifest"), f) | |
168 | del fl |
|
|||
169 |
|
150 | |||
170 | repo.ui.status(_("checking files\n")) |
|
151 | if havecl: | |
171 | ff = dict.fromkeys(filenodes.keys() + filelinkrevs.keys()).keys() |
|
152 | for f in util.sort(filenodes): | |
172 | ff.sort() |
|
153 | if f not in filelinkrevs: | |
173 | for f in ff: |
|
154 | try: | |
174 | if f == "/dev/null": |
|
155 | lr = min([repo.file(f).linkrev(n) for n in filenodes[f]]) | |
175 |
|
|
156 | except: | |
176 | files += 1 |
|
157 | lr = None | |
177 | if not f: |
|
158 | err(lr, _("in manifest but not in changeset"), f) | |
178 | lr = filelinkrevs[f][0] |
|
159 | ||
179 | err(lr, _("file without name in manifest")) |
|
160 | ui.status(_("checking files\n")) | |
180 | continue |
|
161 | files = util.sort(util.unique(filenodes.keys() + filelinkrevs.keys())) | |
|
162 | for f in files: | |||
181 | fl = repo.file(f) |
|
163 | fl = repo.file(f) | |
182 |
check |
|
164 | checklog(fl, f) | |
183 | checksize(fl, f) |
|
|||
184 |
|
||||
185 | if fl.count() == 0: |
|
|||
186 | err(filelinkrevs[f][0], _("empty or missing revlog"), f) |
|
|||
187 | continue |
|
|||
188 |
|
||||
189 | seen = {} |
|
165 | seen = {} | |
190 | nodes = {nullid: 1} |
|
166 | for i in fl: | |
191 | for i in xrange(fl.count()): |
|
|||
192 | revisions += 1 |
|
167 | revisions += 1 | |
193 | n = fl.node(i) |
|
168 | n = fl.node(i) | |
194 | flr = fl.linkrev(n) |
|
169 | lr = checkentry(fl, i, n, seen, filelinkrevs.get(f, []), f) | |
195 |
|
||||
196 | if flr < 0 or (havecl and flr not in filelinkrevs.get(f, [])): |
|
|||
197 | if flr < 0 or flr >= repo.changelog.count(): |
|
|||
198 | err(None, _("rev %d point to nonexistent changeset %d") |
|
|||
199 | % (i, flr), f) |
|
|||
200 | else: |
|
|||
201 | err(None, _("rev %d points to unexpected changeset %d") |
|
|||
202 | % (i, flr), f) |
|
|||
203 | if f in filelinkrevs: |
|
|||
204 | warn(_(" (expected %s)") % filelinkrevs[f][0]) |
|
|||
205 | flr = None # can't be trusted |
|
|||
206 | else: |
|
|||
207 | if havecl: |
|
|||
208 | filelinkrevs[f].remove(flr) |
|
|||
209 |
|
||||
210 | if n in seen: |
|
|||
211 | err(flr, _("duplicate revision %d") % i, f) |
|
|||
212 | if f in filenodes: |
|
170 | if f in filenodes: | |
213 | if havemf and n not in filenodes[f]: |
|
171 | if havemf and n not in filenodes[f]: | |
214 |
err( |
|
172 | err(lr, _("%s not in manifests") % (short(n)), f) | |
215 | else: |
|
173 | else: | |
216 | del filenodes[f][n] |
|
174 | del filenodes[f][n] | |
217 |
|
175 | |||
218 | # verify contents |
|
176 | # verify contents | |
219 | try: |
|
177 | try: | |
220 | t = fl.read(n) |
|
178 | t = fl.read(n) | |
221 | except KeyboardInterrupt: |
|
179 | rp = fl.renamed(n) | |
222 | repo.ui.warn(_("interrupted")) |
|
180 | if len(t) != fl.size(i): | |
223 | raise |
|
181 | if not fl._readmeta(n): # ancient copy? | |
|
182 | err(lr, _("unpacked size is %s, %s expected") % | |||
|
183 | (len(t), fl.size(i)), f) | |||
224 | except Exception, inst: |
|
184 | except Exception, inst: | |
225 |
e |
|
185 | exc(lr, _("unpacking %s") % short(n), inst, f) | |
226 |
|
||||
227 | # verify parents |
|
|||
228 | try: |
|
|||
229 | (p1, p2) = fl.parents(n) |
|
|||
230 | if p1 not in nodes: |
|
|||
231 | err(flr, _("unknown parent 1 %s of %s") % |
|
|||
232 | (short(p1), short(n)), f) |
|
|||
233 | if p2 not in nodes: |
|
|||
234 | err(flr, _("unknown parent 2 %s of %s") % |
|
|||
235 | (short(p2), short(p1)), f) |
|
|||
236 | except KeyboardInterrupt: |
|
|||
237 | repo.ui.warn(_("interrupted")) |
|
|||
238 | raise |
|
|||
239 | except Exception, inst: |
|
|||
240 | err(flr, _("checking parents of %s: %s") % (short(n), inst), f) |
|
|||
241 | nodes[n] = 1 |
|
|||
242 |
|
186 | |||
243 | # check renames |
|
187 | # check renames | |
244 | try: |
|
188 | try: | |
245 | rp = fl.renamed(n) |
|
|||
246 | if rp: |
|
189 | if rp: | |
247 | fl2 = repo.file(rp[0]) |
|
190 | fl2 = repo.file(rp[0]) | |
248 |
if fl2 |
|
191 | if not len(fl2): | |
249 |
err( |
|
192 | err(lr, _("empty or missing copy source revlog %s:%s") | |
250 | % (rp[0], short(rp[1])), f) |
|
193 | % (rp[0], short(rp[1])), f) | |
251 | elif rp[1] == nullid: |
|
194 | elif rp[1] == nullid: | |
252 |
err( |
|
195 | err(lr, _("copy source revision is nullid %s:%s") | |
253 | % (rp[0], short(rp[1])), f) |
|
196 | % (rp[0], short(rp[1])), f) | |
254 | else: |
|
197 | else: | |
255 | rev = fl2.rev(rp[1]) |
|
198 | rev = fl2.rev(rp[1]) | |
256 | except KeyboardInterrupt: |
|
|||
257 | repo.ui.warn(_("interrupted")) |
|
|||
258 | raise |
|
|||
259 | except Exception, inst: |
|
199 | except Exception, inst: | |
260 |
e |
|
200 | exc(lr, _("checking rename of %s") % short(n), inst, f) | |
261 | (short(n), inst), f) |
|
|||
262 |
|
201 | |||
263 | # cross-check |
|
202 | # cross-check | |
264 | if f in filenodes: |
|
203 | if f in filenodes: | |
265 |
fns = [( |
|
204 | fns = [(mf.linkrev(l), n) for n,l in filenodes[f].items()] | |
266 | for n in filenodes[f]] |
|
205 | for lr, node in util.sort(fns): | |
267 | fns.sort() |
|
|||
268 | for lr, node in fns: |
|
|||
269 | err(lr, _("%s in manifests not found") % short(node), f) |
|
206 | err(lr, _("%s in manifests not found") % short(node), f) | |
270 |
|
207 | |||
271 |
|
|
208 | ui.status(_("%d files, %d changesets, %d total revisions\n") % | |
272 |
(files, |
|
209 | (len(files), len(cl), revisions)) | |
273 |
|
||||
274 | if warnings[0]: |
|
210 | if warnings[0]: | |
275 |
|
|
211 | ui.warn(_("%d warnings encountered!\n") % warnings[0]) | |
276 | if errors[0]: |
|
212 | if errors[0]: | |
277 |
|
|
213 | ui.warn(_("%d integrity errors encountered!\n") % errors[0]) | |
278 |
if |
|
214 | if badrevs: | |
279 |
|
|
215 | ui.warn(_("(first damaged changeset appears to be %d)\n") | |
280 |
|
|
216 | % min(badrevs)) | |
281 | return 1 |
|
217 | return 1 |
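
The point of the checkentry() factoring is that one helper now enforces the same per-revision invariants (linkrev points at an expected changeset, both parents already seen, no duplicate nodes) for changelog, manifest and filelogs alike. The sketch below is illustrative only, not Mercurial's API: FakeLog, check_entry and the tuple layout are made-up names used to show the pattern in isolation.

    # Toy model of the shared per-revision check; not Mercurial code.
    NULLID = "\0" * 20

    class FakeLog(object):
        """Minimal stand-in for a revlog: a list of (node, p1, p2, linkrev)."""
        def __init__(self, entries):
            self.entries = entries
        def __iter__(self):
            return iter(range(len(self.entries)))
        def node(self, rev):
            return self.entries[rev][0]
        def _find(self, node):
            for entry in self.entries:
                if entry[0] == node:
                    return entry
            raise LookupError(node)
        def parents(self, node):
            entry = self._find(node)
            return entry[1], entry[2]
        def linkrev(self, node):
            return self._find(node)[3]

    def check_entry(log, rev, node, seen, expected_linkrevs, errors):
        """Validate one revision the way checkentry() above does:
        linkrev, parents and duplicates are checked in one place."""
        lr = log.linkrev(node)
        if lr not in expected_linkrevs:
            errors.append("rev %d points to unexpected changeset %d" % (rev, lr))
        p1, p2 = log.parents(node)
        for p in (p1, p2):
            if p != NULLID and p not in seen:
                errors.append("rev %d has unknown parent %r" % (rev, p))
        if node in seen:
            errors.append("duplicate node %r at rev %d" % (node, rev))
        seen[node] = rev
        return lr

    if __name__ == "__main__":
        log = FakeLog([
            ("n0", NULLID, NULLID, 0),
            ("n1", "n0", NULLID, 1),
            ("n2", "missing-parent", NULLID, 9),  # unknown parent, bad linkrev
            ("n1", "n0", NULLID, 3),              # duplicate node, bad linkrev
        ])
        errors, seen = [], {}
        for rev in log:
            check_entry(log, rev, log.node(rev), seen, set([rev]), errors)
        for message in errors:
            print(message)

Running it prints one message per violated invariant, which is roughly what err() accumulates in the real code.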
setup.py: the list of installed scripts is now computed per platform so that the contrib/win32/hg.bat wrapper is installed on Windows, a parsers C extension joins the compiled modules, and the setup() call takes the computed list. The changed regions now read:

    import mercurial.version

    extra = {}
    scripts = ['hg']
    if os.name == 'nt':
        scripts.append('contrib/win32/hg.bat')

    # simplified version of distutils.ccompiler.CCompiler.has_function
    # that actually removes its temporary files.
    ...

    cmdclass = {'install_data': install_package_data}

    ext_modules=[
        Extension('mercurial.base85', ['mercurial/base85.c']),
        Extension('mercurial.bdiff', ['mercurial/bdiff.c']),
        Extension('mercurial.diffhelpers', ['mercurial/diffhelpers.c']),
        Extension('mercurial.mpatch', ['mercurial/mpatch.c']),
        Extension('mercurial.parsers', ['mercurial/parsers.c']),
        ]

    packages = ['mercurial', 'mercurial.hgweb', 'hgext', 'hgext.convert']
    ...

    setup(name='mercurial',
          ...
          url='http://selenic.com/mercurial',
          description='Scalable distributed SCM',
          license='GNU GPL',
          scripts=scripts,
          packages=packages,
          ext_modules=ext_modules,
          data_files=[(os.path.join('mercurial', root),
          ...
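
For readers adapting the same idea to their own packages, here is a minimal, self-contained sketch of the conditional-scripts pattern for a hypothetical project (not Mercurial's actual setup.py; distutils is used to match the code above):

    # Minimal sketch: build the script list per platform, then hand it to setup().
    # Hypothetical example project -- names are made up.
    import os
    from distutils.core import setup

    scripts = ['mytool']
    if os.name == 'nt':
        # a .bat wrapper only makes sense on Windows installs
        scripts.append('contrib/win32/mytool.bat')

    setup(name='mytool',
          version='0.1',
          scripts=scripts)

Running `python setup.py --name` just echoes the metadata; an actual build or install would of course require the listed script files to exist.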
hgweb templates: the navigation in both template families gains a link to the new graph view, and the template maps register the new page. In the button-style templates the added line is

    <a href="#url#graph{sessionvars%urlparameter}">graph</a>

and in the gitweb-style page_nav blocks it is

    <a href="{url}graph{sessionvars%urlparameter}">graph</a> |

(the gitweb changelog page_nav, previously one long line, is also reflowed to one link per line). Both template map files register the new page and its navigation entry:

    graph = graph.tmpl
    navgraphentry = '<a href="{url}graph/{node|short}{sessionvars%urlparameter}">{label|escape}</a> '

Finally, the annotateline entries in both maps change the per-line annotate link so that it carries a tooltip with the changeset and the first line of its description, and labels the link author@rev:

    ... <a href="#url#annotate/#node|short#/#file|urlescape#{sessionvars%urlparameter}#l{targetline}" title="{node|short}: {desc|escape|firstline}">#author|user#@#rev#</a> ...
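
To see what such a map entry expands to, here is a toy substitution. This is explicitly not hgweb's real templater: it ignores filters such as |short and |escape and the %urlparameter mapping, and only illustrates how a navgraphentry-style string turns into a link.

    # Illustration only -- a toy expansion of {keyword} placeholders.
    import re

    def expand(template, **values):
        """Replace {name[|filter]} placeholders with plain values."""
        def replace(match):
            name = match.group(1).split('|')[0].split('%')[0]
            return str(values.get(name, ''))
        return re.sub(r'\{([^}]+)\}', replace, template)

    navgraphentry = ('<a href="{url}graph/{node|short}'
                     '{sessionvars%urlparameter}">{label|escape}</a> ')

    print(expand(navgraphentry,
                 url='/hg/repo/',
                 node='abcdef012345',
                 sessionvars='',
                 label='tip'))
    # prints: <a href="/hg/repo/graph/abcdef012345">tip</a>  (plus a trailing space)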
The remaining files in the changeset were truncated by the viewer and their diffs are not shown: a number of modified source, test and template files, several binary files, one file copied from tests/test-push-http to tests/test-pull-http, one file whose mode changed from 100755 to 100644, and one removed file.