##// END OF EJS Templates
Remove unused imports
Joel Rosdahl -
r6212:e75aab65 default
parent child Browse files
Show More

The requested changes are too big and content was truncated. Show full diff

@@ -1,204 +1,204 b''
1 1 # churn.py - create a graph showing who changed the most lines
2 2 #
3 3 # Copyright 2006 Josef "Jeff" Sipek <jeffpc@josefsipek.net>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7 #
8 8 #
9 9 # Aliases map file format is simple one alias per line in the following
10 10 # format:
11 11 #
12 12 # <alias email> <actual email>
13 13
14 14 from mercurial.i18n import gettext as _
15 from mercurial import hg, mdiff, cmdutil, ui, util, templatefilters, node
15 from mercurial import mdiff, cmdutil, util, node
16 16 import os, sys
17 17
def get_tty_width():
    """Best-effort terminal width in characters.

    Honors the COLUMNS environment variable first, then queries the tty
    driver (TIOCGWINSZ) on stdout/stdin, and finally falls back to 80.
    """
    columns = os.environ.get('COLUMNS')
    if columns is not None:
        try:
            return int(columns)
        except ValueError:
            pass
    try:
        import termios, array, fcntl
        for stream in (sys.stdout, sys.stdin):
            try:
                fd = stream.fileno()
                if os.isatty(fd):
                    winsize = fcntl.ioctl(fd, termios.TIOCGWINSZ, '\0' * 8)
                    # rows, cols, xpixel, ypixel -> index 1 is the width
                    return array.array('h', winsize)[1]
            except ValueError:
                pass
    except ImportError:
        pass
    return 80
38 38
def __gather(ui, repo, node1, node2):
    """Count the lines changed between two changelog nodes.

    Returns a (who, lines) pair: the committer's email address for node2
    and the total number of added/removed lines across all files touched
    between node1 and node2.
    """
    def dirtywork(f, mmap1, mmap2):
        # Count changed lines for one file by generating a unified diff
        # between the two manifest revisions and counting non-context lines.
        lines = 0

        # mmap of None means the file has no content on that side
        to = mmap1 and repo.file(f).read(mmap1[f]) or None
        tn = mmap2 and repo.file(f).read(mmap2[f]) or None

        diff = mdiff.unidiff(to, "", tn, "", f, f).split("\n")

        for line in diff:
            if not line:
                continue # skip EOF
            if line.startswith(" "):
                continue # context line
            if line.startswith("--- ") or line.startswith("+++ "):
                continue # beginning of diff
            if line.startswith("@@ "):
                continue # info line

            # changed lines
            lines += 1

        return lines

    ##

    lines = 0

    changes = repo.status(node1, node2, None, util.always)[:5]

    modified, added, removed, deleted, unknown = changes

    who = repo.changelog.read(node2)[1]
    who = util.email(who) # get the email of the person

    # manifests of both endpoints, to resolve file revisions in dirtywork()
    mmap1 = repo.manifest.read(repo.changelog.read(node1)[0])
    mmap2 = repo.manifest.read(repo.changelog.read(node2)[0])
    for f in modified:
        lines += dirtywork(f, mmap1, mmap2)

    for f in added:
        lines += dirtywork(f, None, mmap2)

    for f in removed:
        lines += dirtywork(f, mmap1, None)

    for f in deleted:
        lines += dirtywork(f, mmap1, mmap2)

    for f in unknown:
        lines += dirtywork(f, mmap1, mmap2)

    return (who, lines)
92 92
def gather_stats(ui, repo, amap, revs=None, progress=False):
    """Accumulate changed-line counts per author over the given revisions.

    amap maps alias emails to canonical emails.  revs defaults to every
    revision in the changelog.  Merge revisions are skipped.  Returns a
    dict mapping author email -> total changed lines.
    """
    stats = {}

    cl = repo.changelog

    if not revs:
        revs = range(0, cl.count())

    nr_revs = len(revs)
    cur_rev = 0

    for rev in revs:
        cur_rev += 1 # next revision

        node2 = cl.node(rev)
        node1 = cl.parents(node2)[0]

        # a non-null second parent marks a merge; line counts would be
        # ambiguous there, so skip it
        if cl.parents(node2)[1] != node.nullid:
            ui.note(_('Revision %d is a merge, ignoring...\n') % (rev,))
            continue

        who, lines = __gather(ui, repo, node1, node2)

        # remap the owner if possible
        if who in amap:
            ui.note("using '%s' alias for '%s'\n" % (amap[who], who))
            who = amap[who]

        if not who in stats:
            stats[who] = 0
        stats[who] += lines

        ui.note("rev %d: %d lines by %s\n" % (rev, lines, who))

        if progress:
            nr_revs = max(nr_revs, 1)
            # only print when the integer percentage actually advances
            if int(100.0*(cur_rev - 1)/nr_revs) < int(100.0*cur_rev/nr_revs):
                ui.write("\rGenerating stats: %d%%" % (int(100.0*cur_rev/nr_revs),))
                sys.stdout.flush()

    if progress:
        ui.write("\r")
        sys.stdout.flush()

    return stats
138 138
139 139 def churn(ui, repo, **opts):
140 140 "Graphs the number of lines changed"
141 141
142 142 def pad(s, l):
143 143 if len(s) < l:
144 144 return s + " " * (l-len(s))
145 145 return s[0:l]
146 146
147 147 def graph(n, maximum, width, char):
148 148 maximum = max(1, maximum)
149 149 n = int(n * width / float(maximum))
150 150
151 151 return char * (n)
152 152
153 153 def get_aliases(f):
154 154 aliases = {}
155 155
156 156 for l in f.readlines():
157 157 l = l.strip()
158 158 alias, actual = l.split(" ")
159 159 aliases[alias] = actual
160 160
161 161 return aliases
162 162
163 163 amap = {}
164 164 aliases = opts.get('aliases')
165 165 if aliases:
166 166 try:
167 167 f = open(aliases,"r")
168 168 except OSError, e:
169 169 print "Error: " + e
170 170 return
171 171
172 172 amap = get_aliases(f)
173 173 f.close()
174 174
175 175 revs = [int(r) for r in cmdutil.revrange(repo, opts['rev'])]
176 176 revs.sort()
177 177 stats = gather_stats(ui, repo, amap, revs, opts.get('progress'))
178 178
179 179 # make a list of tuples (name, lines) and sort it in descending order
180 180 ordered = stats.items()
181 181 ordered.sort(lambda x, y: cmp(y[1], x[1]))
182 182
183 183 if not ordered:
184 184 return
185 185 maximum = ordered[0][1]
186 186
187 187 width = get_tty_width()
188 188 ui.note(_("assuming %i character terminal\n") % width)
189 189 width -= 1
190 190
191 191 for i in ordered:
192 192 person = i[0]
193 193 lines = i[1]
194 194 print "%s %6d %s" % (pad(person, 20), lines,
195 195 graph(lines, maximum, width - 20 - 1 - 6 - 2 - 2, '*'))
196 196
# Command table wiring 'hg churn' to churn(): the option list (revision
# limit, alias file, progress flag) plus the usage synopsis string.
cmdtable = {
    "churn":
    (churn,
     [('r', 'rev', [], _('limit statistics to the specified revisions')),
      ('', 'aliases', '', _('file with email aliases')),
      ('', 'progress', None, _('show progress'))],
     'hg churn [-r revision range] [-a file] [--progress]'),
}
@@ -1,219 +1,219 b''
1 1 # color.py color output for the status and qseries commands
2 2 #
3 3 # Copyright (C) 2007 Kevin Christen <kevin.christen@gmail.com>
4 4 #
5 5 # This program is free software; you can redistribute it and/or modify it
6 6 # under the terms of the GNU General Public License as published by the
7 7 # Free Software Foundation; either version 2 of the License, or (at your
8 8 # option) any later version.
9 9 #
10 10 # This program is distributed in the hope that it will be useful, but
11 11 # WITHOUT ANY WARRANTY; without even the implied warranty of
12 12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
13 13 # Public License for more details.
14 14 #
15 15 # You should have received a copy of the GNU General Public License along
16 16 # with this program; if not, write to the Free Software Foundation, Inc.,
17 17 # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
18 18
19 19 '''add color output to the status and qseries commands
20 20
21 21 This extension modifies the status command to add color to its output to
22 22 reflect file status, and the qseries command to add color to reflect patch
23 23 status (applied, unapplied, missing). Other effects in addition to color,
24 24 like bold and underlined text, are also available. Effects are rendered
25 25 with the ECMA-48 SGR control function (aka ANSI escape codes). This module
26 26 also provides the render_text function, which can be used to add effects to
27 27 any text.
28 28
29 29 To enable this extension, add this to your .hgrc file:
30 30 [extensions]
31 31 color =
32 32
Default effects may be overridden from the .hgrc file:
34 34
35 35 [color]
36 36 status.modified = blue bold underline red_background
37 37 status.added = green bold
38 38 status.removed = red bold blue_background
39 39 status.deleted = cyan bold underline
40 40 status.unknown = magenta bold underline
41 41 status.ignored = black bold
42 42
43 43 'none' turns off all effects
44 44 status.clean = none
45 45 status.copied = none
46 46
47 47 qseries.applied = blue bold underline
48 48 qseries.unapplied = black bold
49 49 qseries.missing = red bold
50 50 '''
51 51
52 52 import re, sys
53 53
54 from mercurial import commands, cmdutil, ui
54 from mercurial import commands, cmdutil
55 55 from mercurial.i18n import _
56 56
57 57 # start and stop parameters for effects
58 58 _effect_params = { 'none': (0, 0),
59 59 'black': (30, 39),
60 60 'red': (31, 39),
61 61 'green': (32, 39),
62 62 'yellow': (33, 39),
63 63 'blue': (34, 39),
64 64 'magenta': (35, 39),
65 65 'cyan': (36, 39),
66 66 'white': (37, 39),
67 67 'bold': (1, 22),
68 68 'italic': (3, 23),
69 69 'underline': (4, 24),
70 70 'inverse': (7, 27),
71 71 'black_background': (40, 49),
72 72 'red_background': (41, 49),
73 73 'green_background': (42, 49),
74 74 'yellow_background': (43, 49),
75 75 'blue_background': (44, 49),
76 76 'purple_background': (45, 49),
77 77 'cyan_background': (46, 49),
78 78 'white_background': (47, 49), }
79 79
80 80 def render_effects(text, *effects):
81 81 'Wrap text in commands to turn on each effect.'
82 82 start = []
83 83 stop = []
84 84 for effect in effects:
85 85 start.append(str(_effect_params[effect][0]))
86 86 stop.append(str(_effect_params[effect][1]))
87 87 start = '\033[' + ';'.join(start) + 'm'
88 88 stop = '\033[' + ';'.join(stop) + 'm'
89 89 return start + text + stop
90 90
def colorstatus(statusfunc, ui, repo, *pats, **opts):
    '''run the status command with colored output'''

    # --print0 uses NUL separators instead of newlines
    delimiter = opts['print0'] and '\0' or '\n'

    # run status and capture its output
    ui.pushbuffer()
    retval = statusfunc(ui, repo, *pats, **opts)
    # filter out empty strings
    lines = [ line for line in ui.popbuffer().split(delimiter) if line ]

    if opts['no_status']:
        # if --no-status, run the command again without that option to get
        # output with status abbreviations
        opts['no_status'] = False
        ui.pushbuffer()
        statusfunc(ui, repo, *pats, **opts)
        # filter out empty strings
        lines_with_status = [ line for
                              line in ui.popbuffer().split(delimiter) if line ]
    else:
        lines_with_status = lines

    # apply color to output and display it; lines_with_status[i][0] is the
    # one-character status abbreviation that selects the effects
    for i in xrange(0, len(lines)):
        status = _status_abbreviations[lines_with_status[i][0]]
        effects = _status_effects[status]
        if effects:
            lines[i] = render_effects(lines[i], *effects)
        sys.stdout.write(lines[i] + delimiter)
    return retval
122 122
# one-character status codes (as printed by 'hg status') -> long names
_status_abbreviations = { 'M': 'modified',
                          'A': 'added',
                          'R': 'removed',
                          '!': 'deleted',
                          '?': 'unknown',
                          'I': 'ignored',
                          'C': 'clean',
                          ' ': 'copied', }

# default effects per status; may be overridden via [color] in .hgrc
# (see _configcmdeffects)
_status_effects = { 'modified': ('blue', 'bold'),
                    'added': ('green', 'bold'),
                    'removed': ('red', 'bold'),
                    'deleted': ('cyan', 'bold', 'underline'),
                    'unknown': ('magenta', 'bold', 'underline'),
                    'ignored': ('black', 'bold'),
                    'clean': ('none', ),
                    'copied': ('none', ), }
140 140
def colorqseries(qseriesfunc, ui, repo, *dummy, **opts):
    '''run the qseries command with colored output'''
    # capture the plain qseries output, then re-emit it with effects
    ui.pushbuffer()
    retval = qseriesfunc(ui, repo, **opts)
    patches = ui.popbuffer().splitlines()
    for patch in patches:
        if opts['missing']:
            effects = _patch_effects['missing']
        # Determine if patch is applied. Search for beginning of output
        # line in the applied patch list, in case --summary has been used
        # and output line isn't just the patch name.
        elif [ applied for applied in repo.mq.applied
               if patch.startswith(applied.name) ]:
            effects = _patch_effects['applied']
        else:
            effects = _patch_effects['unapplied']
        sys.stdout.write(render_effects(patch, *effects) + '\n')
    return retval
159 159
# default effects per mq patch state; overridable via [color] in .hgrc
_patch_effects = { 'applied': ('blue', 'bold', 'underline'),
                   'missing': ('red', 'bold'),
                   'unapplied': ('black', 'bold'), }
163 163
def uisetup(ui):
    '''Initialize the extension.'''
    nocoloropt = ('', 'no-color', None, _("don't colorize output"))
    # wrap 'status' unconditionally; wrap 'qseries' only when mq is enabled
    _decoratecmd(ui, 'status', commands.table, colorstatus, nocoloropt)
    _configcmdeffects(ui, 'status', _status_effects);
    if ui.config('extensions', 'hgext.mq', default=None) is not None:
        from hgext import mq
        _decoratecmd(ui, 'qseries', mq.cmdtable, colorqseries, nocoloropt)
        _configcmdeffects(ui, 'qseries', _patch_effects);
173 173
def _decoratecmd(ui, cmd, table, delegate, *delegateoptions):
    '''Replace the function that implements cmd in table with a decorator.

    The decorator that becomes the new implementation of cmd calls
    delegate.  The delegate's first argument is the replaced function,
    followed by the normal Mercurial command arguments (ui, repo, ...).  If
    the delegate adds command options, supply them as delegateoptions.
    '''
    cmdkey, cmdentry = _cmdtableitem(ui, cmd, table)
    # cmdentry[0] is the original command function; _colordecorator decides
    # at call time whether to color or fall back to it directly
    decorator = lambda ui, repo, *args, **opts: \
        _colordecorator(delegate, cmdentry[0],
                        ui, repo, *args, **opts)
    # make sure 'hg help cmd' still works
    decorator.__doc__ = cmdentry[0].__doc__
    decoratorentry = (decorator,) + cmdentry[1:]
    for option in delegateoptions:
        # cmdentry[1] is the (mutable) option list shared with the entry
        decoratorentry[1].append(option)
    table[cmdkey] = decoratorentry
192 192
def _cmdtableitem(ui, cmd, table):
    '''Return key, value from table for cmd, or None if not found.'''
    # findcmd resolves aliases/abbreviations; scan the table for the key
    # under which that entry is actually registered
    entry = cmdutil.findcmd(ui, cmd, table)[1]
    for key, value in table.iteritems():
        if value is entry:
            return key, entry
199 199
200 200 def _colordecorator(colorfunc, nocolorfunc, ui, repo, *args, **opts):
201 201 '''Delegate to colorfunc or nocolorfunc, depending on conditions.
202 202
203 203 Delegate to colorfunc unless --no-color option is set or output is not
204 204 to a tty.
205 205 '''
206 206 if opts['no_color'] or not sys.stdout.isatty():
207 207 return nocolorfunc(ui, repo, *args, **opts)
208 208 return colorfunc(nocolorfunc, ui, repo, *args, **opts)
209 209
210 210 def _configcmdeffects(ui, cmdname, effectsmap):
211 211 '''Override default effects for cmdname with those from .hgrc file.
212 212
213 213 Entries in the .hgrc file are in the [color] section, and look like
214 214 'cmdname'.'status' (for instance, 'status.modified = blue bold inverse').
215 215 '''
216 216 for status in effectsmap:
217 217 effects = ui.config('color', cmdname + '.' + status)
218 218 if effects:
219 219 effectsmap[status] = re.split('\W+', effects)
@@ -1,352 +1,352 b''
1 1 # convcmd - convert extension commands definition
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 from common import NoRepo, SKIPREV, converter_source, converter_sink, mapfile
8 from common import NoRepo, SKIPREV, mapfile
9 9 from cvs import convert_cvs
10 10 from darcs import darcs_source
11 11 from git import convert_git
12 12 from hg import mercurial_source, mercurial_sink
13 13 from subversion import debugsvnlog, svn_source, svn_sink
14 14 from gnuarch import gnuarch_source
15 15 import filemap
16 16
17 17 import os, shutil
18 18 from mercurial import hg, util
19 19 from mercurial.i18n import _
20 20
# local encoding in effect before convert() forced util._encoding to UTF-8
orig_encoding = 'ascii'

def recode(s):
    """Best-effort re-encoding of s into the original local encoding."""
    if not isinstance(s, unicode):
        s = s.decode('utf-8')
    return s.encode(orig_encoding, 'replace')
28 28
# Registered (name, constructor) pairs; convertsource()/convertsink() try
# them in order and keep the first one that accepts the given path.
source_converters = [
    ('cvs', convert_cvs),
    ('git', convert_git),
    ('svn', svn_source),
    ('hg', mercurial_source),
    ('darcs', darcs_source),
    ('gnuarch', gnuarch_source),
    ]

sink_converters = [
    ('hg', mercurial_sink),
    ('svn', svn_sink),
    ]
42 42
43 43 def convertsource(ui, path, type, rev):
44 44 exceptions = []
45 45 for name, source in source_converters:
46 46 try:
47 47 if not type or name == type:
48 48 return source(ui, path, rev)
49 49 except NoRepo, inst:
50 50 exceptions.append(inst)
51 51 if not ui.quiet:
52 52 for inst in exceptions:
53 53 ui.write(_("%s\n") % inst)
54 54 raise util.Abort('%s: unknown repository type' % path)
55 55
56 56 def convertsink(ui, path, type):
57 57 for name, sink in sink_converters:
58 58 try:
59 59 if not type or name == type:
60 60 return sink(ui, path)
61 61 except NoRepo, inst:
62 62 ui.note(_("convert: %s\n") % inst)
63 63 raise util.Abort('%s: unknown repository type' % path)
64 64
class converter(object):
    """Drive a conversion from a source repository into a sink.

    Walks the source's uncommitted changesets, topologically sorts them,
    converts each into the sink, converts tags, and writes the merged
    author map back out.
    """

    def __init__(self, ui, source, dest, revmapfile, opts):

        self.source = source
        self.dest = dest
        self.ui = ui
        self.opts = opts
        self.commitcache = {}   # source rev -> commit object (cachecommit)
        self.authors = {}       # source author -> destination author
        self.authorfile = None  # where to write the merged author map

        # persistent source-rev -> sink-rev mapping
        self.map = mapfile(ui, revmapfile)

        # Read first the dst author map if any
        authorfile = self.dest.authorfile()
        if authorfile and os.path.exists(authorfile):
            self.readauthormap(authorfile)
        # Extend/Override with new author map if necessary
        if opts.get('authors'):
            self.readauthormap(opts.get('authors'))
            self.authorfile = self.dest.authorfile()

        self.splicemap = mapfile(ui, opts.get('splicemap'))

    def walktree(self, heads):
        '''Return a mapping that identifies the uncommitted parents of every
        uncommitted changeset.'''
        visit = heads
        known = {}
        parents = {}
        while visit:
            n = visit.pop(0)
            # already seen, or already converted in a previous run
            if n in known or n in self.map: continue
            known[n] = 1
            commit = self.cachecommit(n)
            parents[n] = []
            for p in commit.parents:
                parents[n].append(p)
                visit.append(p)

        return parents

    def toposort(self, parents):
        '''Return an ordering such that every uncommitted changeset is
        preceeded by all its uncommitted ancestors.'''
        visit = parents.keys()
        seen = {}
        children = {}
        actives = []

        while visit:
            n = visit.pop(0)
            if n in seen: continue
            seen[n] = 1
            # Ensure that nodes without parents are present in the 'children'
            # mapping.
            children.setdefault(n, [])
            hasparent = False
            for p in parents[n]:
                if not p in self.map:
                    # unconverted parent: n must wait for it
                    visit.append(p)
                    hasparent = True
                children.setdefault(p, []).append(n)
            if not hasparent:
                actives.append(n)

        del seen
        del visit

        if self.opts.get('datesort'):
            dates = {}
            def getdate(n):
                # memoized commit date lookup
                if n not in dates:
                    dates[n] = util.parsedate(self.commitcache[n].date)
                return dates[n]

            def picknext(nodes):
                return min([(getdate(n), n) for n in nodes])[1]
        else:
            prev = [None]
            def picknext(nodes):
                # Return the first eligible child of the previously converted
                # revision, or any of them.
                next = nodes[0]
                for n in nodes:
                    if prev[0] in parents[n]:
                        next = n
                        break
                prev[0] = next
                return next

        s = []
        pendings = {}
        while actives:
            n = picknext(actives)
            actives.remove(n)
            s.append(n)

            # Update dependents list
            for c in children.get(n, []):
                if c not in pendings:
                    pendings[c] = [p for p in parents[c] if p not in self.map]
                try:
                    pendings[c].remove(n)
                except ValueError:
                    raise util.Abort(_('cycle detected between %s and %s')
                                       % (recode(c), recode(n)))
                if not pendings[c]:
                    # Parents are converted, node is eligible
                    actives.insert(0, c)
                    pendings[c] = None

        if len(s) != len(parents):
            raise util.Abort(_("not all revisions were sorted"))

        return s

    def writeauthormap(self):
        """Write the accumulated author mapping out to self.authorfile."""
        authorfile = self.authorfile
        if authorfile:
            self.ui.status('Writing author map file %s\n' % authorfile)
            ofile = open(authorfile, 'w+')
            for author in self.authors:
                ofile.write("%s=%s\n" % (author, self.authors[author]))
            ofile.close()

    def readauthormap(self, authorfile):
        """Merge 'src=dst' author mappings from authorfile into self.authors."""
        afile = open(authorfile, 'r')
        for line in afile:
            if line.strip() == '':
                continue
            try:
                srcauthor, dstauthor = line.split('=', 1)
                srcauthor = srcauthor.strip()
                dstauthor = dstauthor.strip()
                if srcauthor in self.authors and dstauthor != self.authors[srcauthor]:
                    self.ui.status(
                        'Overriding mapping for author %s, was %s, will be %s\n'
                        % (srcauthor, self.authors[srcauthor], dstauthor))
                else:
                    self.ui.debug('Mapping author %s to %s\n'
                                  % (srcauthor, dstauthor))
                self.authors[srcauthor] = dstauthor
            except ValueError:
                # a line without '=' makes the unpacking above raise
                # ValueError, not IndexError as previously caught; with
                # IndexError the warning below was unreachable and bad
                # lines crashed the conversion
                self.ui.warn(
                    'Ignoring bad line in author map file %s: %s\n'
                    % (authorfile, line.rstrip()))
        afile.close()

    def cachecommit(self, rev):
        """Fetch rev's commit from the source, remap its author, cache it."""
        commit = self.source.getcommit(rev)
        commit.author = self.authors.get(commit.author, commit.author)
        self.commitcache[rev] = commit
        return commit

    def copy(self, rev):
        """Convert a single cached source revision into the sink."""
        commit = self.commitcache[rev]
        do_copies = hasattr(self.dest, 'copyfile')
        filenames = []

        changes = self.source.getchanges(rev)
        if isinstance(changes, basestring):
            # a string result means "reuse/skip": map rev and stop here
            if changes == SKIPREV:
                dest = SKIPREV
            else:
                dest = self.map[changes]
            self.map[rev] = dest
            return
        files, copies = changes
        pbranches = []
        if commit.parents:
            for prev in commit.parents:
                if prev not in self.commitcache:
                    self.cachecommit(prev)
                pbranches.append((self.map[prev],
                                  self.commitcache[prev].branch))
        self.dest.setbranch(commit.branch, pbranches)
        for f, v in files:
            filenames.append(f)
            try:
                data = self.source.getfile(f, v)
            except IOError:
                # source signals deletion by raising IOError
                self.dest.delfile(f)
            else:
                e = self.source.getmode(f, v)
                self.dest.putfile(f, e, data)
                if do_copies:
                    if f in copies:
                        copyf = copies[f]
                        # Merely marks that a copy happened.
                        self.dest.copyfile(copyf, f)

        try:
            # splicemap may override the computed parents for this rev
            parents = self.splicemap[rev].replace(',', ' ').split()
            self.ui.status('spliced in %s as parents of %s\n' %
                           (parents, rev))
            parents = [self.map.get(p, p) for p in parents]
        except KeyError:
            parents = [b[0] for b in pbranches]
        newnode = self.dest.putcommit(filenames, parents, commit)
        self.source.converted(rev, newnode)
        self.map[rev] = newnode

    def convert(self):
        """Run the whole conversion; always cleans up via cleanup()."""

        try:
            self.source.before()
            self.dest.before()
            self.source.setrevmap(self.map)
            self.ui.status("scanning source...\n")
            heads = self.source.getheads()
            parents = self.walktree(heads)
            self.ui.status("sorting...\n")
            t = self.toposort(parents)
            num = len(t)
            c = None

            self.ui.status("converting...\n")
            for c in t:
                num -= 1
                desc = self.commitcache[c].desc
                if "\n" in desc:
                    desc = desc.splitlines()[0]
                # convert log message to local encoding without using
                # tolocal() because convert() forced util._encoding to
                # 'utf-8'
                self.ui.status("%d %s\n" % (num, recode(desc)))
                # translate the format string, not the formatted result
                # (the old code passed the formatted string to _())
                self.ui.note(_("source: %s\n") % recode(c))
                self.copy(c)

            tags = self.source.gettags()
            ctags = {}
            for k in tags:
                v = tags[k]
                if self.map.get(v, SKIPREV) != SKIPREV:
                    ctags[k] = self.map[v]

            if c and ctags:
                nrev = self.dest.puttags(ctags)
                # write another hash correspondence to override the previous
                # one so we don't end up with extra tag heads
                if nrev:
                    self.map[c] = nrev

            self.writeauthormap()
        finally:
            self.cleanup()

    def cleanup(self):
        """Tear down sink then source, then close the revision map."""
        try:
            self.dest.after()
        finally:
            self.source.after()
        self.map.close()
319 319
def convert(ui, src, dest=None, revmapfile=None, **opts):
    """Entry point: convert repository src into dest.

    dest defaults to '<src>-hg'; revmapfile defaults to the sink's own
    revision map location.  Forces internal encoding to UTF-8 for the
    duration, remembering the previous one in orig_encoding for recode().
    """
    global orig_encoding
    orig_encoding = util._encoding
    util._encoding = 'UTF-8'

    if not dest:
        dest = hg.defaultdest(src) + "-hg"
        ui.status("assuming destination %s\n" % dest)

    destc = convertsink(ui, dest, opts.get('dest_type'))

    try:
        srcc = convertsource(ui, src, opts.get('source_type'),
                             opts.get('rev'))
    except Exception:
        # remove anything the sink already created before re-raising
        for path in destc.created:
            shutil.rmtree(path, True)
        raise

    fmap = opts.get('filemap')
    if fmap:
        srcc = filemap.filemap_source(ui, srcc, fmap)
        destc.setfilemapmode(True)

    if not revmapfile:
        try:
            revmapfile = destc.revmapfile()
        except Exception:
            # narrowed from a bare 'except:' so KeyboardInterrupt and
            # SystemExit are no longer swallowed here
            # NOTE(review): os.path.join on the sink object looks suspect —
            # presumably relies on its string conversion; verify
            revmapfile = os.path.join(destc, "map")

    c = converter(ui, srcc, destc, revmapfile, opts)
    c.convert()
@@ -1,301 +1,301 b''
1 1 # GNU Arch support for the convert extension
2 2
3 from common import NoRepo, checktool, commandline, commit, converter_source
3 from common import NoRepo, commandline, commit, converter_source
4 4 from mercurial.i18n import _
5 5 from mercurial import util
6 6 import os, shutil, tempfile, stat
7 7
8 8 class gnuarch_source(converter_source, commandline):
9 9
    class gnuarch_rev:
        """Plain record of one GNU Arch revision's metadata and file changes."""
        def __init__(self, rev):
            self.rev = rev
            self.summary = ''     # first 'Summary:' line plus continuation
            self.date = None      # set from 'Standard-date:' by _parsecatlog
            self.author = ''      # set from 'Creator:' by _parsecatlog
            self.add_files = []   # files added in this revision
            self.mod_files = []   # files modified
            self.del_files = []   # files deleted
            self.ren_files = {}   # file renames: old path -> new path
            self.ren_dirs = {}    # directory renames: old path -> new path
21 21
    def __init__(self, ui, path, rev=None):
        """Set up a GNU Arch source rooted at path.

        Raises NoRepo if path has no '{arch}' control directory, and
        util.Abort if neither 'baz' nor 'tla' is on the PATH.
        """
        super(gnuarch_source, self).__init__(ui, path, rev=rev)

        if not os.path.exists(os.path.join(path, '{arch}')):
            raise NoRepo(_("%s does not look like a GNU Arch repo" % path))

        # Could use checktool, but we want to check for baz or tla.
        self.execmd = None
        if util.find_exe('baz'):
            self.execmd = 'baz'
        else:
            if util.find_exe('tla'):
                self.execmd = 'tla'
            else:
                raise util.Abort(_('cannot find a GNU Arch tool'))

        commandline.__init__(self, ui, self.execmd)

        self.path = os.path.realpath(path)
        self.tmppath = None       # working checkout dir, chosen in before()

        self.treeversion = None   # 'archive/version' string, set in before()
        self.lastrev = None       # revision the working checkout reflects
        self.changes = {}         # rev -> gnuarch_rev
        self.parents = {}         # rev -> [child revs]; None -> heads
        self.tags = {}
        self.modecache = {}       # (name, rev) -> mode, filled by getfile()
49 49
    def before(self):
        """Discover the tree version and build the revision graph."""
        # tla and baz spell the tree-version invocation differently
        if self.execmd == 'tla':
            output = self.run0('tree-version', self.path)
        else:
            output = self.run0('tree-version', '-d', self.path)
        self.treeversion = output.strip()

        self.ui.status(_('analyzing tree version %s...\n' % self.treeversion))

        # Get name of temporary directory
        version = self.treeversion.split('/')
        self.tmppath = os.path.join(tempfile.gettempdir(),
                                    'hg-%s' % version[1])

        # Generate parents dictionary
        child = []
        output, status = self.runlines('revisions', self.treeversion)
        self.checkexit(status, 'archive registered?')
        for l in output:
            rev = l.strip()
            self.changes[rev] = self.gnuarch_rev(rev)

            # Read author, date and summary
            catlog = self.runlines0('cat-log', '-d', self.path, rev)
            self._parsecatlog(catlog, rev)

            # 'revisions' lists oldest first, so the previous iteration's
            # rev is this rev's child
            self.parents[rev] = child
            child = [rev]
            if rev == self.rev:
                break
        self.parents[None] = child
81 81
    def after(self):
        """Remove the temporary working checkout."""
        self.ui.debug(_('cleaning up %s\n' % self.tmppath))
        shutil.rmtree(self.tmppath, ignore_errors=True)
85 85
    def getheads(self):
        # parents[None] holds the head revision(s) recorded by before()
        return self.parents[None]
88 88
    def getfile(self, name, rev):
        """Return file contents at rev; record its mode for getmode().

        Only valid for the revision the working tree currently reflects
        (set by getchanges), hence the lastrev consistency check.
        """
        if rev != self.lastrev:
            raise util.Abort(_('internal calling inconsistency'))

        # Raise IOError if necessary (i.e. deleted files).
        if not os.path.exists(os.path.join(self.tmppath, name)):
            raise IOError

        data, mode = self._getfile(name, rev)
        self.modecache[(name, rev)] = mode

        return data
101 101
    def getmode(self, name, rev):
        # mode was cached by the preceding getfile() call for this pair
        return self.modecache[(name, rev)]
104 104
    def getchanges(self, rev):
        """Bring the working tree to rev and list its (file, rev) changes.

        Returns (changes, copies) where copies maps rename sources to
        destinations; directory renames are expanded file by file.
        """
        self.modecache = {}
        self._update(rev)
        changes = []
        copies = {}

        for f in self.changes[rev].add_files:
            changes.append((f, rev))

        for f in self.changes[rev].mod_files:
            changes.append((f, rev))

        for f in self.changes[rev].del_files:
            changes.append((f, rev))

        # file renames: report both endpoints and record the copy
        for src in self.changes[rev].ren_files:
            to = self.changes[rev].ren_files[src]
            changes.append((src, rev))
            changes.append((to, rev))
            copies[src] = to

        # directory renames: expand into per-file changes/copies
        for src in self.changes[rev].ren_dirs:
            to = self.changes[rev].ren_dirs[src]
            chgs, cps = self._rendirchanges(src, to);
            changes += [(f, rev) for f in chgs]
            for c in cps:
                copies[c] = cps[c]

        changes.sort()
        self.lastrev = rev

        return changes, copies
137 137
    def getcommit(self, rev):
        """Build a commit object from the metadata parsed in before()."""
        changes = self.changes[rev]
        return commit(author = changes.author, date = changes.date,
                      desc = changes.summary, parents = self.parents[rev])
142 142
    def gettags(self):
        # never populated elsewhere in this class; stays empty
        return self.tags
145 145
    def _execute(self, cmd, *args, **kwargs):
        """Run an arch command via the shell, discarding all output.

        Returns the os.system() exit status.
        """
        cmdline = [self.execmd, cmd]
        cmdline += args
        cmdline = [util.shellquote(arg) for arg in cmdline]
        # silence stdout and stderr by redirecting to the null device
        cmdline += ['>', util.nulldev, '2>', util.nulldev]
        cmdline = util.quotecommand(' '.join(cmdline))
        self.ui.debug(cmdline, '\n')
        return os.system(cmdline)
154 154
    def _update(self, rev):
        """Advance the temporary checkout to rev, replaying when possible."""
        if rev == 'base-0':
            # Initialise 'base-0' revision
            self._obtainrevision(rev)
        else:
            self.ui.debug(_('applying revision %s...\n' % rev))
            revision = '%s--%s' % (self.treeversion, rev)
            changeset, status = self.runlines('replay', '-d', self.tmppath,
                                              revision)
            if status:
                # Something went wrong while merging (baz or tla
                # issue?), get latest revision and try from there
                shutil.rmtree(self.tmppath, ignore_errors=True)
                self._obtainrevision(rev)
            else:
                old_rev = self.parents[rev][0]
                self.ui.debug(_('computing changeset between %s and %s...\n' \
                                % (old_rev, rev)))
                # NOTE(review): rev_a and rev_b are computed but never used
                # here or passed on — candidates for removal; confirm
                rev_a = '%s--%s' % (self.treeversion, old_rev)
                rev_b = '%s--%s' % (self.treeversion, rev)
                self._parsechangeset(changeset, rev)
176 176
    def _getfile(self, name, rev):
        """Read name from the working checkout; return (data, mode).

        mode is 'l' for symlinks (data is the link target), 'x' for
        executables, '' otherwise.
        """
        mode = os.lstat(os.path.join(self.tmppath, name)).st_mode
        if stat.S_ISLNK(mode):
            data = os.readlink(os.path.join(self.tmppath, name))
            mode = mode and 'l' or ''
        else:
            data = open(os.path.join(self.tmppath, name), 'rb').read()
            # any execute bit set marks the file executable
            mode = (mode & 0111) and 'x' or ''
        return data, mode
186 186
187 187 def _exclude(self, name):
188 188 exclude = [ '{arch}', '.arch-ids', '.arch-inventory' ]
189 189 for exc in exclude:
190 190 if name.find(exc) != -1:
191 191 return True
192 192 return False
193 193
    def _readcontents(self, path):
        """List all regular files under path, relative to it, skipping
        GNU Arch control files and directories."""
        files = []
        contents = os.listdir(path)
        while len(contents) > 0:
            c = contents.pop()
            p = os.path.join(path, c)
            # os.walk could be used, but here we avoid internal GNU
            # Arch files and directories, thus saving a lot of time.
            if not self._exclude(p):
                if os.path.isdir(p):
                    # push children with their path relative to `path`
                    contents += [os.path.join(c, f) for f in os.listdir(p)]
                else:
                    files.append(c)
        return files
208 208
209 209 def _rendirchanges(self, src, dest):
210 210 changes = []
211 211 copies = {}
212 212 files = self._readcontents(os.path.join(self.tmppath, dest))
213 213 for f in files:
214 214 s = os.path.join(src, f)
215 215 d = os.path.join(dest, f)
216 216 changes.append(s)
217 217 changes.append(d)
218 218 copies[s] = d
219 219 return changes, copies
220 220
221 221 def _obtainrevision(self, rev):
222 222 self.ui.debug(_('obtaining revision %s...\n' % rev))
223 223 revision = '%s--%s' % (self.treeversion, rev)
224 224 output = self._execute('get', revision, self.tmppath)
225 225 self.checkexit(output)
226 226 self.ui.debug(_('analysing revision %s...\n' % rev))
227 227 files = self._readcontents(self.tmppath)
228 228 self.changes[rev].add_files += files
229 229
230 230 def _stripbasepath(self, path):
231 231 if path.startswith('./'):
232 232 return path[2:]
233 233 return path
234 234
235 235 def _parsecatlog(self, data, rev):
236 236 summary = []
237 237 for l in data:
238 238 l = l.strip()
239 239 if summary:
240 240 summary.append(l)
241 241 elif l.startswith('Summary:'):
242 242 summary.append(l[len('Summary: '):])
243 243 elif l.startswith('Standard-date:'):
244 244 date = l[len('Standard-date: '):]
245 245 strdate = util.strdate(date, '%Y-%m-%d %H:%M:%S')
246 246 self.changes[rev].date = util.datestr(strdate)
247 247 elif l.startswith('Creator:'):
248 248 self.changes[rev].author = l[len('Creator: '):]
249 249 self.changes[rev].summary = '\n'.join(summary)
250 250
251 251 def _parsechangeset(self, data, rev):
252 252 for l in data:
253 253 l = l.strip()
254 254 # Added file (ignore added directory)
255 255 if l.startswith('A') and not l.startswith('A/'):
256 256 file = self._stripbasepath(l[1:].strip())
257 257 if not self._exclude(file):
258 258 self.changes[rev].add_files.append(file)
259 259 # Deleted file (ignore deleted directory)
260 260 elif l.startswith('D') and not l.startswith('D/'):
261 261 file = self._stripbasepath(l[1:].strip())
262 262 if not self._exclude(file):
263 263 self.changes[rev].del_files.append(file)
264 264 # Modified binary file
265 265 elif l.startswith('Mb'):
266 266 file = self._stripbasepath(l[2:].strip())
267 267 if not self._exclude(file):
268 268 self.changes[rev].mod_files.append(file)
269 269 # Modified link
270 270 elif l.startswith('M->'):
271 271 file = self._stripbasepath(l[3:].strip())
272 272 if not self._exclude(file):
273 273 self.changes[rev].mod_files.append(file)
274 274 # Modified file
275 275 elif l.startswith('M'):
276 276 file = self._stripbasepath(l[1:].strip())
277 277 if not self._exclude(file):
278 278 self.changes[rev].mod_files.append(file)
279 279 # Renamed file (or link)
280 280 elif l.startswith('=>'):
281 281 files = l[2:].strip().split(' ')
282 282 if len(files) == 1:
283 283 files = l[2:].strip().split('\t')
284 284 src = self._stripbasepath(files[0])
285 285 dst = self._stripbasepath(files[1])
286 286 if not self._exclude(src) and not self._exclude(dst):
287 287 self.changes[rev].ren_files[src] = dst
288 288 # Conversion from file to link or from link to file (modified)
289 289 elif l.startswith('ch'):
290 290 file = self._stripbasepath(l[2:].strip())
291 291 if not self._exclude(file):
292 292 self.changes[rev].mod_files.append(file)
293 293 # Renamed directory
294 294 elif l.startswith('/>'):
295 295 dirs = l[2:].strip().split(' ')
296 296 if len(dirs) == 1:
297 297 dirs = l[2:].strip().split('\t')
298 298 src = self._stripbasepath(dirs[0])
299 299 dst = self._stripbasepath(dirs[1])
300 300 if not self._exclude(src) and not self._exclude(dst):
301 301 self.changes[rev].ren_dirs[src] = dst
@@ -1,301 +1,301 b''
1 1 # hg backend for convert extension
2 2
3 3 # Notes for hg->hg conversion:
4 4 #
5 5 # * Old versions of Mercurial didn't trim the whitespace from the ends
6 6 # of commit messages, but new versions do. Changesets created by
7 7 # those older versions, then converted, may thus have different
8 8 # hashes for changesets that are otherwise identical.
9 9 #
10 10 # * By default, the source revision is stored in the converted
11 11 # revision. This will cause the converted revision to have a
12 12 # different identity than the source. To avoid this, use the
13 13 # following option: "--config convert.hg.saverev=false"
14 14
15 15
16 16 import os, time
17 17 from mercurial.i18n import _
18 18 from mercurial.node import bin, hex, nullid
19 from mercurial import hg, lock, revlog, util
19 from mercurial import hg, revlog, util
20 20
21 21 from common import NoRepo, commit, converter_source, converter_sink
22 22
23 23 class mercurial_sink(converter_sink):
24 24 def __init__(self, ui, path):
25 25 converter_sink.__init__(self, ui, path)
26 26 self.branchnames = ui.configbool('convert', 'hg.usebranchnames', True)
27 27 self.clonebranches = ui.configbool('convert', 'hg.clonebranches', False)
28 28 self.tagsbranch = ui.config('convert', 'hg.tagsbranch', 'default')
29 29 self.lastbranch = None
30 30 if os.path.isdir(path) and len(os.listdir(path)) > 0:
31 31 try:
32 32 self.repo = hg.repository(self.ui, path)
33 33 if not self.repo.local():
34 34 raise NoRepo(_('%s is not a local Mercurial repo') % path)
35 35 except hg.RepoError, err:
36 36 ui.print_exc()
37 37 raise NoRepo(err.args[0])
38 38 else:
39 39 try:
40 40 ui.status(_('initializing destination %s repository\n') % path)
41 41 self.repo = hg.repository(self.ui, path, create=True)
42 42 if not self.repo.local():
43 43 raise NoRepo(_('%s is not a local Mercurial repo') % path)
44 44 self.created.append(path)
45 45 except hg.RepoError, err:
46 46 ui.print_exc()
47 47 raise NoRepo("could not create hg repo %s as sink" % path)
48 48 self.lock = None
49 49 self.wlock = None
50 50 self.filemapmode = False
51 51
52 52 def before(self):
53 53 self.ui.debug(_('run hg sink pre-conversion action\n'))
54 54 self.wlock = self.repo.wlock()
55 55 self.lock = self.repo.lock()
56 56 self.repo.dirstate.clear()
57 57
58 58 def after(self):
59 59 self.ui.debug(_('run hg sink post-conversion action\n'))
60 60 self.repo.dirstate.invalidate()
61 61 self.lock = None
62 62 self.wlock = None
63 63
64 64 def revmapfile(self):
65 65 return os.path.join(self.path, ".hg", "shamap")
66 66
67 67 def authorfile(self):
68 68 return os.path.join(self.path, ".hg", "authormap")
69 69
70 70 def getheads(self):
71 71 h = self.repo.changelog.heads()
72 72 return [ hex(x) for x in h ]
73 73
74 74 def putfile(self, f, e, data):
75 75 self.repo.wwrite(f, data, e)
76 76 if f not in self.repo.dirstate:
77 77 self.repo.dirstate.normallookup(f)
78 78
79 79 def copyfile(self, source, dest):
80 80 self.repo.copy(source, dest)
81 81
82 82 def delfile(self, f):
83 83 try:
84 84 util.unlink(self.repo.wjoin(f))
85 85 #self.repo.remove([f])
86 86 except OSError:
87 87 pass
88 88
89 89 def setbranch(self, branch, pbranches):
90 90 if not self.clonebranches:
91 91 return
92 92
93 93 setbranch = (branch != self.lastbranch)
94 94 self.lastbranch = branch
95 95 if not branch:
96 96 branch = 'default'
97 97 pbranches = [(b[0], b[1] and b[1] or 'default') for b in pbranches]
98 98 pbranch = pbranches and pbranches[0][1] or 'default'
99 99
100 100 branchpath = os.path.join(self.path, branch)
101 101 if setbranch:
102 102 self.after()
103 103 try:
104 104 self.repo = hg.repository(self.ui, branchpath)
105 105 except:
106 106 self.repo = hg.repository(self.ui, branchpath, create=True)
107 107 self.before()
108 108
109 109 # pbranches may bring revisions from other branches (merge parents)
110 110 # Make sure we have them, or pull them.
111 111 missings = {}
112 112 for b in pbranches:
113 113 try:
114 114 self.repo.lookup(b[0])
115 115 except:
116 116 missings.setdefault(b[1], []).append(b[0])
117 117
118 118 if missings:
119 119 self.after()
120 120 for pbranch, heads in missings.iteritems():
121 121 pbranchpath = os.path.join(self.path, pbranch)
122 122 prepo = hg.repository(self.ui, pbranchpath)
123 123 self.ui.note(_('pulling from %s into %s\n') % (pbranch, branch))
124 124 self.repo.pull(prepo, [prepo.lookup(h) for h in heads])
125 125 self.before()
126 126
127 127 def putcommit(self, files, parents, commit):
128 128 seen = {}
129 129 pl = []
130 130 for p in parents:
131 131 if p not in seen:
132 132 pl.append(p)
133 133 seen[p] = 1
134 134 parents = pl
135 135 nparents = len(parents)
136 136 if self.filemapmode and nparents == 1:
137 137 m1node = self.repo.changelog.read(bin(parents[0]))[0]
138 138 parent = parents[0]
139 139
140 140 if len(parents) < 2: parents.append("0" * 40)
141 141 if len(parents) < 2: parents.append("0" * 40)
142 142 p2 = parents.pop(0)
143 143
144 144 text = commit.desc
145 145 extra = commit.extra.copy()
146 146 if self.branchnames and commit.branch:
147 147 extra['branch'] = commit.branch
148 148 if commit.rev:
149 149 extra['convert_revision'] = commit.rev
150 150
151 151 while parents:
152 152 p1 = p2
153 153 p2 = parents.pop(0)
154 154 a = self.repo.rawcommit(files, text, commit.author, commit.date,
155 155 bin(p1), bin(p2), extra=extra)
156 156 self.repo.dirstate.clear()
157 157 text = "(octopus merge fixup)\n"
158 158 p2 = hg.hex(self.repo.changelog.tip())
159 159
160 160 if self.filemapmode and nparents == 1:
161 161 man = self.repo.manifest
162 162 mnode = self.repo.changelog.read(bin(p2))[0]
163 163 if not man.cmp(m1node, man.revision(mnode)):
164 164 self.repo.rollback()
165 165 self.repo.dirstate.clear()
166 166 return parent
167 167 return p2
168 168
169 169 def puttags(self, tags):
170 170 try:
171 171 old = self.repo.wfile(".hgtags").read()
172 172 oldlines = old.splitlines(1)
173 173 oldlines.sort()
174 174 except:
175 175 oldlines = []
176 176
177 177 k = tags.keys()
178 178 k.sort()
179 179 newlines = []
180 180 for tag in k:
181 181 newlines.append("%s %s\n" % (tags[tag], tag))
182 182
183 183 newlines.sort()
184 184
185 185 if newlines != oldlines:
186 186 self.ui.status("updating tags\n")
187 187 f = self.repo.wfile(".hgtags", "w")
188 188 f.write("".join(newlines))
189 189 f.close()
190 190 if not oldlines: self.repo.add([".hgtags"])
191 191 date = "%s 0" % int(time.mktime(time.gmtime()))
192 192 extra = {}
193 193 if self.tagsbranch != 'default':
194 194 extra['branch'] = self.tagsbranch
195 195 try:
196 196 tagparent = self.repo.changectx(self.tagsbranch).node()
197 197 except hg.RepoError, inst:
198 198 tagparent = nullid
199 199 self.repo.rawcommit([".hgtags"], "update tags", "convert-repo",
200 200 date, tagparent, nullid, extra=extra)
201 201 return hex(self.repo.changelog.tip())
202 202
203 203 def setfilemapmode(self, active):
204 204 self.filemapmode = active
205 205
206 206 class mercurial_source(converter_source):
207 207 def __init__(self, ui, path, rev=None):
208 208 converter_source.__init__(self, ui, path, rev)
209 209 self.saverev = ui.configbool('convert', 'hg.saverev', True)
210 210 try:
211 211 self.repo = hg.repository(self.ui, path)
212 212 # try to provoke an exception if this isn't really a hg
213 213 # repo, but some other bogus compatible-looking url
214 214 if not self.repo.local():
215 215 raise hg.RepoError()
216 216 except hg.RepoError:
217 217 ui.print_exc()
218 218 raise NoRepo("%s is not a local Mercurial repo" % path)
219 219 self.lastrev = None
220 220 self.lastctx = None
221 221 self._changescache = None
222 222 self.convertfp = None
223 223
224 224 def changectx(self, rev):
225 225 if self.lastrev != rev:
226 226 self.lastctx = self.repo.changectx(rev)
227 227 self.lastrev = rev
228 228 return self.lastctx
229 229
230 230 def getheads(self):
231 231 if self.rev:
232 232 return [hex(self.repo.changectx(self.rev).node())]
233 233 else:
234 234 return [hex(node) for node in self.repo.heads()]
235 235
236 236 def getfile(self, name, rev):
237 237 try:
238 238 return self.changectx(rev).filectx(name).data()
239 239 except revlog.LookupError, err:
240 240 raise IOError(err)
241 241
242 242 def getmode(self, name, rev):
243 243 m = self.changectx(rev).manifest()
244 244 return (m.execf(name) and 'x' or '') + (m.linkf(name) and 'l' or '')
245 245
246 246 def getchanges(self, rev):
247 247 ctx = self.changectx(rev)
248 248 if self._changescache and self._changescache[0] == rev:
249 249 m, a, r = self._changescache[1]
250 250 else:
251 251 m, a, r = self.repo.status(ctx.parents()[0].node(), ctx.node())[:3]
252 252 changes = [(name, rev) for name in m + a + r]
253 253 changes.sort()
254 254 return (changes, self.getcopies(ctx, m + a))
255 255
256 256 def getcopies(self, ctx, files):
257 257 copies = {}
258 258 for name in files:
259 259 try:
260 260 copies[name] = ctx.filectx(name).renamed()[0]
261 261 except TypeError:
262 262 pass
263 263 return copies
264 264
265 265 def getcommit(self, rev):
266 266 ctx = self.changectx(rev)
267 267 parents = [hex(p.node()) for p in ctx.parents() if p.node() != nullid]
268 268 if self.saverev:
269 269 crev = rev
270 270 else:
271 271 crev = None
272 272 return commit(author=ctx.user(), date=util.datestr(ctx.date()),
273 273 desc=ctx.description(), rev=crev, parents=parents,
274 274 branch=ctx.branch(), extra=ctx.extra())
275 275
276 276 def gettags(self):
277 277 tags = [t for t in self.repo.tagslist() if t[0] != 'tip']
278 278 return dict([(name, hex(node)) for name, node in tags])
279 279
280 280 def getchangedfiles(self, rev, i):
281 281 ctx = self.changectx(rev)
282 282 i = i or 0
283 283 changes = self.repo.status(ctx.parents()[i].node(), ctx.node())[:3]
284 284
285 285 if i == 0:
286 286 self._changescache = (rev, changes)
287 287
288 288 return changes[0] + changes[1] + changes[2]
289 289
290 290 def converted(self, rev, destrev):
291 291 if self.convertfp is None:
292 292 self.convertfp = open(os.path.join(self.path, '.hg', 'shamap'),
293 293 'a')
294 294 self.convertfp.write('%s %s\n' % (destrev, rev))
295 295 self.convertfp.flush()
296 296
297 297 def before(self):
298 298 self.ui.debug(_('run hg source pre-conversion action\n'))
299 299
300 300 def after(self):
301 301 self.ui.debug(_('run hg source post-conversion action\n'))
@@ -1,129 +1,125 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2007 Daniel Holth <dholth@fastmail.fm>
4 4 # This is a stripped-down version of the original bzr-svn transport.py,
5 5 # Copyright (C) 2006 Jelmer Vernooij <jelmer@samba.org>
6 6
7 7 # This program is free software; you can redistribute it and/or modify
8 8 # it under the terms of the GNU General Public License as published by
9 9 # the Free Software Foundation; either version 2 of the License, or
10 10 # (at your option) any later version.
11 11
12 12 # This program is distributed in the hope that it will be useful,
13 13 # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 15 # GNU General Public License for more details.
16 16
17 17 # You should have received a copy of the GNU General Public License
18 18 # along with this program; if not, write to the Free Software
19 19 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
20 20
21 from cStringIO import StringIO
22 import os
23 from tempfile import mktemp
24
25 21 from svn.core import SubversionException, Pool
26 22 import svn.ra
27 23 import svn.client
28 24 import svn.core
29 25
30 26 # Some older versions of the Python bindings need to be
31 27 # explicitly initialized. But what we want to do probably
32 28 # won't work worth a darn against those libraries anyway!
33 29 svn.ra.initialize()
34 30
35 31 svn_config = svn.core.svn_config_get_config(None)
36 32
37 33
38 34 def _create_auth_baton(pool):
39 35 """Create a Subversion authentication baton. """
40 36 import svn.client
41 37 # Give the client context baton a suite of authentication
42 38 # providers.h
43 39 providers = [
44 40 svn.client.get_simple_provider(pool),
45 41 svn.client.get_username_provider(pool),
46 42 svn.client.get_ssl_client_cert_file_provider(pool),
47 43 svn.client.get_ssl_client_cert_pw_file_provider(pool),
48 44 svn.client.get_ssl_server_trust_file_provider(pool),
49 45 ]
50 46 # Platform-dependant authentication methods
51 47 if hasattr(svn.client, 'get_windows_simple_provider'):
52 48 providers.append(svn.client.get_windows_simple_provider(pool))
53 49
54 50 return svn.core.svn_auth_open(providers, pool)
55 51
56 52 class NotBranchError(SubversionException):
57 53 pass
58 54
59 55 class SvnRaTransport(object):
60 56 """
61 57 Open an ra connection to a Subversion repository.
62 58 """
63 59 def __init__(self, url="", ra=None):
64 60 self.pool = Pool()
65 61 self.svn_url = url
66 62 self.username = ''
67 63 self.password = ''
68 64
69 65 # Only Subversion 1.4 has reparent()
70 66 if ra is None or not hasattr(svn.ra, 'reparent'):
71 67 self.client = svn.client.create_context(self.pool)
72 68 ab = _create_auth_baton(self.pool)
73 69 if False:
74 70 svn.core.svn_auth_set_parameter(
75 71 ab, svn.core.SVN_AUTH_PARAM_DEFAULT_USERNAME, self.username)
76 72 svn.core.svn_auth_set_parameter(
77 73 ab, svn.core.SVN_AUTH_PARAM_DEFAULT_PASSWORD, self.password)
78 74 self.client.auth_baton = ab
79 75 self.client.config = svn_config
80 76 try:
81 77 self.ra = svn.client.open_ra_session(
82 78 self.svn_url.encode('utf8'),
83 79 self.client, self.pool)
84 80 except SubversionException, (inst, num):
85 81 if num in (svn.core.SVN_ERR_RA_ILLEGAL_URL,
86 82 svn.core.SVN_ERR_RA_LOCAL_REPOS_OPEN_FAILED,
87 83 svn.core.SVN_ERR_BAD_URL):
88 84 raise NotBranchError(url)
89 85 raise
90 86 else:
91 87 self.ra = ra
92 88 svn.ra.reparent(self.ra, self.svn_url.encode('utf8'))
93 89
94 90 class Reporter:
95 91 def __init__(self, (reporter, report_baton)):
96 92 self._reporter = reporter
97 93 self._baton = report_baton
98 94
99 95 def set_path(self, path, revnum, start_empty, lock_token, pool=None):
100 96 svn.ra.reporter2_invoke_set_path(self._reporter, self._baton,
101 97 path, revnum, start_empty, lock_token, pool)
102 98
103 99 def delete_path(self, path, pool=None):
104 100 svn.ra.reporter2_invoke_delete_path(self._reporter, self._baton,
105 101 path, pool)
106 102
107 103 def link_path(self, path, url, revision, start_empty, lock_token,
108 104 pool=None):
109 105 svn.ra.reporter2_invoke_link_path(self._reporter, self._baton,
110 106 path, url, revision, start_empty, lock_token,
111 107 pool)
112 108
113 109 def finish_report(self, pool=None):
114 110 svn.ra.reporter2_invoke_finish_report(self._reporter,
115 111 self._baton, pool)
116 112
117 113 def abort_report(self, pool=None):
118 114 svn.ra.reporter2_invoke_abort_report(self._reporter,
119 115 self._baton, pool)
120 116
121 117 def do_update(self, revnum, path, *args, **kwargs):
122 118 return self.Reporter(svn.ra.do_update(self.ra, revnum, path, *args, **kwargs))
123 119
124 120 def clone(self, offset=None):
125 121 """See Transport.clone()."""
126 122 if offset is None:
127 123 return self.__class__(self.base)
128 124
129 125 return SvnRaTransport(urlutils.join(self.base, offset), ra=self.ra)
@@ -1,123 +1,123 b''
1 1 # fetch.py - pull and merge remote changes
2 2 #
3 3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from mercurial.i18n import _
9 9 from mercurial.node import nullid, short
10 from mercurial import commands, cmdutil, hg, node, util
10 from mercurial import commands, cmdutil, hg, util
11 11
12 12 def fetch(ui, repo, source='default', **opts):
13 13 '''Pull changes from a remote repository, merge new changes if needed.
14 14
15 15 This finds all changes from the repository at the specified path
16 16 or URL and adds them to the local repository.
17 17
18 18 If the pulled changes add a new head, the head is automatically
19 19 merged, and the result of the merge is committed. Otherwise, the
20 20 working directory is updated to include the new changes.
21 21
22 22 When a merge occurs, the newly pulled changes are assumed to be
23 23 "authoritative". The head of the new changes is used as the first
24 24 parent, with local changes as the second. To switch the merge
25 25 order, use --switch-parent.
26 26
27 27 See 'hg help dates' for a list of formats valid for -d/--date.
28 28 '''
29 29
30 30 def postincoming(other, modheads):
31 31 if modheads == 0:
32 32 return 0
33 33 if modheads == 1:
34 34 return hg.clean(repo, repo.changelog.tip())
35 35 newheads = repo.heads(parent)
36 36 newchildren = [n for n in repo.heads(parent) if n != parent]
37 37 newparent = parent
38 38 if newchildren:
39 39 newparent = newchildren[0]
40 40 hg.clean(repo, newparent)
41 41 newheads = [n for n in repo.heads() if n != newparent]
42 42 if len(newheads) > 1:
43 43 ui.status(_('not merging with %d other new heads '
44 44 '(use "hg heads" and "hg merge" to merge them)') %
45 45 (len(newheads) - 1))
46 46 return
47 47 err = False
48 48 if newheads:
49 49 # By default, we consider the repository we're pulling
50 50 # *from* as authoritative, so we merge our changes into
51 51 # theirs.
52 52 if opts['switch_parent']:
53 53 firstparent, secondparent = newparent, newheads[0]
54 54 else:
55 55 firstparent, secondparent = newheads[0], newparent
56 56 ui.status(_('updating to %d:%s\n') %
57 57 (repo.changelog.rev(firstparent),
58 58 short(firstparent)))
59 59 hg.clean(repo, firstparent)
60 60 ui.status(_('merging with %d:%s\n') %
61 61 (repo.changelog.rev(secondparent), short(secondparent)))
62 62 err = hg.merge(repo, secondparent, remind=False)
63 63 if not err:
64 64 mod, add, rem = repo.status()[:3]
65 65 message = (cmdutil.logmessage(opts) or
66 66 (_('Automated merge with %s') %
67 67 util.removeauth(other.url())))
68 68 n = repo.commit(mod + add + rem, message,
69 69 opts['user'], opts['date'],
70 70 force_editor=opts.get('force_editor'))
71 71 ui.status(_('new changeset %d:%s merges remote changes '
72 72 'with local\n') % (repo.changelog.rev(n),
73 73 short(n)))
74 74
75 75 def pull():
76 76 cmdutil.setremoteconfig(ui, opts)
77 77
78 78 other = hg.repository(ui, ui.expandpath(source))
79 79 ui.status(_('pulling from %s\n') %
80 80 util.hidepassword(ui.expandpath(source)))
81 81 revs = None
82 82 if opts['rev']:
83 83 if not other.local():
84 84 raise util.Abort(_("fetch -r doesn't work for remote "
85 85 "repositories yet"))
86 86 else:
87 87 revs = [other.lookup(rev) for rev in opts['rev']]
88 88 modheads = repo.pull(other, heads=revs)
89 89 return postincoming(other, modheads)
90 90
91 91 date = opts.get('date')
92 92 if date:
93 93 opts['date'] = util.parsedate(date)
94 94
95 95 parent, p2 = repo.dirstate.parents()
96 96 if parent != repo.changelog.tip():
97 97 raise util.Abort(_('working dir not at tip '
98 98 '(use "hg update" to check out tip)'))
99 99 if p2 != nullid:
100 100 raise util.Abort(_('outstanding uncommitted merge'))
101 101 wlock = lock = None
102 102 try:
103 103 wlock = repo.wlock()
104 104 lock = repo.lock()
105 105 mod, add, rem = repo.status()[:3]
106 106 if mod or add or rem:
107 107 raise util.Abort(_('outstanding uncommitted changes'))
108 108 if len(repo.heads()) > 1:
109 109 raise util.Abort(_('multiple heads in this repository '
110 110 '(use "hg heads" and "hg merge" to merge)'))
111 111 return pull()
112 112 finally:
113 113 del lock, wlock
114 114
115 115 cmdtable = {
116 116 'fetch':
117 117 (fetch,
118 118 [('r', 'rev', [], _('a specific revision you would like to pull')),
119 119 ('f', 'force-editor', None, _('edit commit message')),
120 120 ('', 'switch-parent', None, _('switch parents when merging')),
121 121 ] + commands.commitopts + commands.commitopts2 + commands.remoteopts,
122 122 _('hg fetch [SOURCE]')),
123 123 }
@@ -1,326 +1,326 b''
1 1 # ASCII graph log extension for Mercurial
2 2 #
3 3 # Copyright 2007 Joel Rosdahl <joel@rosdahl.net>
4 4 #
5 5 # This software may be used and distributed according to the terms of
6 6 # the GNU General Public License, incorporated herein by reference.
7 7
8 8 import os
9 9 import sys
10 10 from mercurial.cmdutil import revrange, show_changeset
11 11 from mercurial.commands import templateopts
12 12 from mercurial.i18n import _
13 from mercurial.node import nullid, nullrev
13 from mercurial.node import nullrev
14 14 from mercurial.util import Abort, canonpath
15 15
16 16 def revision_grapher(repo, start_rev, stop_rev):
17 17 """incremental revision grapher
18 18
19 19 This generator function walks through the revision history from
20 20 revision start_rev to revision stop_rev (which must be less than
21 21 or equal to start_rev) and for each revision emits tuples with the
22 22 following elements:
23 23
24 24 - Current revision.
25 25 - Current node.
26 26 - Column of the current node in the set of ongoing edges.
27 27 - Edges; a list of (col, next_col) indicating the edges between
28 28 the current node and its parents.
29 29 - Number of columns (ongoing edges) in the current revision.
30 30 - The difference between the number of columns (ongoing edges)
31 31 in the next revision and the number of columns (ongoing edges)
32 32 in the current revision. That is: -1 means one column removed;
33 33 0 means no columns added or removed; 1 means one column added.
34 34 """
35 35
36 36 assert start_rev >= stop_rev
37 37 curr_rev = start_rev
38 38 revs = []
39 39 while curr_rev >= stop_rev:
40 40 node = repo.changelog.node(curr_rev)
41 41
42 42 # Compute revs and next_revs.
43 43 if curr_rev not in revs:
44 44 # New head.
45 45 revs.append(curr_rev)
46 46 rev_index = revs.index(curr_rev)
47 47 next_revs = revs[:]
48 48
49 49 # Add parents to next_revs.
50 50 parents = get_rev_parents(repo, curr_rev)
51 51 parents_to_add = []
52 52 for parent in parents:
53 53 if parent not in next_revs:
54 54 parents_to_add.append(parent)
55 55 parents_to_add.sort()
56 56 next_revs[rev_index:rev_index + 1] = parents_to_add
57 57
58 58 edges = []
59 59 for parent in parents:
60 60 edges.append((rev_index, next_revs.index(parent)))
61 61
62 62 n_columns_diff = len(next_revs) - len(revs)
63 63 yield (curr_rev, node, rev_index, edges, len(revs), n_columns_diff)
64 64
65 65 revs = next_revs
66 66 curr_rev -= 1
67 67
68 68 def filelog_grapher(repo, path, start_rev, stop_rev):
69 69 """incremental file log grapher
70 70
71 71 This generator function walks through the revision history of a
72 72 single file from revision start_rev to revision stop_rev (which must
73 73 be less than or equal to start_rev) and for each revision emits
74 74 tuples with the following elements:
75 75
76 76 - Current revision.
77 77 - Current node.
78 78 - Column of the current node in the set of ongoing edges.
79 79 - Edges; a list of (col, next_col) indicating the edges between
80 80 the current node and its parents.
81 81 - Number of columns (ongoing edges) in the current revision.
82 82 - The difference between the number of columns (ongoing edges)
83 83 in the next revision and the number of columns (ongoing edges)
84 84 in the current revision. That is: -1 means one column removed;
85 85 0 means no columns added or removed; 1 means one column added.
86 86 """
87 87
88 88 assert start_rev >= stop_rev
89 89 curr_rev = start_rev
90 90 revs = []
91 91 filerev = repo.file(path).count() - 1
92 92 while filerev >= 0:
93 93 fctx = repo.filectx(path, fileid=filerev)
94 94
95 95 # Compute revs and next_revs.
96 96 if filerev not in revs:
97 97 revs.append(filerev)
98 98 rev_index = revs.index(filerev)
99 99 next_revs = revs[:]
100 100
101 101 # Add parents to next_revs.
102 102 parents = [f.filerev() for f in fctx.parents() if f.path() == path]
103 103 parents_to_add = []
104 104 for parent in parents:
105 105 if parent not in next_revs:
106 106 parents_to_add.append(parent)
107 107 parents_to_add.sort()
108 108 next_revs[rev_index:rev_index + 1] = parents_to_add
109 109
110 110 edges = []
111 111 for parent in parents:
112 112 edges.append((rev_index, next_revs.index(parent)))
113 113
114 114 changerev = fctx.linkrev()
115 115 if changerev <= start_rev:
116 116 node = repo.changelog.node(changerev)
117 117 n_columns_diff = len(next_revs) - len(revs)
118 118 yield (changerev, node, rev_index, edges, len(revs), n_columns_diff)
119 119 if changerev <= stop_rev:
120 120 break
121 121 revs = next_revs
122 122 filerev -= 1
123 123
124 124 def get_rev_parents(repo, rev):
125 125 return [x for x in repo.changelog.parentrevs(rev) if x != nullrev]
126 126
127 127 def fix_long_right_edges(edges):
128 128 for (i, (start, end)) in enumerate(edges):
129 129 if end > start:
130 130 edges[i] = (start, end + 1)
131 131
132 132 def draw_edges(edges, nodeline, interline):
133 133 for (start, end) in edges:
134 134 if start == end + 1:
135 135 interline[2 * end + 1] = "/"
136 136 elif start == end - 1:
137 137 interline[2 * start + 1] = "\\"
138 138 elif start == end:
139 139 interline[2 * start] = "|"
140 140 else:
141 141 nodeline[2 * end] = "+"
142 142 if start > end:
143 143 (start, end) = (end,start)
144 144 for i in range(2 * start + 1, 2 * end):
145 145 if nodeline[i] != "+":
146 146 nodeline[i] = "-"
147 147
148 148 def format_line(line, level, logstr):
149 149 text = "%-*s %s" % (2 * level, "".join(line), logstr)
150 150 return "%s\n" % text.rstrip()
151 151
152 152 def get_nodeline_edges_tail(
153 153 node_index, p_node_index, n_columns, n_columns_diff, p_diff, fix_tail):
154 154 if fix_tail and n_columns_diff == p_diff and n_columns_diff != 0:
155 155 # Still going in the same non-vertical direction.
156 156 if n_columns_diff == -1:
157 157 start = max(node_index + 1, p_node_index)
158 158 tail = ["|", " "] * (start - node_index - 1)
159 159 tail.extend(["/", " "] * (n_columns - start))
160 160 return tail
161 161 else:
162 162 return ["\\", " "] * (n_columns - node_index - 1)
163 163 else:
164 164 return ["|", " "] * (n_columns - node_index - 1)
165 165
166 166 def get_padding_line(ni, n_columns, edges):
167 167 line = []
168 168 line.extend(["|", " "] * ni)
169 169 if (ni, ni - 1) in edges or (ni, ni) in edges:
170 170 # (ni, ni - 1) (ni, ni)
171 171 # | | | | | | | |
172 172 # +---o | | o---+
173 173 # | | c | | c | |
174 174 # | |/ / | |/ /
175 175 # | | | | | |
176 176 c = "|"
177 177 else:
178 178 c = " "
179 179 line.extend([c, " "])
180 180 line.extend(["|", " "] * (n_columns - ni - 1))
181 181 return line
182 182
183 183 def get_limit(limit_opt):
184 184 if limit_opt:
185 185 try:
186 186 limit = int(limit_opt)
187 187 except ValueError:
188 188 raise Abort(_("limit must be a positive integer"))
189 189 if limit <= 0:
190 190 raise Abort(_("limit must be positive"))
191 191 else:
192 192 limit = sys.maxint
193 193 return limit
194 194
195 195 def get_revs(repo, rev_opt):
196 196 if rev_opt:
197 197 revs = revrange(repo, rev_opt)
198 198 return (max(revs), min(revs))
199 199 else:
200 200 return (repo.changelog.count() - 1, 0)
201 201
202 202 def graphlog(ui, repo, path=None, **opts):
203 203 """show revision history alongside an ASCII revision graph
204 204
205 205 Print a revision history alongside a revision graph drawn with
206 206 ASCII characters.
207 207
208 208 Nodes printed as an @ character are parents of the working
209 209 directory.
210 210 """
211 211
212 212 limit = get_limit(opts["limit"])
213 213 (start_rev, stop_rev) = get_revs(repo, opts["rev"])
214 214 stop_rev = max(stop_rev, start_rev - limit + 1)
215 215 if start_rev == nullrev:
216 216 return
217 217 cs_printer = show_changeset(ui, repo, opts)
218 218 if path:
219 219 cpath = canonpath(repo.root, os.getcwd(), path)
220 220 grapher = filelog_grapher(repo, cpath, start_rev, stop_rev)
221 221 else:
222 222 grapher = revision_grapher(repo, start_rev, stop_rev)
223 223 repo_parents = repo.dirstate.parents()
224 224 prev_n_columns_diff = 0
225 225 prev_node_index = 0
226 226
227 227 for (rev, node, node_index, edges, n_columns, n_columns_diff) in grapher:
228 228 # log_strings is the list of all log strings to draw alongside
229 229 # the graph.
230 230 ui.pushbuffer()
231 231 cs_printer.show(rev, node)
232 232 log_strings = ui.popbuffer().split("\n")[:-1]
233 233
234 234 if n_columns_diff == -1:
235 235 # Transform
236 236 #
237 237 # | | | | | |
238 238 # o | | into o---+
239 239 # |X / |/ /
240 240 # | | | |
241 241 fix_long_right_edges(edges)
242 242
243 243 # add_padding_line says whether to rewrite
244 244 #
245 245 # | | | | | | | |
246 246 # | o---+ into | o---+
247 247 # | / / | | | # <--- padding line
248 248 # o | | | / /
249 249 # o | |
250 250 add_padding_line = (len(log_strings) > 2 and
251 251 n_columns_diff == -1 and
252 252 [x for (x, y) in edges if x + 1 < y])
253 253
254 254 # fix_nodeline_tail says whether to rewrite
255 255 #
256 256 # | | o | | | | o | |
257 257 # | | |/ / | | |/ /
258 258 # | o | | into | o / / # <--- fixed nodeline tail
259 259 # | |/ / | |/ /
260 260 # o | | o | |
261 261 fix_nodeline_tail = len(log_strings) <= 2 and not add_padding_line
262 262
263 263 # nodeline is the line containing the node character (@ or o).
264 264 nodeline = ["|", " "] * node_index
265 265 if node in repo_parents:
266 266 node_ch = "@"
267 267 else:
268 268 node_ch = "o"
269 269 nodeline.extend([node_ch, " "])
270 270
271 271 nodeline.extend(
272 272 get_nodeline_edges_tail(
273 273 node_index, prev_node_index, n_columns, n_columns_diff,
274 274 prev_n_columns_diff, fix_nodeline_tail))
275 275
276 276 # shift_interline is the line containing the non-vertical
277 277 # edges between this entry and the next.
278 278 shift_interline = ["|", " "] * node_index
279 279 if n_columns_diff == -1:
280 280 n_spaces = 1
281 281 edge_ch = "/"
282 282 elif n_columns_diff == 0:
283 283 n_spaces = 2
284 284 edge_ch = "|"
285 285 else:
286 286 n_spaces = 3
287 287 edge_ch = "\\"
288 288 shift_interline.extend(n_spaces * [" "])
289 289 shift_interline.extend([edge_ch, " "] * (n_columns - node_index - 1))
290 290
291 291 # Draw edges from the current node to its parents.
292 292 draw_edges(edges, nodeline, shift_interline)
293 293
294 294 # lines is the list of all graph lines to print.
295 295 lines = [nodeline]
296 296 if add_padding_line:
297 297 lines.append(get_padding_line(node_index, n_columns, edges))
298 298 lines.append(shift_interline)
299 299
300 300 # Make sure that there are as many graph lines as there are
301 301 # log strings.
302 302 while len(log_strings) < len(lines):
303 303 log_strings.append("")
304 304 if len(lines) < len(log_strings):
305 305 extra_interline = ["|", " "] * (n_columns + n_columns_diff)
306 306 while len(lines) < len(log_strings):
307 307 lines.append(extra_interline)
308 308
309 309 # Print lines.
310 310 indentation_level = max(n_columns, n_columns + n_columns_diff)
311 311 for (line, logstr) in zip(lines, log_strings):
312 312 ui.write(format_line(line, indentation_level, logstr))
313 313
314 314 # ...and start over.
315 315 prev_node_index = node_index
316 316 prev_n_columns_diff = n_columns_diff
317 317
318 318 cmdtable = {
319 319 "glog":
320 320 (graphlog,
321 321 [('l', 'limit', '', _('limit number of changes displayed')),
322 322 ('p', 'patch', False, _('show patch')),
323 323 ('r', 'rev', [], _('show the specified revision or range')),
324 324 ] + templateopts,
325 325 _('hg glog [OPTION]... [FILE]')),
326 326 }
@@ -1,356 +1,356 b''
1 1 # Minimal support for git commands on an hg repository
2 2 #
3 3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7 #
8 8 # The hgk extension allows browsing the history of a repository in a
9 9 # graphical way. It requires Tcl/Tk version 8.4 or later. (Tcl/Tk is
10 10 # not distributed with Mercurial.)
11 11 #
12 12 # hgk consists of two parts: a Tcl script that does the displaying and
13 13 # querying of information, and an extension to mercurial named hgk.py,
14 14 # which provides hooks for hgk to get information. hgk can be found in
15 15 # the contrib directory, and hgk.py can be found in the hgext
16 16 # directory.
17 17 #
18 18 # To load the hgext.py extension, add it to your .hgrc file (you have
19 19 # to use your global $HOME/.hgrc file, not one in a repository). You
20 20 # can specify an absolute path:
21 21 #
22 22 # [extensions]
23 23 # hgk=/usr/local/lib/hgk.py
24 24 #
25 25 # Mercurial can also scan the default python library path for a file
26 26 # named 'hgk.py' if you set hgk empty:
27 27 #
28 28 # [extensions]
29 29 # hgk=
30 30 #
31 31 # The hg view command will launch the hgk Tcl script. For this command
32 32 # to work, hgk must be in your search path. Alternately, you can
33 33 # specify the path to hgk in your .hgrc file:
34 34 #
35 35 # [hgk]
36 36 # path=/location/of/hgk
37 37 #
38 38 # hgk can make use of the extdiff extension to visualize
39 39 # revisions. Assuming you had already configured extdiff vdiff
40 40 # command, just add:
41 41 #
42 42 # [hgk]
43 43 # vdiff=vdiff
44 44 #
45 45 # Revisions context menu will now display additional entries to fire
46 46 # vdiff on hovered and selected revisions.
47 47
48 48 import os
49 from mercurial import hg, fancyopts, commands, ui, util, patch, revlog
49 from mercurial import hg, commands, util, patch, revlog
50 50
51 51 def difftree(ui, repo, node1=None, node2=None, *files, **opts):
52 52 """diff trees from two commits"""
53 53 def __difftree(repo, node1, node2, files=[]):
54 54 assert node2 is not None
55 55 mmap = repo.changectx(node1).manifest()
56 56 mmap2 = repo.changectx(node2).manifest()
57 57 status = repo.status(node1, node2, files=files)[:5]
58 58 modified, added, removed, deleted, unknown = status
59 59
60 60 empty = hg.short(hg.nullid)
61 61
62 62 for f in modified:
63 63 # TODO get file permissions
64 64 ui.write(":100664 100664 %s %s M\t%s\t%s\n" %
65 65 (hg.short(mmap[f]), hg.short(mmap2[f]), f, f))
66 66 for f in added:
67 67 ui.write(":000000 100664 %s %s N\t%s\t%s\n" %
68 68 (empty, hg.short(mmap2[f]), f, f))
69 69 for f in removed:
70 70 ui.write(":100664 000000 %s %s D\t%s\t%s\n" %
71 71 (hg.short(mmap[f]), empty, f, f))
72 72 ##
73 73
74 74 while True:
75 75 if opts['stdin']:
76 76 try:
77 77 line = raw_input().split(' ')
78 78 node1 = line[0]
79 79 if len(line) > 1:
80 80 node2 = line[1]
81 81 else:
82 82 node2 = None
83 83 except EOFError:
84 84 break
85 85 node1 = repo.lookup(node1)
86 86 if node2:
87 87 node2 = repo.lookup(node2)
88 88 else:
89 89 node2 = node1
90 90 node1 = repo.changelog.parents(node1)[0]
91 91 if opts['patch']:
92 92 if opts['pretty']:
93 93 catcommit(ui, repo, node2, "")
94 94 patch.diff(repo, node1, node2,
95 95 files=files,
96 96 opts=patch.diffopts(ui, {'git': True}))
97 97 else:
98 98 __difftree(repo, node1, node2, files=files)
99 99 if not opts['stdin']:
100 100 break
101 101
102 102 def catcommit(ui, repo, n, prefix, ctx=None):
103 103 nlprefix = '\n' + prefix;
104 104 if ctx is None:
105 105 ctx = repo.changectx(n)
106 106 (p1, p2) = ctx.parents()
107 107 ui.write("tree %s\n" % hg.short(ctx.changeset()[0])) # use ctx.node() instead ??
108 108 if p1: ui.write("parent %s\n" % hg.short(p1.node()))
109 109 if p2: ui.write("parent %s\n" % hg.short(p2.node()))
110 110 date = ctx.date()
111 111 description = ctx.description().replace("\0", "")
112 112 lines = description.splitlines()
113 113 if lines and lines[-1].startswith('committer:'):
114 114 committer = lines[-1].split(': ')[1].rstrip()
115 115 else:
116 116 committer = ctx.user()
117 117
118 118 ui.write("author %s %s %s\n" % (ctx.user(), int(date[0]), date[1]))
119 119 ui.write("committer %s %s %s\n" % (committer, int(date[0]), date[1]))
120 120 ui.write("revision %d\n" % ctx.rev())
121 121 ui.write("branch %s\n\n" % ctx.branch())
122 122
123 123 if prefix != "":
124 124 ui.write("%s%s\n" % (prefix, description.replace('\n', nlprefix).strip()))
125 125 else:
126 126 ui.write(description + "\n")
127 127 if prefix:
128 128 ui.write('\0')
129 129
130 130 def base(ui, repo, node1, node2):
131 131 """Output common ancestor information"""
132 132 node1 = repo.lookup(node1)
133 133 node2 = repo.lookup(node2)
134 134 n = repo.changelog.ancestor(node1, node2)
135 135 ui.write(hg.short(n) + "\n")
136 136
137 137 def catfile(ui, repo, type=None, r=None, **opts):
138 138 """cat a specific revision"""
139 139 # in stdin mode, every line except the commit is prefixed with two
140 140 # spaces. This way the our caller can find the commit without magic
141 141 # strings
142 142 #
143 143 prefix = ""
144 144 if opts['stdin']:
145 145 try:
146 146 (type, r) = raw_input().split(' ');
147 147 prefix = " "
148 148 except EOFError:
149 149 return
150 150
151 151 else:
152 152 if not type or not r:
153 153 ui.warn("cat-file: type or revision not supplied\n")
154 154 commands.help_(ui, 'cat-file')
155 155
156 156 while r:
157 157 if type != "commit":
158 158 ui.warn("aborting hg cat-file only understands commits\n")
159 159 return 1;
160 160 n = repo.lookup(r)
161 161 catcommit(ui, repo, n, prefix)
162 162 if opts['stdin']:
163 163 try:
164 164 (type, r) = raw_input().split(' ');
165 165 except EOFError:
166 166 break
167 167 else:
168 168 break
169 169
170 170 # git rev-tree is a confusing thing. You can supply a number of
171 171 # commit sha1s on the command line, and it walks the commit history
172 172 # telling you which commits are reachable from the supplied ones via
173 173 # a bitmask based on arg position.
174 174 # you can specify a commit to stop at by starting the sha1 with ^
175 175 def revtree(ui, args, repo, full="tree", maxnr=0, parents=False):
176 176 def chlogwalk():
177 177 count = repo.changelog.count()
178 178 i = count
179 179 l = [0] * 100
180 180 chunk = 100
181 181 while True:
182 182 if chunk > i:
183 183 chunk = i
184 184 i = 0
185 185 else:
186 186 i -= chunk
187 187
188 188 for x in xrange(0, chunk):
189 189 if i + x >= count:
190 190 l[chunk - x:] = [0] * (chunk - x)
191 191 break
192 192 if full != None:
193 193 l[x] = repo.changectx(i + x)
194 194 l[x].changeset() # force reading
195 195 else:
196 196 l[x] = 1
197 197 for x in xrange(chunk-1, -1, -1):
198 198 if l[x] != 0:
199 199 yield (i + x, full != None and l[x] or None)
200 200 if i == 0:
201 201 break
202 202
203 203 # calculate and return the reachability bitmask for sha
204 204 def is_reachable(ar, reachable, sha):
205 205 if len(ar) == 0:
206 206 return 1
207 207 mask = 0
208 208 for i in xrange(len(ar)):
209 209 if sha in reachable[i]:
210 210 mask |= 1 << i
211 211
212 212 return mask
213 213
214 214 reachable = []
215 215 stop_sha1 = []
216 216 want_sha1 = []
217 217 count = 0
218 218
219 219 # figure out which commits they are asking for and which ones they
220 220 # want us to stop on
221 221 for i in xrange(len(args)):
222 222 if args[i].startswith('^'):
223 223 s = repo.lookup(args[i][1:])
224 224 stop_sha1.append(s)
225 225 want_sha1.append(s)
226 226 elif args[i] != 'HEAD':
227 227 want_sha1.append(repo.lookup(args[i]))
228 228
229 229 # calculate the graph for the supplied commits
230 230 for i in xrange(len(want_sha1)):
231 231 reachable.append({});
232 232 n = want_sha1[i];
233 233 visit = [n];
234 234 reachable[i][n] = 1
235 235 while visit:
236 236 n = visit.pop(0)
237 237 if n in stop_sha1:
238 238 continue
239 239 for p in repo.changelog.parents(n):
240 240 if p not in reachable[i]:
241 241 reachable[i][p] = 1
242 242 visit.append(p)
243 243 if p in stop_sha1:
244 244 continue
245 245
246 246 # walk the repository looking for commits that are in our
247 247 # reachability graph
248 248 for i, ctx in chlogwalk():
249 249 n = repo.changelog.node(i)
250 250 mask = is_reachable(want_sha1, reachable, n)
251 251 if mask:
252 252 parentstr = ""
253 253 if parents:
254 254 pp = repo.changelog.parents(n)
255 255 if pp[0] != hg.nullid:
256 256 parentstr += " " + hg.short(pp[0])
257 257 if pp[1] != hg.nullid:
258 258 parentstr += " " + hg.short(pp[1])
259 259 if not full:
260 260 ui.write("%s%s\n" % (hg.short(n), parentstr))
261 261 elif full == "commit":
262 262 ui.write("%s%s\n" % (hg.short(n), parentstr))
263 263 catcommit(ui, repo, n, ' ', ctx)
264 264 else:
265 265 (p1, p2) = repo.changelog.parents(n)
266 266 (h, h1, h2) = map(hg.short, (n, p1, p2))
267 267 (i1, i2) = map(repo.changelog.rev, (p1, p2))
268 268
269 269 date = ctx.date()[0]
270 270 ui.write("%s %s:%s" % (date, h, mask))
271 271 mask = is_reachable(want_sha1, reachable, p1)
272 272 if i1 != hg.nullrev and mask > 0:
273 273 ui.write("%s:%s " % (h1, mask)),
274 274 mask = is_reachable(want_sha1, reachable, p2)
275 275 if i2 != hg.nullrev and mask > 0:
276 276 ui.write("%s:%s " % (h2, mask))
277 277 ui.write("\n")
278 278 if maxnr and count >= maxnr:
279 279 break
280 280 count += 1
281 281
282 282 def revparse(ui, repo, *revs, **opts):
283 283 """Parse given revisions"""
284 284 def revstr(rev):
285 285 if rev == 'HEAD':
286 286 rev = 'tip'
287 287 return revlog.hex(repo.lookup(rev))
288 288
289 289 for r in revs:
290 290 revrange = r.split(':', 1)
291 291 ui.write('%s\n' % revstr(revrange[0]))
292 292 if len(revrange) == 2:
293 293 ui.write('^%s\n' % revstr(revrange[1]))
294 294
295 295 # git rev-list tries to order things by date, and has the ability to stop
296 296 # at a given commit without walking the whole repo. TODO add the stop
297 297 # parameter
298 298 def revlist(ui, repo, *revs, **opts):
299 299 """print revisions"""
300 300 if opts['header']:
301 301 full = "commit"
302 302 else:
303 303 full = None
304 304 copy = [x for x in revs]
305 305 revtree(ui, copy, repo, full, opts['max_count'], opts['parents'])
306 306
307 307 def config(ui, repo, **opts):
308 308 """print extension options"""
309 309 def writeopt(name, value):
310 310 ui.write('k=%s\nv=%s\n' % (name, value))
311 311
312 312 writeopt('vdiff', ui.config('hgk', 'vdiff', ''))
313 313
314 314
315 315 def view(ui, repo, *etc, **opts):
316 316 "start interactive history viewer"
317 317 os.chdir(repo.root)
318 318 optstr = ' '.join(['--%s %s' % (k, v) for k, v in opts.iteritems() if v])
319 319 cmd = ui.config("hgk", "path", "hgk") + " %s %s" % (optstr, " ".join(etc))
320 320 ui.debug("running %s\n" % cmd)
321 321 util.system(cmd)
322 322
323 323 cmdtable = {
324 324 "^view":
325 325 (view,
326 326 [('l', 'limit', '', 'limit number of changes displayed')],
327 327 'hg view [-l LIMIT] [REVRANGE]'),
328 328 "debug-diff-tree":
329 329 (difftree,
330 330 [('p', 'patch', None, 'generate patch'),
331 331 ('r', 'recursive', None, 'recursive'),
332 332 ('P', 'pretty', None, 'pretty'),
333 333 ('s', 'stdin', None, 'stdin'),
334 334 ('C', 'copy', None, 'detect copies'),
335 335 ('S', 'search', "", 'search')],
336 336 'hg git-diff-tree [OPTION]... NODE1 NODE2 [FILE]...'),
337 337 "debug-cat-file":
338 338 (catfile,
339 339 [('s', 'stdin', None, 'stdin')],
340 340 'hg debug-cat-file [OPTION]... TYPE FILE'),
341 341 "debug-config":
342 342 (config, [], 'hg debug-config'),
343 343 "debug-merge-base":
344 344 (base, [], 'hg debug-merge-base node node'),
345 345 "debug-rev-parse":
346 346 (revparse,
347 347 [('', 'default', '', 'ignored')],
348 348 'hg debug-rev-parse REV'),
349 349 "debug-rev-list":
350 350 (revlist,
351 351 [('H', 'header', None, 'header'),
352 352 ('t', 'topo-order', None, 'topo-order'),
353 353 ('p', 'parents', None, 'parents'),
354 354 ('n', 'max-count', 0, 'max-count')],
355 355 'hg debug-rev-list [options] revs'),
356 356 }
@@ -1,103 +1,98 b''
1 1 """
2 2 This is a Mercurial extension for syntax highlighting in the file
3 3 revision view of hgweb.
4 4
5 5 It depends on the pygments syntax highlighting library:
6 6 http://pygments.org/
7 7
8 8 To enable the extension add this to hgrc:
9 9
10 10 [extensions]
11 11 hgext.highlight =
12 12
13 13 There is a single configuration option:
14 14
15 15 [web]
16 16 pygments_style = <style>
17 17
18 18 The default is 'colorful'. If this is changed the corresponding CSS
19 19 file should be re-generated by running
20 20
21 21 # pygmentize -f html -S <newstyle>
22 22
23 23
24 24 -- Adam Hupp <adam@hupp.org>
25 25
26 26
27 27 """
28 28
29 29 from mercurial import demandimport
30 30 demandimport.ignore.extend(['pkgutil',
31 31 'pkg_resources',
32 32 '__main__',])
33 33
34 import mimetypes
35
36 from mercurial.hgweb import hgweb_mod
37 34 from mercurial.hgweb.hgweb_mod import hgweb
38 35 from mercurial import util
39 from mercurial.hgweb.common import paritygen
40 from mercurial.node import hex
41 36 from mercurial.templatefilters import filters
42 37
43 38 from pygments import highlight
44 39 from pygments.util import ClassNotFound
45 40 from pygments.lexers import guess_lexer, guess_lexer_for_filename, TextLexer
46 41 from pygments.formatters import HtmlFormatter
47 42
48 43 SYNTAX_CSS = ('\n<link rel="stylesheet" href="#staticurl#highlight.css" '
49 44 'type="text/css" />')
50 45
51 46 def pygmentize(self, tmpl, fctx, field):
52 47 # append a <link ...> to the syntax highlighting css
53 48 old_header = ''.join(tmpl('header'))
54 49 if SYNTAX_CSS not in old_header:
55 50 new_header = old_header + SYNTAX_CSS
56 51 tmpl.cache['header'] = new_header
57 52
58 53 text = fctx.data()
59 54 if util.binary(text):
60 55 return
61 56
62 57 style = self.config("web", "pygments_style", "colorful")
63 58 # To get multi-line strings right, we can't format line-by-line
64 59 try:
65 60 lexer = guess_lexer_for_filename(fctx.path(), text,
66 61 encoding=util._encoding)
67 62 except ClassNotFound:
68 63 try:
69 64 lexer = guess_lexer(text, encoding=util._encoding)
70 65 except ClassNotFound:
71 66 lexer = TextLexer(encoding=util._encoding)
72 67
73 68 formatter = HtmlFormatter(style=style, encoding=util._encoding)
74 69
75 70 colorized = highlight(text, lexer, formatter)
76 71 # strip wrapping div
77 72 colorized = colorized[:colorized.find('\n</pre>')]
78 73 colorized = colorized[colorized.find('<pre>')+5:]
79 74 coloriter = iter(colorized.splitlines())
80 75
81 76 filters['colorize'] = lambda x: coloriter.next()
82 77
83 78 oldl = tmpl.cache[field]
84 79 newl = oldl.replace('line|escape', 'line|colorize')
85 80 tmpl.cache[field] = newl
86 81
87 82 def filerevision_highlight(self, tmpl, fctx):
88 83 pygmentize(self, tmpl, fctx, 'fileline')
89 84
90 85 return realrevision(self, tmpl, fctx)
91 86
92 87 def fileannotate_highlight(self, tmpl, fctx):
93 88 pygmentize(self, tmpl, fctx, 'annotateline')
94 89
95 90 return realannotate(self, tmpl, fctx)
96 91
97 92 # monkeypatch in the new version
98 93 # should be safer than overriding the method in a derived class
99 94 # and then patching the class
100 95 realrevision = hgweb.filerevision
101 96 hgweb.filerevision = filerevision_highlight
102 97 realannotate = hgweb.fileannotate
103 98 hgweb.fileannotate = fileannotate_highlight
@@ -1,466 +1,466 b''
1 1 # Command for sending a collection of Mercurial changesets as a series
2 2 # of patch emails.
3 3 #
4 4 # The series is started off with a "[PATCH 0 of N]" introduction,
5 5 # which describes the series as a whole.
6 6 #
7 7 # Each patch email has a Subject line of "[PATCH M of N] ...", using
8 8 # the first line of the changeset description as the subject text.
9 9 # The message contains two or three body parts:
10 10 #
11 11 # The remainder of the changeset description.
12 12 #
13 13 # [Optional] If the diffstat program is installed, the result of
14 14 # running diffstat on the patch.
15 15 #
16 16 # The patch itself, as generated by "hg export".
17 17 #
18 18 # Each message refers to all of its predecessors using the In-Reply-To
19 19 # and References headers, so they will show up as a sequence in
20 20 # threaded mail and news readers, and in mail archives.
21 21 #
22 22 # For each changeset, you will be prompted with a diffstat summary and
23 23 # the changeset summary, so you can be sure you are sending the right
24 24 # changes.
25 25 #
26 26 # To enable this extension:
27 27 #
28 28 # [extensions]
29 29 # hgext.patchbomb =
30 30 #
31 31 # To configure other defaults, add a section like this to your hgrc
32 32 # file:
33 33 #
34 34 # [email]
35 35 # from = My Name <my@email>
36 36 # to = recipient1, recipient2, ...
37 37 # cc = cc1, cc2, ...
38 38 # bcc = bcc1, bcc2, ...
39 39 #
40 40 # Then you can use the "hg email" command to mail a series of changesets
41 41 # as a patchbomb.
42 42 #
43 43 # To avoid sending patches prematurely, it is a good idea to first run
44 44 # the "email" command with the "-n" option (test only). You will be
45 45 # prompted for an email recipient address, a subject and an introductory
46 46 # message describing the patches of your patchbomb. Then when all is
47 47 # done, patchbomb messages are displayed. If PAGER environment variable
48 48 # is set, your pager will be fired up once for each patchbomb message, so
49 49 # you can verify everything is alright.
50 50 #
51 51 # The "-m" (mbox) option is also very useful. Instead of previewing
52 52 # each patchbomb message in a pager or sending the messages directly,
53 53 # it will create a UNIX mailbox file with the patch emails. This
54 54 # mailbox file can be previewed with any mail user agent which supports
55 55 # UNIX mbox files, i.e. with mutt:
56 56 #
57 57 # % mutt -R -f mbox
58 58 #
59 59 # When you are previewing the patchbomb messages, you can use `formail'
60 60 # (a utility that is commonly installed as part of the procmail package),
61 61 # to send each message out:
62 62 #
63 63 # % formail -s sendmail -bm -t < mbox
64 64 #
65 65 # That should be all. Now your patchbomb is on its way out.
66 66
67 67 import os, errno, socket, tempfile
68 68 import email.MIMEMultipart, email.MIMEText, email.MIMEBase
69 69 import email.Utils, email.Encoders
70 from mercurial import cmdutil, commands, hg, mail, ui, patch, util
70 from mercurial import cmdutil, commands, hg, mail, patch, util
71 71 from mercurial.i18n import _
72 72 from mercurial.node import bin
73 73
74 74 def patchbomb(ui, repo, *revs, **opts):
75 75 '''send changesets by email
76 76
77 77 By default, diffs are sent in the format generated by hg export,
78 78 one per message. The series starts with a "[PATCH 0 of N]"
79 79 introduction, which describes the series as a whole.
80 80
81 81 Each patch email has a Subject line of "[PATCH M of N] ...", using
82 82 the first line of the changeset description as the subject text.
83 83 The message contains two or three body parts. First, the rest of
84 84 the changeset description. Next, (optionally) if the diffstat
85 85 program is installed, the result of running diffstat on the patch.
86 86 Finally, the patch itself, as generated by "hg export".
87 87
88 88 With --outgoing, emails will be generated for patches not
89 89 found in the destination repository (or only those which are
90 90 ancestors of the specified revisions if any are provided)
91 91
92 92 With --bundle, changesets are selected as for --outgoing,
93 93 but a single email containing a binary Mercurial bundle as an
94 94 attachment will be sent.
95 95
96 96 Examples:
97 97
98 98 hg email -r 3000 # send patch 3000 only
99 99 hg email -r 3000 -r 3001 # send patches 3000 and 3001
100 100 hg email -r 3000:3005 # send patches 3000 through 3005
101 101 hg email 3000 # send patch 3000 (deprecated)
102 102
103 103 hg email -o # send all patches not in default
104 104 hg email -o DEST # send all patches not in DEST
105 105 hg email -o -r 3000 # send all ancestors of 3000 not in default
106 106 hg email -o -r 3000 DEST # send all ancestors of 3000 not in DEST
107 107
108 108 hg email -b # send bundle of all patches not in default
109 109 hg email -b DEST # send bundle of all patches not in DEST
110 110 hg email -b -r 3000 # bundle of all ancestors of 3000 not in default
111 111 hg email -b -r 3000 DEST # bundle of all ancestors of 3000 not in DEST
112 112
113 113 Before using this command, you will need to enable email in your hgrc.
114 114 See the [email] section in hgrc(5) for details.
115 115 '''
116 116
117 117 def prompt(prompt, default = None, rest = ': ', empty_ok = False):
118 118 if not ui.interactive:
119 119 return default
120 120 if default:
121 121 prompt += ' [%s]' % default
122 122 prompt += rest
123 123 while True:
124 124 r = ui.prompt(prompt, default=default)
125 125 if r:
126 126 return r
127 127 if default is not None:
128 128 return default
129 129 if empty_ok:
130 130 return r
131 131 ui.warn(_('Please enter a valid value.\n'))
132 132
133 133 def confirm(s, denial):
134 134 if not prompt(s, default = 'y', rest = '? ').lower().startswith('y'):
135 135 raise util.Abort(denial)
136 136
137 137 def cdiffstat(summary, patchlines):
138 138 s = patch.diffstat(patchlines)
139 139 if s:
140 140 if summary:
141 141 ui.write(summary, '\n')
142 142 ui.write(s, '\n')
143 143 confirm(_('Does the diffstat above look okay'),
144 144 _('diffstat rejected'))
145 145 elif s is None:
146 146 ui.warn(_('No diffstat information available.\n'))
147 147 s = ''
148 148 return s
149 149
150 150 def makepatch(patch, idx, total):
151 151 desc = []
152 152 node = None
153 153 body = ''
154 154 for line in patch:
155 155 if line.startswith('#'):
156 156 if line.startswith('# Node ID'):
157 157 node = line.split()[-1]
158 158 continue
159 159 if line.startswith('diff -r') or line.startswith('diff --git'):
160 160 break
161 161 desc.append(line)
162 162 if not node:
163 163 raise ValueError
164 164
165 165 if opts['attach']:
166 166 body = ('\n'.join(desc[1:]).strip() or
167 167 'Patch subject is complete summary.')
168 168 body += '\n\n\n'
169 169
170 170 if opts.get('plain'):
171 171 while patch and patch[0].startswith('# '):
172 172 patch.pop(0)
173 173 if patch:
174 174 patch.pop(0)
175 175 while patch and not patch[0].strip():
176 176 patch.pop(0)
177 177 if opts.get('diffstat'):
178 178 body += cdiffstat('\n'.join(desc), patch) + '\n\n'
179 179 if opts.get('attach') or opts.get('inline'):
180 180 msg = email.MIMEMultipart.MIMEMultipart()
181 181 if body:
182 182 msg.attach(email.MIMEText.MIMEText(body, 'plain'))
183 183 p = email.MIMEText.MIMEText('\n'.join(patch), 'x-patch')
184 184 binnode = bin(node)
185 185 # if node is mq patch, it will have patch file name as tag
186 186 patchname = [t for t in repo.nodetags(binnode)
187 187 if t.endswith('.patch') or t.endswith('.diff')]
188 188 if patchname:
189 189 patchname = patchname[0]
190 190 elif total > 1:
191 191 patchname = cmdutil.make_filename(repo, '%b-%n.patch',
192 192 binnode, idx, total)
193 193 else:
194 194 patchname = cmdutil.make_filename(repo, '%b.patch', binnode)
195 195 disposition = 'inline'
196 196 if opts['attach']:
197 197 disposition = 'attachment'
198 198 p['Content-Disposition'] = disposition + '; filename=' + patchname
199 199 msg.attach(p)
200 200 else:
201 201 body += '\n'.join(patch)
202 202 msg = email.MIMEText.MIMEText(body)
203 203
204 204 subj = desc[0].strip().rstrip('. ')
205 205 if total == 1:
206 206 subj = '[PATCH] ' + (opts.get('subject') or subj)
207 207 else:
208 208 tlen = len(str(total))
209 209 subj = '[PATCH %0*d of %d] %s' % (tlen, idx, total, subj)
210 210 msg['Subject'] = subj
211 211 msg['X-Mercurial-Node'] = node
212 212 return msg
213 213
214 214 def outgoing(dest, revs):
215 215 '''Return the revisions present locally but not in dest'''
216 216 dest = ui.expandpath(dest or 'default-push', dest or 'default')
217 217 revs = [repo.lookup(rev) for rev in revs]
218 218 other = hg.repository(ui, dest)
219 219 ui.status(_('comparing with %s\n') % dest)
220 220 o = repo.findoutgoing(other)
221 221 if not o:
222 222 ui.status(_("no changes found\n"))
223 223 return []
224 224 o = repo.changelog.nodesbetween(o, revs or None)[0]
225 225 return [str(repo.changelog.rev(r)) for r in o]
226 226
227 227 def getbundle(dest):
228 228 tmpdir = tempfile.mkdtemp(prefix='hg-email-bundle-')
229 229 tmpfn = os.path.join(tmpdir, 'bundle')
230 230 try:
231 231 commands.bundle(ui, repo, tmpfn, dest, **opts)
232 232 return open(tmpfn, 'rb').read()
233 233 finally:
234 234 try:
235 235 os.unlink(tmpfn)
236 236 except:
237 237 pass
238 238 os.rmdir(tmpdir)
239 239
240 240 if not (opts.get('test') or opts.get('mbox')):
241 241 # really sending
242 242 mail.validateconfig(ui)
243 243
244 244 if not (revs or opts.get('rev')
245 245 or opts.get('outgoing') or opts.get('bundle')):
246 246 raise util.Abort(_('specify at least one changeset with -r or -o'))
247 247
248 248 cmdutil.setremoteconfig(ui, opts)
249 249 if opts.get('outgoing') and opts.get('bundle'):
250 250 raise util.Abort(_("--outgoing mode always on with --bundle;"
251 251 " do not re-specify --outgoing"))
252 252
253 253 if opts.get('outgoing') or opts.get('bundle'):
254 254 if len(revs) > 1:
255 255 raise util.Abort(_("too many destinations"))
256 256 dest = revs and revs[0] or None
257 257 revs = []
258 258
259 259 if opts.get('rev'):
260 260 if revs:
261 261 raise util.Abort(_('use only one form to specify the revision'))
262 262 revs = opts.get('rev')
263 263
264 264 if opts.get('outgoing'):
265 265 revs = outgoing(dest, opts.get('rev'))
266 266 if opts.get('bundle'):
267 267 opts['revs'] = revs
268 268
269 269 # start
270 270 if opts.get('date'):
271 271 start_time = util.parsedate(opts.get('date'))
272 272 else:
273 273 start_time = util.makedate()
274 274
275 275 def genmsgid(id):
276 276 return '<%s.%s@%s>' % (id[:20], int(start_time[0]), socket.getfqdn())
277 277
278 278 def getdescription(body, sender):
279 279 if opts.get('desc'):
280 280 body = open(opts.get('desc')).read()
281 281 else:
282 282 ui.write(_('\nWrite the introductory message for the '
283 283 'patch series.\n\n'))
284 284 body = ui.edit(body, sender)
285 285 return body
286 286
287 287 def getexportmsgs():
288 288 patches = []
289 289
290 290 class exportee:
291 291 def __init__(self, container):
292 292 self.lines = []
293 293 self.container = container
294 294 self.name = 'email'
295 295
296 296 def write(self, data):
297 297 self.lines.append(data)
298 298
299 299 def close(self):
300 300 self.container.append(''.join(self.lines).split('\n'))
301 301 self.lines = []
302 302
303 303 commands.export(ui, repo, *revs, **{'output': exportee(patches),
304 304 'switch_parent': False,
305 305 'text': None,
306 306 'git': opts.get('git')})
307 307
308 308 jumbo = []
309 309 msgs = []
310 310
311 311 ui.write(_('This patch series consists of %d patches.\n\n')
312 312 % len(patches))
313 313
314 314 for p, i in zip(patches, xrange(len(patches))):
315 315 jumbo.extend(p)
316 316 msgs.append(makepatch(p, i + 1, len(patches)))
317 317
318 318 if len(patches) > 1:
319 319 tlen = len(str(len(patches)))
320 320
321 321 subj = '[PATCH %0*d of %d] %s' % (
322 322 tlen, 0, len(patches),
323 323 opts.get('subject') or
324 324 prompt('Subject:',
325 325 rest=' [PATCH %0*d of %d] ' % (tlen, 0, len(patches))))
326 326
327 327 body = ''
328 328 if opts.get('diffstat'):
329 329 d = cdiffstat(_('Final summary:\n'), jumbo)
330 330 if d:
331 331 body = '\n' + d
332 332
333 333 body = getdescription(body, sender)
334 334 msg = email.MIMEText.MIMEText(body)
335 335 msg['Subject'] = subj
336 336
337 337 msgs.insert(0, msg)
338 338 return msgs
339 339
340 340 def getbundlemsgs(bundle):
341 341 subj = (opts.get('subject')
342 342 or prompt('Subject:', default='A bundle for your repository'))
343 343
344 344 body = getdescription('', sender)
345 345 msg = email.MIMEMultipart.MIMEMultipart()
346 346 if body:
347 347 msg.attach(email.MIMEText.MIMEText(body, 'plain'))
348 348 datapart = email.MIMEBase.MIMEBase('application', 'x-mercurial-bundle')
349 349 datapart.set_payload(bundle)
350 350 datapart.add_header('Content-Disposition', 'attachment',
351 351 filename='bundle.hg')
352 352 email.Encoders.encode_base64(datapart)
353 353 msg.attach(datapart)
354 354 msg['Subject'] = subj
355 355 return [msg]
356 356
357 357 sender = (opts.get('from') or ui.config('email', 'from') or
358 358 ui.config('patchbomb', 'from') or
359 359 prompt('From', ui.username()))
360 360
361 361 if opts.get('bundle'):
362 362 msgs = getbundlemsgs(getbundle(dest))
363 363 else:
364 364 msgs = getexportmsgs()
365 365
366 366 def getaddrs(opt, prpt, default = None):
367 367 addrs = opts.get(opt) or (ui.config('email', opt) or
368 368 ui.config('patchbomb', opt) or
369 369 prompt(prpt, default = default)).split(',')
370 370 return [a.strip() for a in addrs if a.strip()]
371 371
372 372 to = getaddrs('to', 'To')
373 373 cc = getaddrs('cc', 'Cc', '')
374 374
375 375 bcc = opts.get('bcc') or (ui.config('email', 'bcc') or
376 376 ui.config('patchbomb', 'bcc') or '').split(',')
377 377 bcc = [a.strip() for a in bcc if a.strip()]
378 378
379 379 ui.write('\n')
380 380
381 381 parent = None
382 382
383 383 sender_addr = email.Utils.parseaddr(sender)[1]
384 384 sendmail = None
385 385 for m in msgs:
386 386 try:
387 387 m['Message-Id'] = genmsgid(m['X-Mercurial-Node'])
388 388 except TypeError:
389 389 m['Message-Id'] = genmsgid('patchbomb')
390 390 if parent:
391 391 m['In-Reply-To'] = parent
392 392 else:
393 393 parent = m['Message-Id']
394 394 m['Date'] = util.datestr(date=start_time,
395 395 format="%a, %d %b %Y %H:%M:%S", timezone=True)
396 396
397 397 start_time = (start_time[0] + 1, start_time[1])
398 398 m['From'] = sender
399 399 m['To'] = ', '.join(to)
400 400 if cc:
401 401 m['Cc'] = ', '.join(cc)
402 402 if bcc:
403 403 m['Bcc'] = ', '.join(bcc)
404 404 if opts.get('test'):
405 405 ui.status('Displaying ', m['Subject'], ' ...\n')
406 406 ui.flush()
407 407 if 'PAGER' in os.environ:
408 408 fp = os.popen(os.environ['PAGER'], 'w')
409 409 else:
410 410 fp = ui
411 411 try:
412 412 fp.write(m.as_string(0))
413 413 fp.write('\n')
414 414 except IOError, inst:
415 415 if inst.errno != errno.EPIPE:
416 416 raise
417 417 if fp is not ui:
418 418 fp.close()
419 419 elif opts.get('mbox'):
420 420 ui.status('Writing ', m['Subject'], ' ...\n')
421 421 fp = open(opts.get('mbox'), 'In-Reply-To' in m and 'ab+' or 'wb+')
422 422 date = util.datestr(date=start_time,
423 423 format='%a %b %d %H:%M:%S %Y', timezone=False)
424 424 fp.write('From %s %s\n' % (sender_addr, date))
425 425 fp.write(m.as_string(0))
426 426 fp.write('\n\n')
427 427 fp.close()
428 428 else:
429 429 if not sendmail:
430 430 sendmail = mail.connect(ui)
431 431 ui.status('Sending ', m['Subject'], ' ...\n')
432 432 # Exim does not remove the Bcc field
433 433 del m['Bcc']
434 434 sendmail(sender, to + bcc + cc, m.as_string(0))
435 435
436 436 cmdtable = {
437 437 "email":
438 438 (patchbomb,
439 439 [('a', 'attach', None, _('send patches as attachments')),
440 440 ('i', 'inline', None, _('send patches as inline attachments')),
441 441 ('', 'bcc', [], _('email addresses of blind copy recipients')),
442 442 ('c', 'cc', [], _('email addresses of copy recipients')),
443 443 ('d', 'diffstat', None, _('add diffstat output to messages')),
444 444 ('', 'date', '', _('use the given date as the sending date')),
445 445 ('', 'desc', '', _('use the given file as the series description')),
446 446 ('g', 'git', None, _('use git extended diff format')),
447 447 ('f', 'from', '', _('email address of sender')),
448 448 ('', 'plain', None, _('omit hg patch header')),
449 449 ('n', 'test', None, _('print messages that would be sent')),
450 450 ('m', 'mbox', '',
451 451 _('write messages to mbox file instead of sending them')),
452 452 ('o', 'outgoing', None,
453 453 _('send changes not found in the target repository')),
454 454 ('b', 'bundle', None,
455 455 _('send changes not in target as a binary bundle')),
456 456 ('r', 'rev', [], _('a revision to send')),
457 457 ('s', 'subject', '',
458 458 _('subject of first message (intro or single patch)')),
459 459 ('t', 'to', [], _('email addresses of recipients')),
460 460 ('', 'force', None,
461 461 _('run even when remote repository is unrelated (with -b)')),
462 462 ('', 'base', [],
463 463 _('a base changeset to specify instead of a destination (with -b)')),
464 464 ] + commands.remoteopts,
465 465 _('hg email [OPTION]... [DEST]...'))
466 466 }
@@ -1,152 +1,152 b''
1 1 # Copyright (C) 2006 - Marco Barisione <marco@barisione.org>
2 2 #
3 3 # This is a small extension for Mercurial (http://www.selenic.com/mercurial)
4 4 # that removes files not known to mercurial
5 5 #
6 6 # This program was inspired by the "cvspurge" script contained in CVS utilities
7 7 # (http://www.red-bean.com/cvsutils/).
8 8 #
9 9 # To enable the "purge" extension put these lines in your ~/.hgrc:
10 10 # [extensions]
11 11 # hgext.purge =
12 12 #
13 13 # For help on the usage of "hg purge" use:
14 14 # hg help purge
15 15 #
16 16 # This program is free software; you can redistribute it and/or modify
17 17 # it under the terms of the GNU General Public License as published by
18 18 # the Free Software Foundation; either version 2 of the License, or
19 19 # (at your option) any later version.
20 20 #
21 21 # This program is distributed in the hope that it will be useful,
22 22 # but WITHOUT ANY WARRANTY; without even the implied warranty of
23 23 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
24 24 # GNU General Public License for more details.
25 25 #
26 26 # You should have received a copy of the GNU General Public License
27 27 # along with this program; if not, write to the Free Software
28 28 # Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
29 29
30 from mercurial import hg, util, commands
30 from mercurial import util, commands
31 31 from mercurial.i18n import _
32 32 import os
33 33
def dopurge(ui, repo, dirs=None, act=True, ignored=False,
            abort_on_err=False, eol='\n',
            force=False, include=None, exclude=None):
    """Delete (or, when act is false, only list) working-directory files
    unknown to mercurial, then any directories left empty.

    dirs restricts the walk; ignored also purges .hgignore'd files;
    abort_on_err turns removal failures into aborts instead of warnings;
    eol terminates each printed name in listing mode; force skips the
    uncommitted-changes safety check.
    """
    def error(msg):
        # A failed removal either aborts the run or degrades to a warning.
        if abort_on_err:
            raise util.Abort(msg)
        ui.warn(_('warning: %s\n') % msg)

    def remove(remove_func, name):
        # Listing mode only echoes the name; otherwise really delete.
        if not act:
            ui.write('%s%s' % (name, eol))
            return
        try:
            remove_func(os.path.join(repo.root, name))
        except OSError:
            error(_('%s cannot be removed') % name)

    if not force:
        _check_fs(ui, repo)

    roots, match, anypats = util.cmdmatcher(repo.root, repo.getcwd(), dirs,
                                            include, exclude)
    directories = []
    files = []
    missing = []
    walk = repo.dirstate.statwalk(files=roots, match=match,
                                  ignored=ignored, directories=True)
    for src, f, st in walk:
        if src == 'd':
            directories.append(f)
        elif src == 'm':
            missing.append(f)
        elif src == 'f' and f not in repo.dirstate:
            files.append(f)

    directories.sort()

    # Remove unknown files first so their directories can become empty.
    for name in files:
        if name not in repo.dirstate:
            ui.note(_('Removing file %s\n') % name)
            remove(os.remove, name)

    # Then walk directories deepest-first so children go before parents.
    for name in reversed(directories):
        if match(name) and not os.listdir(repo.wjoin(name)):
            ui.note(_('Removing directory %s\n') % name)
            remove(os.rmdir, name)
80 80
def _check_fs(ui, repo):
    """Abort if there is the chance of having problems with name-mangling fs

    In a name mangling filesystem (e.g. a case insensitive one)
    dirstate.walk() can yield filenames different from the ones
    stored in the dirstate. This already confuses the status and
    add commands, but with purge this may cause data loss.

    To prevent this, this function will abort if there are uncommitted
    changes.
    """
    # A partial walk could miss modifications outside the requested files
    # (e.g. "hg purge readme" with a dirty README), so take a full status.
    modified, added, removed, deleted = repo.status()[:4]
    if not (modified or added or removed or deleted):
        return
    if not util.checkfolding(repo.path) and not ui.quiet:
        ui.warn(_("Purging on name mangling filesystems is not "
                  "fully supported.\n"))
    raise util.Abort(_("outstanding uncommitted changes"))
101 101
102 102
def purge(ui, repo, *dirs, **opts):
    '''removes files not tracked by mercurial

    Delete files not known to mercurial, this is useful to test local and
    uncommitted changes in the otherwise clean source tree.

    This means that purge will delete:
     - Unknown files: files marked with "?" by "hg status"
     - Ignored files: files usually ignored by Mercurial because they match
       a pattern in a ".hgignore" file
     - Empty directories: in fact Mercurial ignores directories unless they
       contain files under source control managment
    But it will leave untouched:
     - Unmodified tracked files
     - Modified tracked files
     - New files added to the repository (with "hg add")

    If directories are given on the command line, only files in these
    directories are considered.

    Be careful with purge, you could irreversibly delete some files you
    forgot to add to the repository. If you only want to print the list of
    files that this program would delete use the --print option.
    '''
    # Translate command-line options into dopurge() arguments.
    act = not opts['print']
    eol = '\n'
    if opts['print0']:
        # --print0 implies --print: NUL-terminated listing, no deletion.
        eol = '\0'
        act = False
    dopurge(ui, repo, dirs, act, bool(opts['all']),
            bool(opts['abort_on_err']), eol, bool(opts['force']),
            opts['include'], opts['exclude'])
139 139
140 140
# Command table: 'hg purge' (also reachable as 'hg clean').
cmdtable = {
    'purge|clean':
        (purge,
         [('a', 'abort-on-err', None, _('abort if an error occurs')),
          ('', 'all', None, _('purge ignored files too')),
          ('f', 'force', None, _('purge even when there are uncommitted changes')),
          ('p', 'print', None, _('print the file names instead of deleting them')),
          ('0', 'print0', None, _('end filenames with NUL, for use with xargs'
                                  ' (implies -p)')),
          # shared walk options (--include/--exclude)
          ] + commands.walkopts,
         _('hg purge [OPTION]... [DIR]...'))
}
@@ -1,527 +1,527 b''
1 1 # record.py
2 2 #
3 3 # Copyright 2007 Bryan O'Sullivan <bos@serpentine.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of
6 6 # the GNU General Public License, incorporated herein by reference.
7 7
8 8 '''interactive change selection during commit or qrefresh'''
9 9
10 10 from mercurial.i18n import _
11 from mercurial import cmdutil, commands, cmdutil, extensions, hg, mdiff, patch, revlog
11 from mercurial import cmdutil, commands, extensions, hg, mdiff, patch
12 12 from mercurial import util
13 import copy, cStringIO, errno, operator, os, re, shutil, tempfile
13 import copy, cStringIO, errno, operator, os, re, tempfile
14 14
15 15 lines_re = re.compile(r'@@ -(\d+),(\d+) \+(\d+),(\d+) @@\s*(.*)')
16 16
def scanpatch(fp):
    """like patch.iterhunks, but yield different events

    - ('file', [header_lines + fromfile + tofile])
    - ('context', [context_lines])
    - ('hunk', [hunk_lines])
    - ('range', (-start,len, +start,len, diffp))
    """
    lr = patch.linereader(fp)

    def scanwhile(first, p):
        """scan lr while predicate holds"""
        lines = [first]
        while True:
            line = lr.readline()
            if not line:
                break
            if p(line):
                lines.append(line)
            else:
                # line belongs to the next event: push back for the outer loop
                lr.push(line)
                break
        return lines

    while True:
        line = lr.readline()
        if not line:
            break
        if line.startswith('diff --git a/'):
            def notheader(line):
                # header lines are everything up to the next '---' or 'diff'
                s = line.split(None, 1)
                return not s or s[0] not in ('---', 'diff')
            header = scanwhile(line, notheader)
            fromfile = lr.readline()
            if fromfile.startswith('---'):
                tofile = lr.readline()
                header += [fromfile, tofile]
            else:
                # no ---/+++ pair (e.g. binary diff): put the line back
                lr.push(fromfile)
            yield 'file', header
        elif line[0] == ' ':
            # context run (backslash covers '\ No newline at end of file')
            yield 'context', scanwhile(line, lambda l: l[0] in ' \\')
        elif line[0] in '-+':
            yield 'hunk', scanwhile(line, lambda l: l[0] in '-+\\')
        else:
            # must be an @@ range line, otherwise the patch is malformed
            m = lines_re.match(line)
            if m:
                yield 'range', m.groups()
            else:
                raise patch.PatchError('unknown patch content: %r' % line)
67 67
class header(object):
    """patch header

    Wraps the 'diff --git' header lines of one file's diff and collects
    the file's hunk objects.

    XXX shoudn't we move this to mercurial/patch.py ?
    """
    diff_re = re.compile('diff --git a/(.*) b/(.*)$')
    allhunks_re = re.compile('(?:index|new file|deleted file) ')
    pretty_re = re.compile('(?:new file|deleted file) ')
    special_re = re.compile('(?:index|new|deleted|copy|rename) ')

    def __init__(self, header):
        self.header = header  # raw header lines, including diff/---/+++ lines
        self.hunks = []       # hunk objects appended by parsepatch

    def binary(self):
        # an 'index ' line in the header marks a binary diff
        # (returns True or falls through to None)
        for h in self.header:
            if h.startswith('index '):
                return True

    def pretty(self, fp):
        # human-readable summary written during interactive prompting;
        # stops at the first index/--- line, echoing header lines on the way
        for h in self.header:
            if h.startswith('index '):
                fp.write(_('this modifies a binary file (all or nothing)\n'))
                break
            if self.pretty_re.match(h):
                fp.write(h)
                if self.binary():
                    fp.write(_('this is a binary file\n'))
                break
            if h.startswith('---'):
                fp.write(_('%d hunks, %d lines changed\n') %
                         (len(self.hunks),
                          sum([h.added + h.removed for h in self.hunks])))
                break
            fp.write(h)

    def write(self, fp):
        fp.write(''.join(self.header))

    def allhunks(self):
        # True when the file must be taken whole (binary, new or deleted)
        for h in self.header:
            if self.allhunks_re.match(h):
                return True

    def files(self):
        # source and destination paths from the 'diff --git' line
        fromfile, tofile = self.diff_re.match(self.header[0]).groups()
        if fromfile == tofile:
            return [fromfile]
        return [fromfile, tofile]

    def filename(self):
        return self.files()[-1]

    def __repr__(self):
        return '<header %s>' % (' '.join(map(repr, self.files())))

    def special(self):
        # True for headers that carry meaning even without hunks
        # (renames, copies, mode changes, new/deleted files)
        for h in self.header:
            if self.special_re.match(h):
                return True
128 128
def countchanges(hunk):
    """hunk -> (n+,n-)"""
    added = removed = 0
    for line in hunk:
        tag = line[0]
        if tag == '+':
            added += 1
        elif tag == '-':
            removed += 1
    return added, removed
134 134
class hunk(object):
    """patch hunk

    One @@-range worth of change lines, plus its leading/trailing context,
    tied back to the header it belongs to.

    XXX shouldn't we merge this with patch.hunk ?
    """
    maxcontext = 3

    def __init__(self, header, fromline, toline, proc, before, hunk, after):
        def trimcontext(number, lines):
            # context trimming is deliberately disabled ('if False');
            # kept for reference
            delta = len(lines) - self.maxcontext
            if False and delta > 0:
                return number + delta, lines[:self.maxcontext]
            return number, lines

        self.header = header
        self.fromline, self.before = trimcontext(fromline, before)
        self.toline, self.after = trimcontext(toline, after)
        self.proc = proc  # text after the @@ range (enclosing function name)
        self.hunk = hunk  # the actual -/+ change lines
        self.added, self.removed = countchanges(self.hunk)

    def write(self, fp):
        # re-emit the hunk in unified diff format, recomputing the lengths
        # from the (possibly trimmed) context plus change counts
        delta = len(self.before) + len(self.after)
        fromlen = delta + self.removed
        tolen = delta + self.added
        fp.write('@@ -%d,%d +%d,%d @@%s\n' %
                 (self.fromline, fromlen, self.toline, tolen,
                  self.proc and (' ' + self.proc)))
        fp.write(''.join(self.before + self.hunk + self.after))

    pretty = write

    def filename(self):
        return self.header.filename()

    def __repr__(self):
        return '<hunk %r@%d>' % (self.filename(), self.fromline)
172 172
173 173 def parsepatch(fp):
174 174 """patch -> [] of hunks """
175 175 class parser(object):
176 176 """patch parsing state machine"""
177 177 def __init__(self):
178 178 self.fromline = 0
179 179 self.toline = 0
180 180 self.proc = ''
181 181 self.header = None
182 182 self.context = []
183 183 self.before = []
184 184 self.hunk = []
185 185 self.stream = []
186 186
187 187 def addrange(self, (fromstart, fromend, tostart, toend, proc)):
188 188 self.fromline = int(fromstart)
189 189 self.toline = int(tostart)
190 190 self.proc = proc
191 191
192 192 def addcontext(self, context):
193 193 if self.hunk:
194 194 h = hunk(self.header, self.fromline, self.toline, self.proc,
195 195 self.before, self.hunk, context)
196 196 self.header.hunks.append(h)
197 197 self.stream.append(h)
198 198 self.fromline += len(self.before) + h.removed
199 199 self.toline += len(self.before) + h.added
200 200 self.before = []
201 201 self.hunk = []
202 202 self.proc = ''
203 203 self.context = context
204 204
205 205 def addhunk(self, hunk):
206 206 if self.context:
207 207 self.before = self.context
208 208 self.context = []
209 209 self.hunk = data
210 210
211 211 def newfile(self, hdr):
212 212 self.addcontext([])
213 213 h = header(hdr)
214 214 self.stream.append(h)
215 215 self.header = h
216 216
217 217 def finished(self):
218 218 self.addcontext([])
219 219 return self.stream
220 220
221 221 transitions = {
222 222 'file': {'context': addcontext,
223 223 'file': newfile,
224 224 'hunk': addhunk,
225 225 'range': addrange},
226 226 'context': {'file': newfile,
227 227 'hunk': addhunk,
228 228 'range': addrange},
229 229 'hunk': {'context': addcontext,
230 230 'file': newfile,
231 231 'range': addrange},
232 232 'range': {'context': addcontext,
233 233 'hunk': addhunk},
234 234 }
235 235
236 236 p = parser()
237 237
238 238 state = 'context'
239 239 for newstate, data in scanpatch(fp):
240 240 try:
241 241 p.transitions[state][newstate](p, data)
242 242 except KeyError:
243 243 raise patch.PatchError('unhandled transition: %s -> %s' %
244 244 (state, newstate))
245 245 state = newstate
246 246 return p.finished()
247 247
def filterpatch(ui, chunks):
    """Interactively filter patch chunks into applied-only chunks"""
    chunks = list(chunks)
    chunks.reverse()  # so pop() consumes chunks in original file order
    seen = {}
    def consumefile():
        """fetch next portion from chunks until a 'header' is seen
        NB: header == new-file mark
        """
        consumed = []
        while chunks:
            if isinstance(chunks[-1], header):
                break
            else:
                consumed.append(chunks.pop())
        return consumed

    resp_all = [None]  # this two are changed from inside prompt,
    resp_file = [None] # so can't be usual variables
    applied = {}       # 'filename' -> [] of chunks
    def prompt(query):
        """prompt query, and process base inputs

        - y/n for the rest of file
        - y/n for the rest
        - ? (help)
        - q (quit)

        else, input is returned to the caller.
        """
        # sticky answers short-circuit further prompting
        if resp_all[0] is not None:
            return resp_all[0]
        if resp_file[0] is not None:
            return resp_file[0]
        while True:
            r = (ui.prompt(query + _(' [Ynsfdaq?] '), '(?i)[Ynsfdaq?]?$')
                 or 'y').lower()
            if r == '?':
                # help text is scraped from record()'s docstring, starting
                # at the 'y - record this change' marker line
                c = record.__doc__.find('y - record this change')
                for l in record.__doc__[c:].splitlines():
                    if l: ui.write(_(l.strip()), '\n')
                continue
            elif r == 's':
                r = resp_file[0] = 'n'
            elif r == 'f':
                r = resp_file[0] = 'y'
            elif r == 'd':
                r = resp_all[0] = 'n'
            elif r == 'a':
                r = resp_all[0] = 'y'
            elif r == 'q':
                raise util.Abort(_('user quit'))
            return r
    while chunks:
        chunk = chunks.pop()
        if isinstance(chunk, header):
            # new-file mark
            resp_file = [None]
            fixoffset = 0
            hdr = ''.join(chunk.header)
            if hdr in seen:
                # duplicate header: skip the whole file's chunks
                consumefile()
                continue
            seen[hdr] = True
            if resp_all[0] is None:
                chunk.pretty(ui)
            r = prompt(_('examine changes to %s?') %
                       _(' and ').join(map(repr, chunk.files())))
            if r == 'y':
                applied[chunk.filename()] = [chunk]
                if chunk.allhunks():
                    # binary/new/deleted files are all-or-nothing
                    applied[chunk.filename()] += consumefile()
            else:
                consumefile()
        else:
            # new hunk
            if resp_file[0] is None and resp_all[0] is None:
                chunk.pretty(ui)
            r = prompt(_('record this change to %r?') %
                       chunk.filename())
            if r == 'y':
                if fixoffset:
                    # shift +start by lines dropped in earlier skipped hunks
                    chunk = copy.copy(chunk)
                    chunk.toline += fixoffset
                applied[chunk.filename()].append(chunk)
            else:
                fixoffset += chunk.removed - chunk.added
    # keep only files with at least one recorded hunk (or a special header)
    return reduce(operator.add, [h for h in applied.itervalues()
                                 if h[0].special() or len(h) > 1], [])
337 337
def record(ui, repo, *pats, **opts):
    '''interactively select changes to commit

    If a list of files is omitted, all changes reported by "hg status"
    will be candidates for recording.

    See 'hg help dates' for a list of formats valid for -d/--date.

    You will be prompted for whether to record changes to each
    modified file, and for files with multiple changes, for each
    change to use. For each query, the following responses are
    possible:

    y - record this change
    n - skip this change

    s - skip remaining changes to this file
    f - record remaining changes to this file

    d - done, skip remaining changes and files
    a - record all changes to all remaining files
    q - quit, recording no changes

    ? - display help'''

    # NOTE: filterpatch()'s '?' handler displays the key list above by
    # scraping this docstring from 'y - record this change' onward, so
    # that text must not be reworded.
    def record_committer(ui, repo, pats, opts):
        # final commit step once the working dir holds only chosen hunks
        commands.commit(ui, repo, *pats, **opts)

    dorecord(ui, repo, record_committer, *pats, **opts)
367 367
368 368
def qrecord(ui, repo, patch, *pats, **opts):
    '''interactively record a new patch

    see 'hg help qnew' & 'hg help record' for more information and usage
    '''

    # NOTE: the 'patch' parameter (the mq patch name) shadows the imported
    # patch module inside this function; the body never needs the module.
    try:
        mq = extensions.find('mq')
    except KeyError:
        raise util.Abort(_("'mq' extension not loaded"))

    def qrecord_committer(ui, repo, pats, opts):
        # delegate the final step to mq's qnew
        mq.new(ui, repo, patch, *pats, **opts)

    opts = dict(opts, force=True)  # always 'qnew -f'
    dorecord(ui, repo, qrecord_committer, *pats, **opts)
386 386
387 387
def dorecord(ui, repo, committer, *pats, **opts):
    """Shared driver for record/qrecord: interactively choose hunks,
    temporarily reduce the working dir to just those hunks, run the
    committer callback, then restore the unchosen changes."""
    if not ui.interactive:
        raise util.Abort(_('running non-interactively, use commit instead'))

    def recordfunc(ui, repo, files, message, match, opts):
        """This is generic record driver.

        It's job is to interactively filter local changes, and accordingly
        prepare working dir into a state, where the job can be delegated to
        non-interactive commit command such as 'commit' or 'qrefresh'.

        After the actual job is done by non-interactive command, working dir
        state is restored to original.

        In the end we'll record intresting changes, and everything else will be
        left in place, so the user can continue his work.
        """
        if files:
            changes = None
        else:
            # no explicit file list: consider everything status reports
            changes = repo.status(files=files, match=match)[:5]
            modified, added, removed = changes[:3]
            files = modified + added + removed
        diffopts = mdiff.diffopts(git=True, nodates=True)
        fp = cStringIO.StringIO()
        patch.diff(repo, repo.dirstate.parents()[0], files=files,
                   match=match, changes=changes, opts=diffopts, fp=fp)
        fp.seek(0)

        # 1. filter patch, so we have intending-to apply subset of it
        chunks = filterpatch(ui, parsepatch(fp))
        del fp

        # files touched by at least one selected chunk (headers only;
        # hunk objects lack .files() and raise AttributeError)
        contenders = {}
        for h in chunks:
            try: contenders.update(dict.fromkeys(h.files()))
            except AttributeError: pass

        newfiles = [f for f in files if f in contenders]

        if not newfiles:
            ui.status(_('no changes to record\n'))
            return 0

        if changes is None:
            changes = repo.status(files=newfiles, match=match)[:5]
        modified = dict.fromkeys(changes[0])

        # 2. backup changed files, so we can restore them in the end
        backups = {}
        backupdir = repo.join('record-backups')
        try:
            os.mkdir(backupdir)
        except OSError, err:
            if err.errno != errno.EEXIST:
                raise
        try:
            # backup continues
            for f in newfiles:
                if f not in modified:
                    continue
                fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
                                               dir=backupdir)
                os.close(fd)
                ui.debug('backup %r as %r\n' % (f, tmpname))
                util.copyfile(repo.wjoin(f), tmpname)
                backups[f] = tmpname

            # build a patch containing only the selected hunks
            fp = cStringIO.StringIO()
            for c in chunks:
                if c.filename() in backups:
                    c.write(fp)
            dopatch = fp.tell()
            fp.seek(0)

            # 3a. apply filtered patch to clean repo  (clean)
            if backups:
                hg.revert(repo, repo.dirstate.parents()[0], backups.has_key)

            # 3b. (apply)
            if dopatch:
                ui.debug('applying patch\n')
                ui.debug(fp.getvalue())
                patch.internalpatch(fp, ui, 1, repo.root)
            del fp

            # 4. We prepared working directory according to filtered patch.
            #    Now is the time to delegate the job to commit/qrefresh or the like!

            # it is important to first chdir to repo root -- we'll call a
            # highlevel command with list of pathnames relative to repo root
            cwd = os.getcwd()
            os.chdir(repo.root)
            try:
                committer(ui, repo, newfiles, opts)
            finally:
                os.chdir(cwd)

            return 0
        finally:
            # 5. finally restore backed-up files
            try:
                for realname, tmpname in backups.iteritems():
                    ui.debug('restoring %r to %r\n' % (tmpname, realname))
                    util.copyfile(tmpname, repo.wjoin(realname))
                    os.unlink(tmpname)
                os.rmdir(backupdir)
            except OSError:
                pass
    return cmdutil.commit(ui, repo, recordfunc, pats, opts)
498 498
# 'hg record' reuses the standard commit command's option table, so it
# accepts exactly the same flags as 'hg commit'.
cmdtable = {
    "record":
        (record,

         # add commit options
         commands.table['^commit|ci'][1],

         _('hg record [OPTION]... [FILE]...')),
}
508 508
509 509
def extsetup():
    """Register 'hg qrecord' when the mq extension is loaded."""
    try:
        mq = extensions.find('mq')
    except KeyError:
        # without mq there is nothing to wire up
        return

    # reuse qnew's option table, minus '--force' (qrecord always forces)
    qnewopts = [opt for opt in mq.cmdtable['qnew'][1] if opt[1] != 'force']
    cmdtable["qrecord"] = (qrecord,
                           qnewopts,
                           _('hg qrecord [OPTION]... PATCH [FILE]...'))
527 527
@@ -1,597 +1,597 b''
1 1 # Patch transplanting extension for Mercurial
2 2 #
3 3 # Copyright 2006, 2007 Brendan Cully <brendan@kublai.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from mercurial.i18n import _
9 9 import os, tempfile
10 from mercurial import bundlerepo, changegroup, cmdutil, commands, hg, merge
10 from mercurial import bundlerepo, changegroup, cmdutil, hg, merge
11 11 from mercurial import patch, revlog, util
12 12
13 13 '''patch transplanting tool
14 14
15 15 This extension allows you to transplant patches from another branch.
16 16
17 17 Transplanted patches are recorded in .hg/transplant/transplants, as a map
18 18 from a changeset hash to its hash in the source repository.
19 19 '''
20 20
class transplantentry:
    """Pairs a local node (lnode) with the source-repo node (rnode)
    it was transplanted from."""
    def __init__(self, lnode, rnode):
        self.lnode, self.rnode = lnode, rnode
25 25
class transplants:
    """Persistent record of transplanted changesets.

    Stored as one 'lhex:rhex' line per transplant in the given file,
    read eagerly on construction and written back on write().
    """
    def __init__(self, path=None, transplantfile=None, opener=None):
        self.path = path
        self.transplantfile = transplantfile
        self.opener = opener

        if not opener:
            self.opener = util.opener(self.path)
        self.transplants = []  # list of transplantentry
        self.dirty = False     # unsaved in-memory changes?
        self.read()

    def read(self):
        # load existing records, if the file exists
        abspath = os.path.join(self.path, self.transplantfile)
        if self.transplantfile and os.path.exists(abspath):
            for line in self.opener(self.transplantfile).read().splitlines():
                lnode, rnode = map(revlog.bin, line.split(':'))
                self.transplants.append(transplantentry(lnode, rnode))

    def write(self):
        # persist only when something changed; creates the directory lazily
        if self.dirty and self.transplantfile:
            if not os.path.isdir(self.path):
                os.mkdir(self.path)
            fp = self.opener(self.transplantfile, 'w')
            for c in self.transplants:
                l, r = map(revlog.hex, (c.lnode, c.rnode))
                fp.write(l + ':' + r + '\n')
            fp.close()
        self.dirty = False

    def get(self, rnode):
        # all local transplants of the given source node
        return [t for t in self.transplants if t.rnode == rnode]

    def set(self, lnode, rnode):
        self.transplants.append(transplantentry(lnode, rnode))
        self.dirty = True

    def remove(self, transplant):
        del self.transplants[self.transplants.index(transplant)]
        self.dirty = True
66 66
67 67 class transplanter:
    def __init__(self, ui, repo):
        # Per-repository transplant state lives under .hg/transplant.
        self.ui = ui
        self.path = repo.join('transplant')
        self.opener = util.opener(self.path)
        self.transplants = transplants(self.path, 'transplants', opener=self.opener)
73 73
    def applied(self, repo, node, parent):
        '''returns True if a node is already an ancestor of parent
        or has already been transplanted'''
        if hasnode(repo, node):
            if node in repo.changelog.reachable(parent, stop=node):
                return True
        for t in self.transplants.get(node):
            # it might have been stripped
            if not hasnode(repo, t.lnode):
                self.transplants.remove(t)
                # NOTE(review): returning here skips any remaining records
                # for this node -- confirm that is intentional
                return False
            if t.lnode in repo.changelog.reachable(parent, stop=t.lnode):
                return True
        return False
88 88
    def apply(self, repo, source, revmap, merges, opts={}):
        '''apply the revisions in revmap one by one in revision order'''
        # NOTE(review): mutable default opts={} is shared across calls;
        # harmless here since it is only read, never mutated.
        revs = revmap.keys()
        revs.sort()

        p1, p2 = repo.dirstate.parents()
        pulls = []  # nodes that can be pulled directly instead of patched
        diffopts = patch.diffopts(self.ui, opts)
        diffopts.git = True

        lock = wlock = None
        try:
            wlock = repo.wlock()
            lock = repo.lock()
            for rev in revs:
                node = revmap[rev]
                revstr = '%s:%s' % (rev, revlog.short(node))

                if self.applied(repo, node, p1):
                    self.ui.warn(_('skipping already applied revision %s\n') %
                                 revstr)
                    continue

                parents = source.changelog.parents(node)
                if not opts.get('filter'):
                    # If the changeset parent is the same as the wdir's parent,
                    # just pull it.
                    if parents[0] == p1:
                        pulls.append(node)
                        p1 = node
                        continue
                    if pulls:
                        # flush accumulated pulls before patching this rev
                        if source != repo:
                            repo.pull(source, heads=pulls)
                        merge.update(repo, pulls[-1], False, False, None)
                        p1, p2 = repo.dirstate.parents()
                        pulls = []

                domerge = False
                if node in merges:
                    # pulling all the merge revs at once would mean we couldn't
                    # transplant after the latest even if transplants before them
                    # fail.
                    domerge = True
                    if not hasnode(repo, node):
                        repo.pull(source, heads=[node])

                if parents[1] != revlog.nullid:
                    # merge changesets cannot be expressed as a single diff
                    self.ui.note(_('skipping merge changeset %s:%s\n')
                                 % (rev, revlog.short(node)))
                    patchfile = None
                else:
                    # export this rev's diff to a temp patch file
                    fd, patchfile = tempfile.mkstemp(prefix='hg-transplant-')
                    fp = os.fdopen(fd, 'w')
                    patch.diff(source, parents[0], node, fp=fp, opts=diffopts)
                    fp.close()

                del revmap[rev]
                if patchfile or domerge:
                    try:
                        n = self.applyone(repo, node,
                                          source.changelog.read(node),
                                          patchfile, merge=domerge,
                                          log=opts.get('log'),
                                          filter=opts.get('filter'))
                        if n and domerge:
                            self.ui.status(_('%s merged at %s\n') % (revstr,
                                      revlog.short(n)))
                        elif n:
                            self.ui.status(_('%s transplanted to %s\n') % (revlog.short(node),
                                      revlog.short(n)))
                    finally:
                        if patchfile:
                            os.unlink(patchfile)
            if pulls:
                repo.pull(source, heads=pulls)
                merge.update(repo, pulls[-1], False, False, None)
        finally:
            # record what is still pending so 'transplant --continue' works
            self.saveseries(revmap, merges)
            self.transplants.write()
            del lock, wlock
170 170
    def filter(self, filter, changelog, patchfile):
        '''arbitrarily rewrite changeset before applying it'''

        self.ui.status('filtering %s\n' % patchfile)
        # changelog tuple fields: [1]=user, [2]=(time, tz), [4]=message
        user, date, msg = (changelog[1], changelog[2], changelog[4])

        # write the changeset header to a temp file so the external filter
        # command can edit both the metadata and the patch
        fd, headerfile = tempfile.mkstemp(prefix='hg-transplant-')
        fp = os.fdopen(fd, 'w')
        fp.write("# HG changeset patch\n")
        fp.write("# User %s\n" % user)
        fp.write("# Date %d %d\n" % date)
        fp.write(changelog[4])
        fp.close()

        try:
            util.system('%s %s %s' % (filter, util.shellquote(headerfile),
                                      util.shellquote(patchfile)),
                        environ={'HGUSER': changelog[1]},
                        onerr=util.Abort, errprefix=_('filter failed'))
            # re-read whatever metadata the filter left in the header file
            user, date, msg = self.parselog(file(headerfile))[1:4]
        finally:
            os.unlink(headerfile)

        return (user, date, msg)
195 195
    def applyone(self, repo, node, cl, patchfile, merge=False, log=False,
                 filter=None):
        '''apply the patch in patchfile to the repository as a transplant'''
        (manifest, user, (time, timezone), files, message) = cl[:5]
        date = "%d %d" % (time, timezone)
        extra = {'transplant_source': node}
        if filter:
            # the filter command may rewrite user/date/message
            (user, date, message) = self.filter(filter, cl, patchfile)

        if log:
            message += '\n(transplanted from %s)' % revlog.hex(node)

        self.ui.status(_('applying %s\n') % revlog.short(node))
        self.ui.note('%s %s\n%s\n' % (user, date, message))

        if not patchfile and not merge:
            raise util.Abort(_('can only omit patchfile if merging'))
        if patchfile:
            try:
                files = {}
                try:
                    fuzz = patch.patch(patchfile, self.ui, cwd=repo.root,
                                       files=files)
                    if not files:
                        self.ui.warn(_('%s: empty changeset') % revlog.hex(node))
                        return None
                finally:
                    files = patch.updatedir(self.ui, repo, files)
            except Exception, inst:
                # patch failed: journal enough state for --continue,
                # then tell the user to fix things up
                if filter:
                    os.unlink(patchfile)
                seriespath = os.path.join(self.path, 'series')
                if os.path.exists(seriespath):
                    os.unlink(seriespath)
                p1 = repo.dirstate.parents()[0]
                p2 = node
                self.log(user, date, message, p1, p2, merge=merge)
                self.ui.write(str(inst) + '\n')
                raise util.Abort(_('Fix up the merge and run hg transplant --continue'))
        else:
            files = None
        if merge:
            # commit with the transplanted node as second parent
            p1, p2 = repo.dirstate.parents()
            repo.dirstate.setparents(p1, node)

        n = repo.commit(files, message, user, date, extra=extra)
        if not merge:
            self.transplants.set(n, node)

        return n
246 246
    def resume(self, repo, source, opts=None):
        '''recover last transaction and apply remaining changesets'''
        # commit the interrupted transplant first, if a journal exists
        if os.path.exists(os.path.join(self.path, 'journal')):
            n, node = self.recover(repo)
            self.ui.status(_('%s transplanted as %s\n') % (revlog.short(node),
                                                           revlog.short(n)))
        seriespath = os.path.join(self.path, 'series')
        if not os.path.exists(seriespath):
            # nothing pending beyond the recovered changeset
            self.transplants.write()
            return
        nodes, merges = self.readseries()
        revmap = {}
        for n in nodes:
            revmap[source.changelog.rev(n)] = n
        os.unlink(seriespath)

        self.apply(repo, source, revmap, merges, opts)
264 264
    def recover(self, repo):
        '''commit working directory using journal metadata'''
        node, user, date, message, parents = self.readlog()
        merge = len(parents) == 2

        if not user or not date or not message or not parents[0]:
            raise util.Abort(_('transplant log file is corrupt'))

        extra = {'transplant_source': node}
        wlock = repo.wlock()
        try:
            # refuse to commit if the user moved away from the parent
            # recorded when the transplant was interrupted
            p1, p2 = repo.dirstate.parents()
            if p1 != parents[0]:
                raise util.Abort(
                    _('working dir not at transplant parent %s') %
                                 revlog.hex(parents[0]))
            if merge:
                repo.dirstate.setparents(p1, parents[1])
            n = repo.commit(None, message, user, date, extra=extra)
            if not n:
                raise util.Abort(_('commit failed'))
            if not merge:
                self.transplants.set(n, node)
            self.unlog()

            return n, node
        finally:
            del wlock
293 293
294 294 def readseries(self):
295 295 nodes = []
296 296 merges = []
297 297 cur = nodes
298 298 for line in self.opener('series').read().splitlines():
299 299 if line.startswith('# Merges'):
300 300 cur = merges
301 301 continue
302 302 cur.append(revlog.bin(line))
303 303
304 304 return (nodes, merges)
305 305
306 306 def saveseries(self, revmap, merges):
307 307 if not revmap:
308 308 return
309 309
310 310 if not os.path.isdir(self.path):
311 311 os.mkdir(self.path)
312 312 series = self.opener('series', 'w')
313 313 revs = revmap.keys()
314 314 revs.sort()
315 315 for rev in revs:
316 316 series.write(revlog.hex(revmap[rev]) + '\n')
317 317 if merges:
318 318 series.write('# Merges\n')
319 319 for m in merges:
320 320 series.write(revlog.hex(m) + '\n')
321 321 series.close()
322 322
323 323 def parselog(self, fp):
324 324 parents = []
325 325 message = []
326 326 node = revlog.nullid
327 327 inmsg = False
328 328 for line in fp.read().splitlines():
329 329 if inmsg:
330 330 message.append(line)
331 331 elif line.startswith('# User '):
332 332 user = line[7:]
333 333 elif line.startswith('# Date '):
334 334 date = line[7:]
335 335 elif line.startswith('# Node ID '):
336 336 node = revlog.bin(line[10:])
337 337 elif line.startswith('# Parent '):
338 338 parents.append(revlog.bin(line[9:]))
339 339 elif not line.startswith('#'):
340 340 inmsg = True
341 341 message.append(line)
342 342 return (node, user, date, '\n'.join(message), parents)
343 343
344 344 def log(self, user, date, message, p1, p2, merge=False):
345 345 '''journal changelog metadata for later recover'''
346 346
347 347 if not os.path.isdir(self.path):
348 348 os.mkdir(self.path)
349 349 fp = self.opener('journal', 'w')
350 350 fp.write('# User %s\n' % user)
351 351 fp.write('# Date %s\n' % date)
352 352 fp.write('# Node ID %s\n' % revlog.hex(p2))
353 353 fp.write('# Parent ' + revlog.hex(p1) + '\n')
354 354 if merge:
355 355 fp.write('# Parent ' + revlog.hex(p2) + '\n')
356 356 fp.write(message.rstrip() + '\n')
357 357 fp.close()
358 358
359 359 def readlog(self):
360 360 return self.parselog(self.opener('journal'))
361 361
362 362 def unlog(self):
363 363 '''remove changelog journal'''
364 364 absdst = os.path.join(self.path, 'journal')
365 365 if os.path.exists(absdst):
366 366 os.unlink(absdst)
367 367
368 368 def transplantfilter(self, repo, source, root):
369 369 def matchfn(node):
370 370 if self.applied(repo, node, root):
371 371 return False
372 372 if source.changelog.parents(node)[1] != revlog.nullid:
373 373 return False
374 374 extra = source.changelog.read(node)[5]
375 375 cnode = extra.get('transplant_source')
376 376 if cnode and self.applied(repo, cnode, root):
377 377 return False
378 378 return True
379 379
380 380 return matchfn
381 381
def hasnode(repo, node):
    '''Return True if repo knows about node, False otherwise.

    repo.changelog.rev() raises RevlogError for unknown nodes; that is
    mapped to a False return.'''
    try:
        # PEP 8: comparisons to None use identity, not equality
        return repo.changelog.rev(node) is not None
    except revlog.RevlogError:
        return False
387 387
def browserevs(ui, repo, nodes, opts):
    '''interactively transplant changesets

    Shows each candidate node and prompts for an action; returns the
    (transplants, merges) sequences selected by the user.'''
    def browsehelp(ui):
        # one-line summary of each interactive command
        ui.write('y: transplant this changeset\n'
                 'n: skip this changeset\n'
                 'm: merge at this changeset\n'
                 'p: show patch\n'
                 'c: commit selected changesets\n'
                 'q: cancel transplant\n'
                 '?: show this help\n')

    displayer = cmdutil.show_changeset(ui, repo, opts)
    transplants = []
    merges = []
    for node in nodes:
        displayer.show(changenode=node)
        action = None
        # prompt until the user picks a recognized action
        while not action:
            action = ui.prompt(_('apply changeset? [ynmpcq?]:'))
            if action == '?':
                browsehelp(ui)
                action = None
            elif action == 'p':
                # show the diff against the first parent, then re-prompt
                parent = repo.changelog.parents(node)[0]
                patch.diff(repo, parent, node)
                action = None
            elif action not in ('y', 'n', 'm', 'c', 'q'):
                ui.write('no such option\n')
                action = None
        if action == 'y':
            transplants.append(node)
        elif action == 'm':
            merges.append(node)
        elif action == 'c':
            # keep what was selected so far and stop browsing
            break
        elif action == 'q':
            # cancel: discard all selections
            transplants = ()
            merges = ()
            break
    return (transplants, merges)
428 428
def transplant(ui, repo, *revs, **opts):
    '''transplant changesets from another branch

    Selected changesets will be applied on top of the current working
    directory with the log of the original changeset. If --log is
    specified, log messages will have a comment appended of the form:

    (transplanted from CHANGESETHASH)

    You can rewrite the changelog message with the --filter option.
    Its argument will be invoked with the current changelog message
    as $1 and the patch as $2.

    If --source is specified, selects changesets from the named
    repository. If --branch is specified, selects changesets from the
    branch holding the named revision, up to that revision. If --all
    is specified, all changesets on the branch will be transplanted,
    otherwise you will be prompted to select the changesets you want.

    hg transplant --branch REVISION --all will rebase the selected branch
    (up to the named revision) onto your current working directory.

    You can optionally mark selected transplanted changesets as
    merge changesets. You will not be prompted to transplant any
    ancestors of a merged transplant, and you can merge descendants
    of them normally instead of transplanting them.

    If no merges or revisions are provided, hg transplant will start
    an interactive changeset browser.

    If a changeset application fails, you can fix the merge by hand and
    then resume where you left off by calling hg transplant --continue.
    '''
    # (the unused helper getoneitem and the dead locals revs/pulls that
    # used to live in this function have been removed)
    def getremotechanges(repo, url):
        # pull the incoming changesets from url; non-local sources are
        # spooled into a temporary bundle repository
        sourcerepo = ui.expandpath(url)
        source = hg.repository(ui, sourcerepo)
        incoming = repo.findincoming(source, force=True)
        if not incoming:
            return (source, None, None)

        bundle = None
        if not source.local():
            cg = source.changegroup(incoming, 'incoming')
            bundle = changegroup.writebundle(cg, None, 'HG10UN')
            source = bundlerepo.bundlerepository(ui, repo.root, bundle)

        return (source, incoming, bundle)

    def incwalk(repo, incoming, branches, match=util.always):
        # yield incoming nodes (restricted to branches) accepted by match
        if not branches:
            branches = None
        for node in repo.changelog.nodesbetween(incoming, branches)[0]:
            if match(node):
                yield node

    def transplantwalk(repo, root, branches, match=util.always):
        # yield local candidates between root and branches (all heads by
        # default) accepted by match
        if not branches:
            branches = repo.heads()
        ancestors = []
        for branch in branches:
            ancestors.append(repo.changelog.ancestor(root, branch))
        for node in repo.changelog.nodesbetween(ancestors, branches)[0]:
            if match(node):
                yield node

    def checkopts(opts, revs):
        # validate mutually-exclusive option combinations up front
        if opts.get('continue'):
            if filter(lambda opt: opts.get(opt), ('branch', 'all', 'merge')):
                raise util.Abort(_('--continue is incompatible with branch, all or merge'))
            return
        if not (opts.get('source') or revs or
                opts.get('merge') or opts.get('branch')):
            raise util.Abort(_('no source URL, branch tag or revision list provided'))
        if opts.get('all'):
            if not opts.get('branch'):
                raise util.Abort(_('--all requires a branch revision'))
            if revs:
                raise util.Abort(_('--all is incompatible with a revision list'))

    checkopts(opts, revs)

    # fall back to configuration for --log and --filter
    if not opts.get('log'):
        opts['log'] = ui.config('transplant', 'log')
    if not opts.get('filter'):
        opts['filter'] = ui.config('transplant', 'filter')

    tp = transplanter(ui, repo)

    p1, p2 = repo.dirstate.parents()
    if p1 == revlog.nullid:
        raise util.Abort(_('no revision checked out'))
    if not opts.get('continue'):
        # refuse to start on a dirty working directory
        if p2 != revlog.nullid:
            raise util.Abort(_('outstanding uncommitted merges'))
        m, a, r, d = repo.status()[:4]
        if m or a or r or d:
            raise util.Abort(_('outstanding local changes'))

    bundle = None
    source = opts.get('source')
    if source:
        (source, incoming, bundle) = getremotechanges(repo, source)
    else:
        source = repo

    try:
        if opts.get('continue'):
            tp.resume(repo, source, opts)
            return

        tf = tp.transplantfilter(repo, source, p1)
        if opts.get('prune'):
            prune = [source.lookup(r)
                     for r in cmdutil.revrange(source, opts.get('prune'))]
            matchfn = lambda x: tf(x) and x not in prune
        else:
            matchfn = tf
        branches = map(source.lookup, opts.get('branch', ()))
        merges = map(source.lookup, opts.get('merge', ()))
        revmap = {}
        if revs:
            # explicit revision list on the command line
            for r in cmdutil.revrange(source, revs):
                revmap[int(r)] = source.lookup(r)
        elif opts.get('all') or not merges:
            if source != repo:
                alltransplants = incwalk(source, incoming, branches,
                                         match=matchfn)
            else:
                alltransplants = transplantwalk(source, p1, branches,
                                                match=matchfn)
            if opts.get('all'):
                revs = alltransplants
            else:
                # let the user pick changesets interactively
                revs, newmerges = browserevs(ui, source, alltransplants, opts)
                merges.extend(newmerges)
            for r in revs:
                revmap[source.changelog.rev(r)] = r
        for r in merges:
            revmap[source.changelog.rev(r)] = r

        tp.apply(repo, source, revmap, merges, opts)
    finally:
        if bundle:
            # clean up the temporary bundle created for remote sources
            source.close()
            os.unlink(bundle)
584 584
# command table picked up by the extension loader: command name mapped to
# (callback, option list, synopsis string)
cmdtable = {
    "transplant":
        (transplant,
         [('s', 'source', '', _('pull patches from REPOSITORY')),
          ('b', 'branch', [], _('pull patches from branch BRANCH')),
          ('a', 'all', None, _('pull all changesets up to BRANCH')),
          ('p', 'prune', [], _('skip over REV')),
          ('m', 'merge', [], _('merge at REV')),
          ('', 'log', None, _('append transplant info to log message')),
          ('c', 'continue', None, _('continue last transplant session after repair')),
          ('', 'filter', '', _('filter changesets through FILTER'))],
         _('hg transplant [-s REPOSITORY] [-b BRANCH [-a]] [-p REV] [-m REV] [REV]...'))
}
@@ -1,107 +1,106 b''
1 1 # win32text.py - LF <-> CRLF translation utilities for Windows users
2 2 #
3 3 # This software may be used and distributed according to the terms
4 4 # of the GNU General Public License, incorporated herein by reference.
5 5 #
6 6 # To perform automatic newline conversion, use:
7 7 #
8 8 # [extensions]
9 9 # hgext.win32text =
10 10 # [encode]
11 11 # ** = cleverencode:
12 12 # [decode]
13 13 # ** = cleverdecode:
14 14 #
15 15 # If not doing conversion, to make sure you do not commit CRLF by accident:
16 16 #
17 17 # [hooks]
18 18 # pretxncommit.crlf = python:hgext.win32text.forbidcrlf
19 19 #
20 20 # To do the same check on a server to prevent CRLF from being pushed or pulled:
21 21 #
22 22 # [hooks]
23 23 # pretxnchangegroup.crlf = python:hgext.win32text.forbidcrlf
24 24
25 from mercurial import util, ui
26 25 from mercurial.i18n import gettext as _
27 26 from mercurial.node import bin, short
28 27 import re
29 28
30 29 # regexp for single LF without CR preceding.
31 30 re_single_lf = re.compile('(^|[^\r])\n', re.MULTILINE)
32 31
def dumbdecode(s, cmd, ui=None, repo=None, filename=None, **kwargs):
    """Decode filter: convert LF line endings to CRLF unconditionally."""
    # warn if already has CRLF in repository.
    # it might cause unexpected eol conversion.
    # see issue 302:
    # http://www.selenic.com/mercurial/bts/issue302
    if '\r\n' in s and ui and filename and repo:
        ui.warn(_('WARNING: %s already has CRLF line endings\n'
                  'and does not need EOL conversion by the win32text plugin.\n'
                  'Before your next commit, please reconsider your '
                  'encode/decode settings in \nMercurial.ini or %s.\n') %
                (filename, repo.join('hgrc')))
    # replace single LF to CRLF
    return re_single_lf.sub('\\1\r\n', s)
46 45
def dumbencode(s, cmd):
    """Encode filter: convert CRLF line endings to LF unconditionally
    (cmd is accepted for filter-signature compatibility and unused)."""
    return '\n'.join(s.split('\r\n'))
49 48
def clevertest(s, cmd):
    """Return True when s looks like text (contains no NUL byte),
    False when it looks binary."""
    return '\0' not in s
53 52
def cleverdecode(s, cmd, **kwargs):
    """LF -> CRLF, but leave data that looks binary untouched."""
    if not clevertest(s, cmd):
        return s
    return dumbdecode(s, cmd, **kwargs)
58 57
def cleverencode(s, cmd):
    """CRLF -> LF, but leave data that looks binary untouched."""
    if not clevertest(s, cmd):
        return s
    return dumbencode(s, cmd)
63 62
# filter names (as referenced from [encode]/[decode] hgrc sections) mapped
# to their implementations; the "clever" variants skip binary files
_filters = {
    'dumbdecode:': dumbdecode,
    'dumbencode:': dumbencode,
    'cleverdecode:': cleverdecode,
    'cleverencode:': cleverencode,
    }
70 69
def forbidcrlf(ui, repo, hooktype, node, **kwargs):
    """pretxncommit/pretxnchangegroup hook body: warn about text files
    with CRLF line endings in the incoming changesets and return True
    (veto the transaction) if any were found."""
    halt = False
    # scan every changeset added by this transaction: from the first new
    # node up to the current tip
    for rev in xrange(repo.changelog.rev(bin(node)), repo.changelog.count()):
        c = repo.changectx(rev)
        for f in c.files():
            if f not in c:
                # file was removed by this changeset
                continue
            data = c[f].data()
            # a NUL byte marks the file as binary: only check text files
            if '\0' not in data and '\r\n' in data:
                if not halt:
                    ui.warn(_('Attempt to commit or push text file(s) '
                              'using CRLF line endings\n'))
                ui.warn(_('in %s: %s\n') % (short(c.node()), f))
                halt = True
    if halt and hooktype == 'pretxnchangegroup':
        # pushed/pulled changesets cannot be fixed up locally; suggest
        # configuring the committing side instead
        ui.warn(_('\nTo prevent this mistake in your local repository,\n'
                  'add to Mercurial.ini or .hg/hgrc:\n'
                  '\n'
                  '[hooks]\n'
                  'pretxncommit.crlf = python:hgext.win32text.forbidcrlf\n'
                  '\n'
                  'and also consider adding:\n'
                  '\n'
                  '[extensions]\n'
                  'hgext.win32text =\n'
                  '[encode]\n'
                  '** = cleverencode:\n'
                  '[decode]\n'
                  '** = cleverdecode:\n'))
    return halt
101 100
def reposetup(ui, repo):
    """Register the win32text data filters on a newly set-up repository.

    Remote repositories have no working copy, so there is nothing to
    filter there.
    """
    if not repo.local():
        return
    for name in _filters:
        repo.adddatafilter(name, _filters[name])
107 106
@@ -1,193 +1,192 b''
1 1 # changelog.py - changelog class for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import bin, hex, nullid
9 9 from revlog import revlog
10 from i18n import _
11 import os, time, util
10 import util
12 11
13 12 def _string_escape(text):
14 13 """
15 14 >>> d = {'nl': chr(10), 'bs': chr(92), 'cr': chr(13), 'nul': chr(0)}
16 15 >>> s = "ab%(nl)scd%(bs)s%(bs)sn%(nul)sab%(cr)scd%(bs)s%(nl)s" % d
17 16 >>> s
18 17 'ab\\ncd\\\\\\\\n\\x00ab\\rcd\\\\\\n'
19 18 >>> res = _string_escape(s)
20 19 >>> s == res.decode('string_escape')
21 20 True
22 21 """
23 22 # subset of the string_escape codec
24 23 text = text.replace('\\', '\\\\').replace('\n', '\\n').replace('\r', '\\r')
25 24 return text.replace('\0', '\\0')
26 25
class appender:
    '''the changelog index must be update last on disk, so we use this class
    to delay writes to it'''
    def __init__(self, fp, buf):
        # buf: shared list collecting data "appended" past the real file
        self.data = buf
        self.fp = fp
        # virtual file position
        self.offset = fp.tell()
        # size of the on-disk part; offsets past it live in self.data
        self.size = util.fstat(fp).st_size

    def end(self):
        # virtual end of file: on-disk size plus buffered data
        return self.size + len("".join(self.data))
    def tell(self):
        return self.offset
    def flush(self):
        pass
    def close(self):
        self.fp.close()

    def seek(self, offset, whence=0):
        '''virtual file offset spans real file and data'''
        if whence == 0:
            self.offset = offset
        elif whence == 1:
            self.offset += offset
        elif whence == 2:
            self.offset = self.end() + offset
        if self.offset < self.size:
            # only reposition the real file while inside the on-disk part
            self.fp.seek(self.offset)

    def read(self, count=-1):
        '''only trick here is reads that span real file and data'''
        ret = ""
        if self.offset < self.size:
            # serve the on-disk part of the read first
            s = self.fp.read(count)
            ret = s
            self.offset += len(s)
            if count > 0:
                count -= len(s)
        if count != 0:
            # consolidate buffered chunks into one string, then serve the
            # remainder of the read from memory
            doff = self.offset - self.size
            self.data.insert(0, "".join(self.data))
            del self.data[1:]
            s = self.data[0][doff:doff+count]
            self.offset += len(s)
            ret += s
        return ret

    def write(self, s):
        self.data.append(str(s))
        self.offset += len(s)
77 76
class changelog(revlog):
    def __init__(self, opener):
        revlog.__init__(self, opener, "00changelog.i")

    def delayupdate(self):
        "delay visibility of index updates to other readers"
        self._realopener = self.opener
        self.opener = self._delayopener
        # revisions already present when the delay started; 0 means an
        # initial clone
        self._delaycount = self.count()
        self._delaybuf = []
        self._delayname = None

    def finalize(self, tr):
        "finalize index updates"
        self.opener = self._realopener
        # move redirected index data back into place
        if self._delayname:
            util.rename(self._delayname + ".a", self._delayname)
        elif self._delaybuf:
            fp = self.opener(self.indexfile, 'a')
            fp.write("".join(self._delaybuf))
            fp.close()
            del self._delaybuf
        # split when we're done
        self.checkinlinesize(tr)

    def _delayopener(self, name, mode='r'):
        # opener used while updates are delayed (see delayupdate)
        fp = self._realopener(name, mode)
        # only divert the index
        if not name == self.indexfile:
            return fp
        # if we're doing an initial clone, divert to another file
        if self._delaycount == 0:
            self._delayname = fp.name
            return self._realopener(name + ".a", mode)
        # otherwise, divert to memory
        return appender(fp, self._delaybuf)

    def checkinlinesize(self, tr, fp=None):
        # while updates are delayed the index must not be split
        if self.opener == self._delayopener:
            return
        return revlog.checkinlinesize(self, tr, fp)

    def decode_extra(self, text):
        # parse the NUL-separated, string_escape-encoded key:value pairs
        extra = {}
        for l in text.split('\0'):
            if l:
                k, v = l.decode('string_escape').split(':', 1)
                extra[k] = v
        return extra

    def encode_extra(self, d):
        # keys must be sorted to produce a deterministic changelog entry
        keys = d.keys()
        keys.sort()
        items = [_string_escape('%s:%s' % (k, d[k])) for k in keys]
        return "\0".join(items)

    def read(self, node):
        """
        format used:
        nodeid\n : manifest node in ascii
        user\n : user, no \n or \r allowed
        time tz extra\n : date (time is int or float, timezone is int)
                        : extra is metadatas, encoded and separated by '\0'
                        : older versions ignore it
        files\n\n : files modified by the cset, no \n or \r allowed
        (.*) : comment (free text, ideally utf-8)

        changelog v0 doesn't use extra
        """
        text = self.revision(node)
        if not text:
            # the null revision: synthesize an empty entry
            return (nullid, "", (0, 0), [], "", {'branch': 'default'})
        last = text.index("\n\n")
        desc = util.tolocal(text[last + 2:])
        l = text[:last].split('\n')
        manifest = bin(l[0])
        user = util.tolocal(l[1])

        extra_data = l[2].split(' ', 2)
        if len(extra_data) != 3:
            # old-style entry without an extra field
            time = float(extra_data.pop(0))
            try:
                # various tools did silly things with the time zone field.
                timezone = int(extra_data[0])
            except:
                timezone = 0
            extra = {}
        else:
            time, timezone, extra = extra_data
            time, timezone = float(time), int(timezone)
            extra = self.decode_extra(extra)
        if not extra.get('branch'):
            extra['branch'] = 'default'
        files = l[3:]
        return (manifest, user, (time, timezone), files, desc, extra)

    def add(self, manifest, list, desc, transaction, p1=None, p2=None,
            user=None, date=None, extra={}):
        # NOTE(review): 'extra' is a mutable default; the del below only
        # runs for caller-supplied dicts, so the shared default is never
        # mutated here
        user, desc = util.fromlocal(user), util.fromlocal(desc)

        if date:
            parseddate = "%d %d" % util.parsedate(date)
        else:
            parseddate = "%d %d" % util.makedate()
        if extra and extra.get("branch") in ("default", ""):
            # the default branch is implicit and not stored
            del extra["branch"]
        if extra:
            extra = self.encode_extra(extra)
            parseddate = "%s %s" % (parseddate, extra)
        list.sort()
        l = [hex(manifest), user, parseddate] + list + ["", desc]
        text = "\n".join(l)
        return self.addrevision(text, transaction, self.count(), p1, p2)
@@ -1,3179 +1,3179 b''
1 1 # commands.py - command processing for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import hex, nullid, nullrev, short
9 9 from i18n import _
10 10 import os, re, sys, urllib
11 11 import hg, util, revlog, bundlerepo, extensions
12 12 import difflib, patch, time, help, mdiff, tempfile
13 import errno, version, socket
13 import version, socket
14 14 import archival, changegroup, cmdutil, hgweb.server, sshserver, hbisect
15 15
16 16 # Commands start here, listed alphabetically
17 17
def add(ui, repo, *pats, **opts):
    """add the specified files on the next commit

    Schedule files to be version controlled and added to the repository.

    The files will be added to the repository at the next commit. To
    undo an add before that, see hg revert.

    If no names are given, add all files in the repository.
    """

    rejected = None
    exacts = {}
    names = []
    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
                                             badmatch=util.always):
        if exact:
            # file was named explicitly on the command line
            if ui.verbose:
                ui.status(_('adding %s\n') % rel)
            names.append(abs)
            exacts[abs] = 1
        elif abs not in repo.dirstate:
            ui.status(_('adding %s\n') % rel)
            names.append(abs)
    if not opts.get('dry_run'):
        rejected = repo.add(names)
        # only report rejections for files the user named explicitly
        rejected = [p for p in rejected if p in exacts]
    return rejected and 1 or 0
46 46
def addremove(ui, repo, *pats, **opts):
    """add all new files, delete all missing files

    Add all new files and remove all missing files from the repository.

    New files are ignored if they match any of the patterns in .hgignore. As
    with add, these changes take effect at the next commit.

    Use the -s option to detect renamed files. With a parameter > 0,
    this compares every removed file with every added file and records
    those similar enough as renames. This option takes a percentage
    between 0 (disabled) and 100 (files must be identical) as its
    parameter. Detecting renamed files this way can be expensive.
    """
    try:
        similarity = float(opts.get('similarity') or 0)
    except ValueError:
        raise util.Abort(_('similarity must be a number'))
    if similarity < 0 or similarity > 100:
        raise util.Abort(_('similarity must be between 0 and 100'))
    # cmdutil.addremove expects the similarity as a 0..1 fraction
    return cmdutil.addremove(repo, pats, opts, similarity=similarity / 100.)
68 68
def annotate(ui, repo, *pats, **opts):
    """show changeset information per file line

    List changes in files, showing the revision id responsible for each line

    This command is useful to discover who did a change or when a change took
    place.

    Without the -a option, annotate will avoid processing files it
    detects as binary. With -a, annotate will generate an annotation
    anyway, probably with undesirable results.
    """
    datefunc = ui.quiet and util.shortdate or util.datestr
    getdate = util.cachefunc(lambda x: datefunc(x[0].date()))

    if not pats:
        raise util.Abort(_('at least one file name or pattern required'))

    # candidate annotation columns in display order; each formatter maps
    # an annotation entry (whose [0] is a file context) to a string
    opmap = [('user', lambda x: ui.shortuser(x[0].user())),
             ('number', lambda x: str(x[0].rev())),
             ('changeset', lambda x: short(x[0].node())),
             ('date', getdate),
             ('follow', lambda x: x[0].path()),
            ]

    if (not opts['user'] and not opts['changeset'] and not opts['date']
        and not opts['follow']):
        # no column requested: default to the revision number
        opts['number'] = 1

    linenumber = opts.get('line_number') is not None
    if (linenumber and (not opts['changeset']) and (not opts['number'])):
        raise util.Abort(_('at least one of -n/-c is required for -l'))

    funcmap = [func for op, func in opmap if opts.get(op)]
    if linenumber:
        # append ":linenumber" (x[1]) to the last selected column
        lastfunc = funcmap[-1]
        funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])

    ctx = repo.changectx(opts['rev'])

    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
                                             node=ctx.node()):
        fctx = ctx.filectx(abs)
        if not opts['text'] and util.binary(fctx.data()):
            ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
            continue

        lines = fctx.annotate(follow=opts.get('follow'),
                              linenumber=linenumber)
        pieces = []

        # right-align every column to the width of its widest entry
        for f in funcmap:
            l = [f(n) for n, dummy in lines]
            if l:
                m = max(map(len, l))
                pieces.append(["%*s" % (m, x) for x in l])

        if pieces:
            for p, l in zip(zip(*pieces), lines):
                ui.write("%s: %s" % (" ".join(p), l[1]))
129 129
def archive(ui, repo, dest, **opts):
    '''create unversioned archive of a repository revision

    By default, the revision used is the parent of the working
    directory; use "-r" to specify a different revision.

    To specify the type of archive to create, use "-t". Valid
    types are:

    "files" (default): a directory full of files
    "tar": tar archive, uncompressed
    "tbz2": tar archive, compressed using bzip2
    "tgz": tar archive, compressed using gzip
    "uzip": zip archive, uncompressed
    "zip": zip archive, compressed using deflate

    The exact name of the destination archive or directory is given
    using a format string; see "hg help export" for details.

    Each member added to an archive file has a directory prefix
    prepended. Use "-p" to specify a format string for the prefix.
    The default is the basename of the archive, with suffixes removed.
    '''

    ctx = repo.changectx(opts['rev'])
    if not ctx:
        raise util.Abort(_('repository has no revisions'))
    node = ctx.node()
    # expand format specifiers (e.g. %h) in the destination name
    dest = cmdutil.make_filename(repo, dest, node)
    if os.path.realpath(dest) == repo.root:
        raise util.Abort(_('repository root cannot be destination'))
    dummy, matchfn, dummy = cmdutil.matchpats(repo, [], opts)
    kind = opts.get('type') or 'files'
    prefix = opts['prefix']
    if dest == '-':
        # archive to stdout; a plain directory cannot go there
        if kind == 'files':
            raise util.Abort(_('cannot archive plain files to stdout'))
        dest = sys.stdout
        if not prefix: prefix = os.path.basename(repo.root) + '-%h'
    prefix = cmdutil.make_filename(repo, prefix, node)
    archival.archive(repo, dest, node, kind, not opts['no_decode'],
                     matchfn, prefix)
172 172
def backout(ui, repo, node=None, rev=None, **opts):
    '''reverse effect of earlier changeset

    Commit the backed out changes as a new changeset. The new
    changeset is a child of the backed out changeset.

    If you back out a changeset other than the tip, a new head is
    created. This head will be the new tip and you should merge this
    backout changeset with another head (current one by default).

    The --merge option remembers the parent of the working directory
    before starting the backout, then merges the new head with that
    changeset afterwards. This saves you from doing the merge by
    hand. The result of this merge is not committed, as for a normal
    merge.

    See 'hg help dates' for a list of formats valid for -d/--date.
    '''
    # 'node' (positional) and '-r' are two spellings of the same argument
    if rev and node:
        raise util.Abort(_("please specify just one revision"))

    if not rev:
        rev = node

    if not rev:
        raise util.Abort(_("please specify a revision to backout"))

    date = opts.get('date')
    if date:
        opts['date'] = util.parsedate(date)

    cmdutil.bail_if_changed(repo)
    node = repo.lookup(rev)

    op1, op2 = repo.dirstate.parents()
    # only ancestors of the working directory parent can be backed out
    a = repo.changelog.ancestor(op1, node)
    if a != node:
        raise util.Abort(_('cannot back out change on a different branch'))

    p1, p2 = repo.changelog.parents(node)
    if p1 == nullid:
        raise util.Abort(_('cannot back out a change with no parents'))
    if p2 != nullid:
        # backing out a merge: the user must say which parent to revert to
        if not opts['parent']:
            raise util.Abort(_('cannot back out a merge changeset without '
                               '--parent'))
        p = repo.lookup(opts['parent'])
        if p not in (p1, p2):
            raise util.Abort(_('%s is not a parent of %s') %
                             (short(p), short(node)))
        parent = p
    else:
        if opts['parent']:
            raise util.Abort(_('cannot use --parent on non-merge changeset'))
        parent = p1

    # check out the changeset being backed out, revert it to its parent,
    # and commit the result as the backout changeset
    hg.clean(repo, node, show_stats=False)
    revert_opts = opts.copy()
    revert_opts['date'] = None
    revert_opts['all'] = True
    revert_opts['rev'] = hex(parent)
    revert_opts['no_backup'] = None
    revert(ui, repo, **revert_opts)
    commit_opts = opts.copy()
    commit_opts['addremove'] = False
    if not commit_opts['message'] and not commit_opts['logfile']:
        commit_opts['message'] = _("Backed out changeset %s") % (short(node))
        commit_opts['force_editor'] = True
    commit(ui, repo, **commit_opts)
    def nice(node):
        return '%d:%s' % (repo.changelog.rev(node), short(node))
    ui.status(_('changeset %s backs out changeset %s\n') %
              (nice(repo.changelog.tip()), nice(node)))
    if op1 != node:
        # restore the pre-backout working directory parent
        hg.clean(repo, op1, show_stats=False)
        if opts['merge']:
            ui.status(_('merging with changeset %s\n') % nice(repo.changelog.tip()))
            hg.merge(repo, hex(repo.changelog.tip()))
        else:
            ui.status(_('the backout changeset is a new head - '
                        'do not forget to merge\n'))
            ui.status(_('(use "backout --merge" '
                        'if you want to auto-merge)\n'))
256 256
def bisect(ui, repo, rev=None, extra=None,
           reset=None, good=None, bad=None, skip=None, noupdate=None):
    """subdivision search of changesets

    This command helps to find changesets which introduce problems.
    To use, mark the earliest changeset you know exhibits the problem
    as bad, then mark the latest changeset which is free from the
    problem as good. Bisect will update your working directory to a
    revision for testing. Once you have performed tests, mark the
    working directory as bad or good and bisect will either update to
    another candidate changeset or announce that it has found the bad
    revision.
    """
    # backward compatibility: accept the old 'hg bisect <cmd>' spelling
    # by translating the positional command word into the flag it means
    if rev in "good bad reset init".split():
        ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
        cmd, rev, extra = rev, extra, None
        if cmd == "good":
            good = True
        elif cmd == "bad":
            bad = True
        else:
            reset = True
    elif extra or good + bad + skip + reset > 1:
        raise util.Abort("Incompatible arguments")

    if reset:
        # --reset: forget all recorded bisection state and stop
        p = repo.join("bisect.state")
        if os.path.exists(p):
            os.unlink(p)
        return

    # load state: one "<kind> <node>" line per recorded revision
    state = {'good': [], 'bad': [], 'skip': []}
    if os.path.exists(repo.join("bisect.state")):
        for l in repo.opener("bisect.state"):
            kind, node = l[:-1].split()
            node = repo.lookup(node)
            if kind not in state:
                raise util.Abort(_("unknown bisect kind %s") % kind)
            state[kind].append(node)

    # update state with the revision being marked (default: working parent)
    node = repo.lookup(rev or '.')
    if good:
        state['good'].append(node)
    elif bad:
        state['bad'].append(node)
    elif skip:
        state['skip'].append(node)

    # save state atomically under the working-dir lock
    f = repo.opener("bisect.state", "w", atomictemp=True)
    wlock = repo.wlock()
    try:
        for kind in state:
            for node in state[kind]:
                f.write("%s %s\n" % (kind, hg.hex(node)))
        f.rename()
    finally:
        del wlock

    # nothing to bisect until at least one good and one bad rev are known
    if not state['good'] or not state['bad']:
        return

    # actually bisect
    node, changesets, good = hbisect.bisect(repo.changelog, state)
    if changesets == 0:
        # search converged: report the first good/bad revision
        ui.write(_("The first %s revision is:\n") % (good and "good" or "bad"))
        displayer = cmdutil.show_changeset(ui, repo, {})
        displayer.show(changenode=node)
    elif node is not None:
        # compute the approximate number of remaining tests (log2 of the
        # number of candidate changesets)
        tests, size = 0, 2
        while size <= changesets:
            tests, size = tests + 1, size * 2
        rev = repo.changelog.rev(node)
        ui.write(_("Testing changeset %s:%s "
                   "(%s changesets remaining, ~%s tests)\n")
                 % (rev, hg.short(node), changesets, tests))
        if not noupdate:
            # refuse to clobber local modifications, then check out the
            # next candidate for the user to test
            cmdutil.bail_if_changed(repo)
            return hg.clean(repo, node)
340 340
def branch(ui, repo, label=None, **opts):
    """set or show the current branch name

    With no argument, show the current branch name. With one argument,
    set the working directory branch name (the branch does not exist in
    the repository until the next commit).

    Unless --force is specified, branch will not let you set a
    branch name that shadows an existing branch.

    Use the command 'hg update' to switch to an existing branch.
    """

    # no label: report the working directory's branch and stop
    if not label:
        ui.write("%s\n" % util.tolocal(repo.dirstate.branch()))
        return

    # refuse to shadow an existing branch unless --force is given, but
    # always allow re-setting the branch of the current parents
    if not opts.get('force') and label in repo.branchtags():
        parentbranches = [p.branch() for p in repo.workingctx().parents()]
        if label not in parentbranches:
            raise util.Abort(_('a branch of the same name already exists'
                               ' (use --force to override)'))
    repo.dirstate.setbranch(util.fromlocal(label))
    ui.status(_('marked working directory as branch %s\n') % label)
363 363
def branches(ui, repo, active=False):
    """list repository named branches

    List the repository's named branches, indicating which ones are
    inactive. If active is specified, only show active branches.

    A branch is considered active if it contains unmerged heads.

    Use the command 'hg update' to switch to an existing branch.
    """
    # decorate each branch tip with (is-head, rev) so sorting puts the
    # most recent branches first and heads before non-heads at equal revs
    headset = dict.fromkeys(repo.heads(), 1)
    decorated = [((n in headset), repo.changelog.rev(n), n, t)
                 for t, n in repo.branchtags().items()]
    decorated.sort()
    decorated.reverse()
    hexfunc = ui.debugflag and hex or short
    for ishead, r, n, t in decorated:
        if active and not ishead:
            # only active branches wanted: everything after the first
            # inactive entry is also inactive, so stop here
            break
        if ui.quiet:
            ui.write("%s\n" % t)
        else:
            pad = " " * (30 - util.locallen(t))
            isinactive = ((not ishead) and " (inactive)") or ''
            ui.write("%s%s %s:%s%s\n" % (t, pad, r, hexfunc(n), isinactive))
394 394
def bundle(ui, repo, fname, dest=None, **opts):
    """create a changegroup file

    Generate a compressed changegroup file collecting changesets not
    found in the other repository.

    If no destination repository is specified the destination is
    assumed to have all the nodes specified by one or more --base
    parameters. To create a bundle containing all changesets, use
    --all (or --base null).

    The bundle file can then be transferred using conventional means and
    applied to another repository with the unbundle or pull command.
    This is useful when direct push and pull are not available or when
    exporting an entire repository is undesirable.

    Applying bundles preserves all changeset contents including
    permissions, copy/rename information, and revision history.
    """
    revs = opts.get('rev') or None
    if revs:
        revs = [repo.lookup(rev) for rev in revs]
    if opts.get('all'):
        base = ['null']
    else:
        base = opts.get('base')
    if base:
        if dest:
            raise util.Abort(_("--base is incompatible with specifying "
                               "a destination"))
        base = [repo.lookup(rev) for rev in base]
        # create the right base
        # XXX: nodesbetween / changegroup* should be "fixed" instead
        o = []
        has = {nullid: None}
        for n in base:
            has.update(repo.changelog.reachable(n))
        if revs:
            visit = list(revs)
        else:
            visit = repo.changelog.heads()
        seen = {}
        # walk ancestors of the requested heads, pruning anything already
        # reachable from a base; the roots found this way (nodes whose
        # parents are all in 'has') seed the changegroup
        while visit:
            n = visit.pop(0)
            parents = [p for p in repo.changelog.parents(n) if p not in has]
            if len(parents) == 0:
                o.insert(0, n)
            else:
                for p in parents:
                    if p not in seen:
                        seen[p] = 1
                        visit.append(p)
    else:
        # no --base: compare against the destination repository
        cmdutil.setremoteconfig(ui, opts)
        dest, revs, checkout = hg.parseurl(
            ui.expandpath(dest or 'default-push', dest or 'default'), revs)
        other = hg.repository(ui, dest)
        o = repo.findoutgoing(other, force=opts['force'])

    if revs:
        cg = repo.changegroupsubset(o, revs, 'bundle')
    else:
        cg = repo.changegroup(o, 'bundle')
    changegroup.writebundle(cg, fname, "HG10BZ")
459 459
def cat(ui, repo, file1, *pats, **opts):
    """output the current or given revision of files

    Print the specified files as they were at the given revision.
    If no revision is given, the parent of the working directory is used,
    or tip if no revision is checked out.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are the same as
    for the export command, with the following additions:

    %s   basename of file being printed
    %d   dirname of file being printed, or '.' if in repo root
    %p   root-relative path name of file being printed
    """
    ctx = repo.changectx(opts['rev'])
    # exit code 1 until at least one file has been printed
    exitcode = 1
    walker = cmdutil.walk(repo, (file1,) + pats, opts, ctx.node())
    for src, abspath, relpath, exact in walker:
        out = cmdutil.make_file(repo, opts['output'], ctx.node(),
                                pathname=abspath)
        contents = ctx.filectx(abspath).data()
        if opts.get('decode'):
            # apply the repository's decode filters on the way out
            contents = repo.wwritedata(abspath, contents)
        out.write(contents)
        exitcode = 0
    return exitcode
486 486
def clone(ui, source, dest=None, **opts):
    """make a copy of an existing repository

    Create a copy of an existing repository in a new directory.

    If no destination directory name is specified, it defaults to the
    basename of the source.

    The location of the source is added to the new repository's
    .hg/hgrc file, as the default to be used for future pulls.

    For efficiency, hardlinks are used for cloning whenever the source
    and destination are on the same filesystem (note this applies only
    to the repository data, not to the checked out files). Some
    filesystems, such as AFS, implement hardlinking incorrectly, but
    do not report errors. In these cases, use the --pull option to
    avoid hardlinking.

    You can safely clone repositories and checked out files using full
    hardlinks with

      $ cp -al REPO REPOCLONE

    which is the fastest way to clone. However, the operation is not
    atomic (making sure REPO is not modified during the operation is
    up to you) and you have to make sure your editor breaks hardlinks
    (Emacs and most Linux Kernel tools do so).

    If you use the -r option to clone up to a specific revision, no
    subsequent revisions will be present in the cloned repository.
    This option implies --pull, even on local repositories.

    See pull for valid source format details.

    It is possible to specify an ssh:// URL as the destination, but no
    .hg/hgrc and working directory will be created on the remote side.
    Look at the help text for the pull command for important details
    about ssh:// URLs.
    """
    cmdutil.setremoteconfig(ui, opts)
    # translate command-line options into hg.clone keyword arguments
    cloneopts = dict(pull=opts['pull'],
                     stream=opts['uncompressed'],
                     rev=opts['rev'],
                     update=not opts['noupdate'])
    hg.clone(ui, source, dest, **cloneopts)
532 532
def commit(ui, repo, *pats, **opts):
    """commit the specified files or all outstanding changes

    Commit changes to the given files into the repository.

    If a list of files is omitted, all changes reported by "hg status"
    will be committed.

    If no commit message is specified, the configured editor is started to
    enter a message.

    See 'hg help dates' for a list of formats valid for -d/--date.
    """
    # the actual commit is delegated to cmdutil.commit, which calls back
    # into this closure once the file list and message are resolved
    def docommit(ui, repo, files, message, match, opts):
        return repo.commit(files, message, opts['user'], opts['date'],
                           match, force_editor=opts.get('force_editor'))
    cmdutil.commit(ui, repo, docommit, pats, opts)
550 550
def copy(ui, repo, *pats, **opts):
    """mark files as copied for the next commit

    Mark dest as having copies of source files. If dest is a
    directory, copies are put in that directory. If dest is a file,
    there can only be one source.

    By default, this command copies the contents of files as they
    stand in the working directory. If invoked with --after, the
    operation is recorded, but no copying is performed.

    This command takes effect in the next commit. To undo a copy
    before that, see hg revert.
    """
    # hold the working-dir lock for the duration of the copy; dropping
    # the reference in the finally releases it even if cmdutil.copy raises
    wlock = repo.wlock(False)
    try:
        return cmdutil.copy(ui, repo, pats, opts)
    finally:
        del wlock
570 570
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    if len(args) == 3:
        # an explicit index file was given: open it directly
        index, rev1, rev2 = args
        rl = revlog.revlog(util.opener(os.getcwd(), audit=False), index)
    elif len(args) == 2:
        # no index file: fall back to the local repository's changelog
        if not repo:
            raise util.Abort(_("There is no Mercurial repository here "
                               "(.hg not found)"))
        rev1, rev2 = args
        rl = repo.changelog
    else:
        raise util.Abort(_('either two or three arguments required'))
    anc = rl.ancestor(rl.lookup(rev1), rl.lookup(rev2))
    ui.write("%d:%s\n" % (rl.rev(anc), hex(anc)))
586 586
def debugcomplete(ui, cmd='', **opts):
    """returns the completion list associated with the given command"""

    if not opts['options']:
        # complete command names
        names = cmdutil.findpossible(ui, cmd, table).keys()
        names.sort()
        ui.write("%s\n" % "\n".join(names))
        return

    # complete option flags: global options plus, when a command is
    # given, that command's own option table
    opttables = [globalopts]
    if cmd:
        aliases, entry = cmdutil.findcmd(ui, cmd, table)
        opttables.append(entry[1])
    flags = []
    for opttable in opttables:
        for opt in opttable:
            if opt[0]:
                flags.append('-%s' % opt[0])
            flags.append('--%s' % opt[1])
    ui.write("%s\n" % "\n".join(flags))
607 607
def debugfsinfo(ui, path = "."):
    """show filesystem capabilities (exec bit, symlinks, case folding)"""
    # create an empty probe file; use open() and close it explicitly
    # instead of relying on refcounting of the deprecated file() builtin
    fh = open('.debugfsinfo', 'w')
    fh.write('')
    fh.close()
    try:
        ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no'))
        ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no'))
        ui.write('case-sensitive: %s\n' % (util.checkfolding('.debugfsinfo')
                                           and 'yes' or 'no'))
    finally:
        # always remove the probe file, even if one of the checks raises
        os.unlink('.debugfsinfo')
615 615
def debugrebuildstate(ui, repo, rev=""):
    """rebuild the dirstate as it would look like for the given revision"""
    # an empty revision argument means the repository tip
    if rev == "":
        rev = repo.changelog.tip()
    manifest = repo.changectx(rev).manifest()
    wlock = repo.wlock()
    try:
        repo.dirstate.rebuild(rev, manifest)
    finally:
        del wlock
627 627
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    mf1 = repo.changectx(parent1).manifest()
    mf2 = repo.changectx(parent2).manifest()
    errors = 0
    # cross-check each dirstate entry against the parent manifests:
    # normal/removed must be in manifest1, added must not be, merged
    # must be in at least one of the two manifests
    for fname in repo.dirstate:
        st = repo.dirstate[fname]
        if st in "nr" and fname not in mf1:
            ui.warn(_("%s in state %s, but not in manifest1\n") % (fname, st))
            errors += 1
        if st in "a" and fname in mf1:
            ui.warn(_("%s in state %s, but also in manifest1\n") % (fname, st))
            errors += 1
        if st in "m" and fname not in mf1 and fname not in mf2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (fname, st))
            errors += 1
    # conversely, everything in the first manifest must be tracked
    for fname in mf1:
        st = repo.dirstate[fname]
        if st not in "nrm":
            ui.warn(_("%s in manifest1, but listed as state %s") % (fname, st))
            errors += 1
    if errors:
        error = _(".hg/dirstate inconsistent with current parent's manifest")
        raise util.Abort(error)
654 654
def showconfig(ui, repo, *values, **opts):
    """show combined config settings from all hgrc files

    With no args, print names and values of all config items.

    With one arg of the form section.name, print just the value of
    that config item.

    With multiple args, print names and values of all config items
    with matching section names."""

    untrusted = bool(opts.get('untrusted'))
    # at most one fully-qualified section.name item may be requested
    if values and len([v for v in values if '.' in v]) > 1:
        raise util.Abort(_('only one config item permitted'))
    for section, name, value in ui.walkconfig(untrusted=untrusted):
        fullname = section + '.' + name
        if not values:
            ui.write('%s=%s\n' % (fullname, value))
            continue
        for wanted in values:
            if wanted == section:
                # a bare section name matches every item in the section
                ui.write('%s=%s\n' % (fullname, value))
            elif wanted == fullname:
                # an exact section.name match prints just the value
                ui.write(value, '\n')
680 680
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care.
    """

    # the second parent defaults to the null revision
    if not rev2:
        rev2 = hex(nullid)

    # the dirstate may only be rewritten under the working-dir lock;
    # dropping the reference releases it even if a lookup fails
    wlock = repo.wlock()
    try:
        repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
    finally:
        del wlock
696 696
def debugstate(ui, repo):
    """show the contents of the current dirstate"""
    k = repo.dirstate._map.items()
    k.sort()
    for file_, ent in k:
        # ent[3] is used as a timestamp below; -1 marks an unset mtime
        if ent[3] == -1:
            # Pad or slice to locale representation
            locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(0)))
            timestr = 'unset'
            timestr = timestr[:locale_len] + ' '*(locale_len - len(timestr))
        else:
            timestr = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(ent[3]))
        # ent[1] holds mode bits; 020000 is the symlink flag
        if ent[1] & 020000:
            mode = 'lnk'
        else:
            mode = '%3o' % (ent[1] & 0777)
        # ent[0] is the state character, ent[2] presumably the recorded
        # file size — confirm against the dirstate format
        ui.write("%c %s %10d %s %s\n" % (ent[0], mode, ent[2], timestr, file_))
    for f in repo.dirstate.copies():
        ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
716 716
def debugdata(ui, file_, rev):
    """dump the contents of a data file revision"""
    # map the ".d" data file name onto its ".i" index companion
    indexname = file_[:-2] + ".i"
    rlog = revlog.revlog(util.opener(os.getcwd(), audit=False), indexname)
    try:
        ui.write(rlog.revision(rlog.lookup(rev)))
    except KeyError:
        raise util.Abort(_('invalid revision identifier %s') % rev)
724 724
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    # --extended enables the additional, more permissive date formats
    if opts["extended"]:
        parsed = util.parsedate(date, util.extendeddateformats)
    else:
        parsed = util.parsedate(date)
    ui.write("internal: %s %s\n" % parsed)
    ui.write("standard: %s\n" % util.datestr(parsed))
    if range:
        matchfn = util.matchdate(range)
        ui.write("match: %s\n" % matchfn(parsed[0]))
736 736
def debugindex(ui, file_):
    """dump the contents of an index file"""
    r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
    ui.write(" rev offset length base linkrev" +
             " nodeid p1 p2\n")
    for i in xrange(r.count()):
        node = r.node(i)
        try:
            pp = r.parents(node)
        except Exception:
            # damaged or unreadable entry: fall back to null parents so
            # the dump can continue; narrowed from a bare except so that
            # KeyboardInterrupt/SystemExit still propagate
            pp = [nullid, nullid]
        ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
                i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
                short(node), short(pp[0]), short(pp[1])))
751 751
def debugindexdot(ui, file_):
    """dump an index DAG as a .dot file"""
    rlog = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
    ui.write("digraph G {\n")
    # one edge per parent link; null second parents are omitted
    for rev in xrange(rlog.count()):
        node = rlog.node(rev)
        p1, p2 = rlog.parents(node)
        ui.write("\t%d -> %d\n" % (rlog.rev(p1), rev))
        if p2 != nullid:
            ui.write("\t%d -> %d\n" % (rlog.rev(p2), rev))
    ui.write("}\n")
763 763
def debuginstall(ui):
    '''test Mercurial installation'''

    def writetemp(contents):
        # write contents to a fresh temp file and return its path
        (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
        f = os.fdopen(fd, "wb")
        f.write(contents)
        f.close()
        return name

    # running total of detected problems; also the return value
    problems = 0

    # encoding
    ui.status(_("Checking encoding (%s)...\n") % util._encoding)
    try:
        util.fromlocal("test")
    except util.Abort, inst:
        ui.write(" %s\n" % inst)
        ui.write(_(" (check that your locale is properly set)\n"))
        problems += 1

    # compiled modules
    ui.status(_("Checking extensions...\n"))
    try:
        import bdiff, mpatch, base85
    except Exception, inst:
        ui.write(" %s\n" % inst)
        ui.write(_(" One or more extensions could not be found"))
        ui.write(_(" (check that you compiled the extensions)\n"))
        problems += 1

    # templates
    ui.status(_("Checking templates...\n"))
    try:
        import templater
        t = templater.templater(templater.templatepath("map-cmdline.default"))
    except Exception, inst:
        ui.write(" %s\n" % inst)
        ui.write(_(" (templates seem to have been installed incorrectly)\n"))
        problems += 1

    # patch: apply a known-good diff and verify the result round-trips
    ui.status(_("Checking patch...\n"))
    patchproblems = 0
    a = "1\n2\n3\n4\n"
    b = "1\n2\n3\ninsert\n4\n"
    fa = writetemp(a)
    d = mdiff.unidiff(a, None, b, None, os.path.basename(fa),
                      os.path.basename(fa))
    fd = writetemp(d)

    files = {}
    try:
        patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files)
    except util.Abort, e:
        ui.write(_(" patch call failed:\n"))
        ui.write(" " + str(e) + "\n")
        patchproblems += 1
    else:
        if list(files) != [os.path.basename(fa)]:
            ui.write(_(" unexpected patch output!\n"))
            patchproblems += 1
        a = file(fa).read()
        if a != b:
            ui.write(_(" patch test failed!\n"))
            patchproblems += 1

    if patchproblems:
        if ui.config('ui', 'patch'):
            ui.write(_(" (Current patch tool may be incompatible with patch,"
                       " or misconfigured. Please check your .hgrc file)\n"))
        else:
            ui.write(_(" Internal patcher failure, please report this error"
                       " to http://www.selenic.com/mercurial/bts\n"))
    problems += patchproblems

    os.unlink(fa)
    os.unlink(fd)

    # editor
    ui.status(_("Checking commit editor...\n"))
    editor = ui.geteditor()
    cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0])
    if not cmdpath:
        if editor == 'vi':
            ui.write(_(" No commit editor set and can't find vi in PATH\n"))
            ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
        else:
            ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
            ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
            problems += 1

    # check username: HGUSER, then ui.username config, then EMAIL
    ui.status(_("Checking username...\n"))
    user = os.environ.get("HGUSER")
    if user is None:
        user = ui.config("ui", "username")
    if user is None:
        user = os.environ.get("EMAIL")
    if not user:
        ui.warn(" ")
        ui.username()
        ui.write(_(" (specify a username in your .hgrc file)\n"))

    if not problems:
        ui.status(_("No problems detected\n"))
    else:
        ui.write(_("%s problems detected,"
                   " please check your install!\n") % problems)

    return problems
875 875
def debugrename(ui, repo, file1, *pats, **opts):
    """dump rename information"""

    ctx = repo.changectx(opts.get('rev', 'tip'))
    walker = cmdutil.walk(repo, (file1,) + pats, opts, ctx.node())
    for src, abspath, relpath, exact in walker:
        fctx = ctx.filectx(abspath)
        renamed = fctx.filelog().renamed(fctx.filenode())
        if not renamed:
            ui.write(_("%s not renamed\n") % relpath)
        else:
            ui.write(_("%s renamed from %s:%s\n") % (relpath, renamed[0],
                                                     hex(renamed[1])))
888 888
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    matched = list(cmdutil.walk(repo, pats, opts))
    if not matched:
        return
    # size the columns to the widest absolute and relative paths
    abswidth = max([len(a) for (s, a, r, e) in matched])
    relwidth = max([len(r) for (s, a, r, e) in matched])
    fmt = '%%s %%-%ds %%-%ds %%s' % (abswidth, relwidth)
    for src, abspath, relpath, exact in matched:
        flag = exact and 'exact' or ''
        ui.write("%s\n" % (fmt % (src, abspath, relpath, flag)).rstrip())
900 900
def diff(ui, repo, *pats, **opts):
    """diff repository (or selected files)

    Show differences between revisions for the specified files.

    Differences between files are shown using the unified diff format.

    NOTE: diff may generate unexpected results for merges, as it will
    default to comparing against the working directory's first parent
    changeset if no revisions are specified.

    When two revision arguments are given, then changes are shown
    between those revisions. If only one revision is specified then
    that revision is compared to the working directory, and, when no
    revisions are specified, the working directory files are compared
    to its parent.

    Without the -a option, diff will avoid generating diffs of files
    it detects as binary. With -a, diff will generate a diff anyway,
    probably with undesirable results.
    """
    # resolve the revision pair and the files/patterns to compare
    node1, node2 = cmdutil.revpair(repo, opts['rev'])
    fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
    diffopts = patch.diffopts(ui, opts)
    patch.diff(repo, node1, node2, fns, match=matchfn, opts=diffopts)
928 928
def export(ui, repo, *changesets, **opts):
    """dump the header and diffs for one or more changesets

    Print the changeset header and diffs for one or more revisions.

    The information shown in the changeset header is: author,
    changeset hash, parent(s) and commit comment.

    NOTE: export may generate unexpected diff output for merge changesets,
    as it will compare the merge changeset against its first parent only.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are as follows:

    %%   literal "%" character
    %H   changeset hash (40 bytes of hexadecimal)
    %N   number of patches being generated
    %R   changeset revision number
    %b   basename of the exporting repository
    %h   short-form changeset hash (12 bytes of hexadecimal)
    %n   zero-padded sequence number, starting at 1
    %r   zero-padded changeset revision number

    Without the -a option, export will avoid generating diffs of files
    it detects as binary. With -a, export will generate a diff anyway,
    probably with undesirable results.

    With the --switch-parent option, the diff will be against the second
    parent. It can be useful to review a merge.
    """
    if not changesets:
        raise util.Abort(_("export requires at least one changeset"))
    revs = cmdutil.revrange(repo, changesets)
    # note singular vs plural depending on how many revisions matched
    if len(revs) > 1:
        ui.note(_('exporting patches:\n'))
    else:
        ui.note(_('exporting patch:\n'))
    patch.export(repo, revs,
                 template=opts['output'],
                 switch_parent=opts['switch_parent'],
                 opts=patch.diffopts(ui, opts))
969 969
def grep(ui, repo, pattern, *pats, **opts):
    """search for a pattern in specified files and revisions

    Search revisions of files for a regular expression.

    This command behaves differently than Unix grep. It only accepts
    Python/Perl regexps. It searches repository history, not the
    working directory. It always prints the revision number in which
    a match appears.

    By default, grep only prints output for the first revision of a
    file in which it finds a match. To get it to print every revision
    that contains a change in match status ("-" for a match that
    becomes a non-match, or "+" for a non-match that becomes a match),
    use the --all flag.
    """
    reflags = 0
    if opts['ignore_case']:
        reflags |= re.I
    try:
        regexp = re.compile(pattern, reflags)
    except Exception, inst:
        ui.warn(_("grep: invalid match pattern: %s\n") % inst)
        return None
    # --print0 switches both separators to NUL for machine consumption
    sep, eol = ':', '\n'
    if opts['print0']:
        sep = eol = '\0'

    # cache of filelog objects, keyed by file name
    fcache = {}
    def getfile(fn):
        if fn not in fcache:
            fcache[fn] = repo.file(fn)
        return fcache[fn]

    def matchlines(body):
        # yield (linenum, colstart, colend, line) for each regexp match
        begin = 0
        linenum = 0
        while True:
            match = regexp.search(body, begin)
            if not match:
                break
            mstart, mend = match.span()
            linenum += body.count('\n', begin, mstart) + 1
            lstart = body.rfind('\n', begin, mstart) + 1 or begin
            lend = body.find('\n', mend)
            yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
            begin = lend + 1

    class linestate(object):
        # a matched line with its position; equality compares only the
        # text so difflib can spot match-status changes across revisions
        def __init__(self, line, linenum, colstart, colend):
            self.line = line
            self.linenum = linenum
            self.colstart = colstart
            self.colend = colend

        def __eq__(self, other):
            return self.line == other.line

    matches = {}
    copies = {}
    def grepbody(fn, rev, body):
        # record all matching lines of fn at rev into matches[rev][fn]
        matches[rev].setdefault(fn, [])
        m = matches[rev][fn]
        for lnum, cstart, cend, line in matchlines(body):
            s = linestate(line, lnum, cstart, cend)
            m.append(s)

    def difflinestates(a, b):
        # diff two linestate lists, yielding ('+'/'-', linestate) pairs
        sm = difflib.SequenceMatcher(None, a, b)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag == 'insert':
                for i in xrange(blo, bhi):
                    yield ('+', b[i])
            elif tag == 'delete':
                for i in xrange(alo, ahi):
                    yield ('-', a[i])
            elif tag == 'replace':
                for i in xrange(alo, ahi):
                    yield ('-', a[i])
                for i in xrange(blo, bhi):
                    yield ('+', b[i])

    # prev maps file name -> revision most recently seen for that file
    prev = {}
    def display(fn, rev, states, prevstates):
        # print matches for fn; with --all only status changes are shown
        datefunc = ui.quiet and util.shortdate or util.datestr
        found = False
        filerevmatches = {}
        r = prev.get(fn, -1)
        if opts['all']:
            iter = difflinestates(states, prevstates)
        else:
            iter = [('', l) for l in prevstates]
        for change, l in iter:
            cols = [fn, str(r)]
            if opts['line_number']:
                cols.append(str(l.linenum))
            if opts['all']:
                cols.append(change)
            if opts['user']:
                cols.append(ui.shortuser(get(r)[1]))
            if opts.get('date'):
                cols.append(datefunc(get(r)[2]))
            if opts['files_with_matches']:
                # only one line of output per (file, revision) pair
                c = (fn, r)
                if c in filerevmatches:
                    continue
                filerevmatches[c] = 1
            else:
                cols.append(l.line)
            ui.write(sep.join(cols), eol)
            found = True
        return found

    fstate = {}
    skip = {}
    get = util.cachefunc(lambda r: repo.changectx(r).changeset())
    changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
    found = False
    follow = opts.get('follow')
    # walkchangerevs yields a windowed event stream: 'window' resets
    # per-window match state, 'add' collects matches for a revision,
    # 'iter' is where output for a revision is emitted
    for st, rev, fns in changeiter:
        if st == 'window':
            matches.clear()
        elif st == 'add':
            ctx = repo.changectx(rev)
            matches[rev] = {}
            for fn in fns:
                if fn in skip:
                    continue
                try:
                    grepbody(fn, rev, getfile(fn).read(ctx.filenode(fn)))
                    fstate.setdefault(fn, [])
                    if follow:
                        # remember the copy source so skipping/state can
                        # be propagated across renames
                        copied = getfile(fn).renamed(ctx.filenode(fn))
                        if copied:
                            copies.setdefault(rev, {})[fn] = copied[0]
                except revlog.LookupError:
                    pass
        elif st == 'iter':
            states = matches[rev].items()
            states.sort()
            for fn, m in states:
                copy = copies.get(rev, {}).get(fn)
                if fn in skip:
                    if copy:
                        skip[copy] = True
                    continue
                if fn in prev or fstate[fn]:
                    r = display(fn, rev, m, fstate[fn])
                    found = found or r
                    if r and not opts['all']:
                        # without --all, stop after the first revision
                        # of a file that produced output
                        skip[fn] = True
                        if copy:
                            skip[copy] = True
                fstate[fn] = m
                if copy:
                    fstate[copy] = m
                prev[fn] = rev

    # flush any remaining per-file state after the walk completes
    fstate = fstate.items()
    fstate.sort()
    for fn, state in fstate:
        if fn in skip:
            continue
        if fn not in copies.get(prev[fn], {}):
            found = display(fn, rev, {}, state) or found
    return (not found and 1) or 0
1136 1136
def heads(ui, repo, *branchrevs, **opts):
    """show current repository heads or show branch heads

    With no arguments, show all repository head changesets.

    If branch or revisions names are given this will show the heads of
    the specified branches or the branches those revisions are tagged
    with.

    Repository "heads" are changesets that don't have child
    changesets. They are where development generally takes place and
    are the usual targets for update and merge operations.

    Branch heads are changesets that have a given branch tag, but have
    no child changesets with that tag. They are usually where
    development on the given branch takes place.
    """
    # Restrict the search to changesets reachable from --rev, if given.
    start = None
    if opts['rev']:
        start = repo.lookup(opts['rev'])

    if branchrevs:
        heads = []
        seen = util.set()
        for branchrev in branchrevs:
            # Each argument may be a branch name or any rev on the branch;
            # collapse duplicates so each branch is reported once.
            branch = repo.changectx(branchrev).branch()
            if branch in seen:
                continue
            seen.add(branch)
            bheads = repo.branchheads(branch, start)
            if not bheads:
                if branch == branchrev:
                    ui.warn(_("no changes on branch %s are reachable from %s\n")
                            % (branch, opts['rev']))
                else:
                    ui.warn(_("no changes on branch %s containing %s are "
                              "reachable from %s\n")
                            % (branch, branchrev, opts['rev']))
            heads.extend(bheads)
    else:
        # Assume we're looking repo-wide heads if no revs were specified.
        heads = repo.heads(start)

    if not heads:
        return 1

    displayer = cmdutil.show_changeset(ui, repo, opts)
    for n in heads:
        displayer.show(changenode=n)
1184 1184
def help_(ui, name=None, with_version=False):
    """show help for a command, extension, or list of commands

    With no arguments, print a list of commands and short help.

    Given a command name, print help for that command.

    Given an extension name, print help for that extension, and the
    commands it provides."""
    # Accumulates (title, options) sections; rendered at the very end so
    # all help variants share one option-formatting pass.
    option_lists = []

    # Queue either the full global-options table (verbose) or a one-line
    # hint telling the user how to see more.
    def addglobalopts(aliases):
        if ui.verbose:
            option_lists.append((_("global options:"), globalopts))
            if name == 'shortlist':
                option_lists.append((_('use "hg help" for the full list '
                                       'of commands'), ()))
        else:
            if name == 'shortlist':
                msg = _('use "hg help" for the full list of commands '
                        'or "hg -v" for details')
            elif aliases:
                msg = _('use "hg -v help%s" to show aliases and '
                        'global options') % (name and " " + name or "")
            else:
                msg = _('use "hg -v help %s" to show global options') % name
            option_lists.append((msg, ()))

    # Print help for a single command; raises cmdutil.UnknownCommand (via
    # findcmd) if `name` is not a command.
    def helpcmd(name):
        if with_version:
            version_(ui)
            ui.write('\n')
        aliases, i = cmdutil.findcmd(ui, name, table)
        # synopsis
        ui.write("%s\n" % i[2])

        # aliases
        if not ui.quiet and len(aliases) > 1:
            ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))

        # description (the command function's docstring)
        doc = i[0].__doc__
        if not doc:
            doc = _("(No help text available)")
        if ui.quiet:
            doc = doc.splitlines(0)[0]
        ui.write("\n%s\n" % doc.rstrip())

        if not ui.quiet:
            # options
            if i[1]:
                option_lists.append((_("options:\n"), i[1]))

            addglobalopts(False)

    # Print a name -> one-line-summary table for the commands in `table`,
    # optionally filtered by `select`.
    def helplist(header, select=None):
        h = {}
        cmds = {}
        for c, e in table.items():
            # table keys look like "^name|alias1|alias2"; '^' marks the
            # basic (shortlist) commands.
            f = c.split("|", 1)[0]
            if select and not select(f):
                continue
            if name == "shortlist" and not f.startswith("^"):
                continue
            f = f.lstrip("^")
            if not ui.debugflag and f.startswith("debug"):
                continue
            doc = e[0].__doc__
            if not doc:
                doc = _("(No help text available)")
            h[f] = doc.splitlines(0)[0].rstrip()
            cmds[f] = c.lstrip("^")

        if not h:
            ui.status(_('no commands defined\n'))
            return

        ui.status(header)
        fns = h.keys()
        fns.sort()
        m = max(map(len, fns))
        for f in fns:
            if ui.verbose:
                commands = cmds[f].replace("|",", ")
                ui.write(" %s:\n %s\n"%(commands, h[f]))
            else:
                ui.write(' %-*s %s\n' % (m, f, h[f]))

        if not ui.quiet:
            addglobalopts(True)

    # Print help for a named help topic; raises cmdutil.UnknownCommand if
    # no topic matches.  NOTE: the loop does not break, so the LAST
    # matching helptable entry wins.
    def helptopic(name):
        v = None
        for i in help.helptable:
            l = i.split('|')
            if name in l:
                v = i
                header = l[-1]
        if not v:
            raise cmdutil.UnknownCommand(name)

        # description (may be a callable producing the text lazily)
        doc = help.helptable[v]
        if not doc:
            doc = _("(No help text available)")
        if callable(doc):
            doc = doc()

        ui.write("%s\n" % header)
        ui.write("%s\n" % doc.rstrip())

    # Print help for a loaded extension and list the commands it adds;
    # raises cmdutil.UnknownCommand if no such extension is loaded.
    def helpext(name):
        try:
            mod = extensions.find(name)
        except KeyError:
            raise cmdutil.UnknownCommand(name)

        doc = (mod.__doc__ or _('No help text available')).splitlines(0)
        ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
        for d in doc[1:]:
            ui.write(d, '\n')

        ui.status('\n')

        try:
            ct = mod.cmdtable
        except AttributeError:
            ct = {}

        modcmds = dict.fromkeys([c.split('|', 1)[0] for c in ct])
        helplist(_('list of commands:\n\n'), modcmds.has_key)

    if name and name != 'shortlist':
        # Try the name as a command, then a topic, then an extension;
        # only re-raise UnknownCommand if all three interpretations fail.
        i = None
        for f in (helpcmd, helptopic, helpext):
            try:
                f(name)
                i = None
                break
            except cmdutil.UnknownCommand, inst:
                i = inst
        if i:
            raise i

    else:
        # program name
        if ui.verbose or with_version:
            version_(ui)
        else:
            ui.status(_("Mercurial Distributed SCM\n"))
        ui.status('\n')

        # list of commands
        if name == "shortlist":
            header = _('basic commands:\n\n')
        else:
            header = _('list of commands:\n\n')

        helplist(header)

    # list all option lists queued above; entries with a None second
    # element are section titles.
    opt_output = []
    for title, options in option_lists:
        opt_output.append(("\n%s" % title, None))
        for shortopt, longopt, default, desc in options:
            if "DEPRECATED" in desc and not ui.verbose: continue
            opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
                                          longopt and " --%s" % longopt),
                              "%s%s" % (desc,
                                        default
                                        and _(" (default: %s)") % default
                                        or "")))

    if opt_output:
        # align descriptions on the longest option string
        opts_len = max([len(line[0]) for line in opt_output if line[1]] or [0])
        for first, second in opt_output:
            if second:
                ui.write(" %-*s %s\n" % (opts_len, first, second))
            else:
                ui.write("%s\n" % first)
1365 1365
def identify(ui, repo, source=None,
             rev=None, num=None, id=None, branch=None, tags=None):
    """identify the working copy or specified revision

    With no revision, print a summary of the current state of the repo.

    With a path, do a lookup in another repository.

    This summary identifies the repository state using one or two parent
    hash identifiers, followed by a "+" if there are uncommitted changes
    in the working directory, a list of tags for this revision and a branch
    name for non-default branches.
    """

    if not repo and not source:
        raise util.Abort(_("There is no Mercurial repository here "
                           "(.hg not found)"))

    # --debug selects the full-length hash formatter (hex) over short.
    if ui.debugflag:
        hexfunc = hex
    else:
        hexfunc = short
    # With no explicit field flags, print the default summary.
    default = not (num or id or branch or tags)
    output = []

    if source:
        # Identify a remote repository instead of the local working copy.
        source, revs, checkout = hg.parseurl(ui.expandpath(source), [])
        srepo = hg.repository(ui, source)
        if not rev and revs:
            rev = revs[0]
        if not rev:
            rev = "tip"
        if num or branch or tags:
            raise util.Abort(
                "can't query remote revision number, branch, or tags")
        output = [hexfunc(srepo.lookup(rev))]
    elif not rev:
        # Working directory: may have one or two parents and local changes.
        ctx = repo.workingctx()
        parents = ctx.parents()
        changed = False
        if default or id or num:
            changed = ctx.files() + ctx.deleted()
        dirty = changed and "+" or ""
        if default or id:
            output = ['+'.join([hexfunc(p.node()) for p in parents]) + dirty]
        if num:
            output.append('+'.join([str(p.rev()) for p in parents]) + dirty)
    else:
        ctx = repo.changectx(rev)
        if default or id:
            output = [hexfunc(ctx.node())]
        if num:
            output.append(str(ctx.rev()))

    if not source and default and not ui.quiet:
        b = util.tolocal(ctx.branch())
        if b != 'default':
            output.append("(%s)" % b)

        # multiple tags for a single parent separated by '/'
        t = "/".join(ctx.tags())
        if t:
            output.append(t)

    if branch:
        output.append(util.tolocal(ctx.branch()))

    if tags:
        output.extend(ctx.tags())

    ui.write("%s\n" % ' '.join(output))
1435 1435
def import_(ui, repo, patch1, *patches, **opts):
    """import an ordered set of patches

    Import a list of patches and commit them individually.

    If there are outstanding changes in the working directory, import
    will abort unless given the -f flag.

    You can import a patch straight from a mail message. Even patches
    as attachments work (body part must be type text/plain or
    text/x-patch to be used). From and Subject headers of email
    message are used as default committer and commit message. All
    text/plain body parts before first diff are added to commit
    message.

    If the imported patch was generated by hg export, user and description
    from patch override values from message headers and body. Values
    given on command line with -m and -u override these.

    If --exact is specified, import will set the working directory
    to the parent of each patch before applying it, and will abort
    if the resulting changeset has a different ID than the one
    recorded in the patch. This may happen due to character set
    problems or other deficiencies in the text patch format.

    To read a patch from standard input, use patch name "-".
    See 'hg help dates' for a list of formats valid for -d/--date.
    """
    patches = (patch1,) + patches

    date = opts.get('date')
    if date:
        opts['date'] = util.parsedate(date)

    # --exact always requires a clean working dir; otherwise only -f
    # allows importing over local changes.
    if opts.get('exact') or not opts['force']:
        cmdutil.bail_if_changed(repo)

    d = opts["base"]
    strip = opts["strip"]
    # Hold both locks for the whole series so the patches commit atomically
    # with respect to other writers.
    wlock = lock = None
    try:
        wlock = repo.wlock()
        lock = repo.lock()
        for p in patches:
            pf = os.path.join(d, p)

            # Patch source: stdin ("-"), a local file, or a URL.
            if pf == '-':
                ui.status(_("applying patch from stdin\n"))
                data = patch.extract(ui, sys.stdin)
            else:
                ui.status(_("applying %s\n") % p)
                if os.path.exists(pf):
                    data = patch.extract(ui, file(pf, 'rb'))
                else:
                    data = patch.extract(ui, urllib.urlopen(pf))
            # patch.extract writes the diff to a temp file and returns the
            # metadata parsed from the message/export header.
            tmpname, message, user, date, branch, nodeid, p1, p2 = data

            if tmpname is None:
                raise util.Abort(_('no diffs found'))

            try:
                cmdline_message = cmdutil.logmessage(opts)
                if cmdline_message:
                    # pickup the cmdline msg
                    message = cmdline_message
                elif message:
                    # pickup the patch msg
                    message = message.strip()
                else:
                    # launch the editor
                    message = None
                ui.debug(_('message:\n%s\n') % message)

                wp = repo.workingctx().parents()
                if opts.get('exact'):
                    # --exact: position the working dir exactly on the
                    # patch's recorded parents before applying.
                    if not nodeid or not p1:
                        raise util.Abort(_('not a mercurial patch'))
                    p1 = repo.lookup(p1)
                    p2 = repo.lookup(p2 or hex(nullid))

                    if p1 != wp[0].node():
                        hg.clean(repo, p1)
                    repo.dirstate.setparents(p1, p2)
                elif p2:
                    # Patch of a merge: adopt its parents only if the first
                    # one matches the current working dir parent.
                    try:
                        p1 = repo.lookup(p1)
                        p2 = repo.lookup(p2)
                        if p1 == wp[0].node():
                            repo.dirstate.setparents(p1, p2)
                    except hg.RepoError:
                        pass
                if opts.get('exact') or opts.get('import_branch'):
                    repo.dirstate.setbranch(branch or 'default')

                files = {}
                try:
                    fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
                                       files=files)
                finally:
                    # Record adds/removes even if patching failed partway.
                    files = patch.updatedir(ui, repo, files)
                if not opts.get('no_commit'):
                    n = repo.commit(files, message, opts.get('user') or user,
                                    opts.get('date') or date)
                    if opts.get('exact'):
                        # Resulting node must match the recorded one, else
                        # undo the commit.
                        if hex(n) != nodeid:
                            repo.rollback()
                            raise util.Abort(_('patch is damaged'
                                               ' or loses information'))
                    # Force a dirstate write so that the next transaction
                    # backups an up-do-date file.
                    repo.dirstate.write()
            finally:
                os.unlink(tmpname)
    finally:
        # Release locks (lock objects release on deallocation).
        del lock, wlock
1551 1551
def incoming(ui, repo, source="default", **opts):
    """show new changesets found in source

    Show new changesets found in the specified path/URL or the default
    pull location. These are the changesets that would be pulled if a pull
    was requested.

    For remote repository, using --bundle avoids downloading the changesets
    twice if the incoming is followed by a pull.

    See pull for valid source format details.
    """
    limit = cmdutil.loglimit(opts)
    source, revs, checkout = hg.parseurl(ui.expandpath(source), opts['rev'])
    cmdutil.setremoteconfig(ui, opts)

    other = hg.repository(ui, source)
    ui.status(_('comparing with %s\n') % util.hidepassword(source))
    if revs:
        revs = [other.lookup(rev) for rev in revs]
    incoming = repo.findincoming(other, heads=revs, force=opts["force"])
    if not incoming:
        # Best-effort removal of a stale --bundle file; errors (e.g. the
        # file not existing) are deliberately ignored.
        try:
            os.unlink(opts["bundle"])
        except:
            pass
        ui.status(_("no changes found\n"))
        return 1

    # cleanup holds the path of a temporary bundle file to delete on exit.
    cleanup = None
    try:
        fname = opts["bundle"]
        if fname or not other.local():
            # create a bundle (uncompressed if other repo is not local)
            if revs is None:
                cg = other.changegroup(incoming, "incoming")
            else:
                cg = other.changegroupsubset(incoming, revs, 'incoming')
            bundletype = other.local() and "HG10BZ" or "HG10UN"
            fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
            # keep written bundle?
            if opts["bundle"]:
                cleanup = None
        if not other.local():
            # use the created uncompressed bundlerepo
            other = bundlerepo.bundlerepository(ui, repo.root, fname)

        o = other.changelog.nodesbetween(incoming, revs)[0]
        if opts['newest_first']:
            o.reverse()
        displayer = cmdutil.show_changeset(ui, other, opts)
        count = 0
        for n in o:
            if count >= limit:
                break
            parents = [p for p in other.changelog.parents(n) if p != nullid]
            if opts['no_merges'] and len(parents) == 2:
                continue
            count += 1
            displayer.show(changenode=n)
    finally:
        # `other` may be a remote repo (has close()) or a local one (no-op).
        if hasattr(other, 'close'):
            other.close()
        if cleanup:
            os.unlink(cleanup)
1617 1617
def init(ui, dest=".", **opts):
    """create a new repository in the given directory

    Initialize a new repository in the given directory. If the given
    directory does not exist, it is created.

    If no directory is given, the current directory is used.

    It is possible to specify an ssh:// URL as the destination.
    Look at the help text for the pull command for important details
    about ssh:// URLs.
    """
    # Push remote-access options (e.g. --ssh/--remotecmd) into the ui
    # config so ssh:// destinations work.
    cmdutil.setremoteconfig(ui, opts)
    # create=1 makes hg.repository initialize a repo instead of opening one.
    hg.repository(ui, dest, create=1)
1632 1632
def locate(ui, repo, *pats, **opts):
    """locate files matching specific patterns

    Print all files under Mercurial control whose names match the
    given patterns.

    This command searches the entire repository by default. To search
    just the current directory and its subdirectories, use
    "--include .".

    If no patterns are given to match, this command prints all file
    names.

    If you want to feed the output of this command into the "xargs"
    command, use the "-0" option to both this command and "xargs".
    This will avoid the problem of "xargs" treating single filenames
    that contain white space as multiple filenames.
    """
    # -0 terminates entries with NUL for safe xargs consumption.
    if opts['print0']:
        end = '\0'
    else:
        end = '\n'
    node = None
    if opts['rev']:
        node = repo.lookup(opts['rev'])

    found = False
    walker = cmdutil.walk(repo, pats, opts, node=node,
                          badmatch=util.always, default='relglob')
    for src, abs, rel, exact in walker:
        if src == 'b':
            # 'b' entries are bad/unmatched files; skip them
            continue
        if not node and abs not in repo.dirstate:
            # without --rev, only list tracked files
            continue
        if opts['fullpath']:
            name = os.path.join(repo.root, abs)
        elif pats and rel:
            name = rel
        else:
            name = abs
        ui.write(name, end)
        found = True

    # exit status 0 only if something matched
    return not found and 1 or 0
1673 1673
def log(ui, repo, *pats, **opts):
    """show revision history of entire repository or files

    Print the revision history of the specified files or the entire
    project.

    File history is shown without following rename or copy history of
    files. Use -f/--follow with a file name to follow history across
    renames and copies. --follow without a file name will only show
    ancestors or descendants of the starting revision. --follow-first
    only follows the first parent of merge revisions.

    If no revision range is specified, the default is tip:0 unless
    --follow is set, in which case the working directory parent is
    used as the starting revision.

    See 'hg help dates' for a list of formats valid for -d/--date.

    By default this command outputs: changeset id and hash, tags,
    non-trivial parents, user, date and time, and a summary for each
    commit. When the -v/--verbose switch is used, the list of changed
    files and full commit message is shown.

    NOTE: log -p may generate unexpected diff output for merge
    changesets, as it will compare the merge changeset against its
    first parent only. Also, the files: list will only reflect files
    that are different from BOTH parents.

    """

    # Memoized changeset accessor; walkchangerevs and the filters below
    # share it so each changeset is parsed at most once.
    get = util.cachefunc(lambda r: repo.changectx(r).changeset())
    changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)

    limit = cmdutil.loglimit(opts)
    count = 0

    # Rename lookups (--copies) only need to scan filelogs up to the
    # highest requested rev.
    if opts['copies'] and opts['rev']:
        endrev = max(cmdutil.revrange(repo, opts['rev'])) + 1
    else:
        endrev = repo.changelog.count()
    # rcache: filename -> {linkrev: rename info}
    # ncache: filename -> {filenode: rename info}; populated below but not
    # read within this function.
    rcache = {}
    ncache = {}
    def getrenamed(fn, rev):
        '''looks up all renames for a file (up to endrev) the first
        time the file is given. It indexes on the changerev and only
        parses the manifest if linkrev != changerev.
        Returns rename info for fn at changerev rev.'''
        if fn not in rcache:
            # First request for this file: scan its whole filelog once.
            rcache[fn] = {}
            ncache[fn] = {}
            fl = repo.file(fn)
            for i in xrange(fl.count()):
                node = fl.node(i)
                lr = fl.linkrev(node)
                renamed = fl.renamed(node)
                rcache[fn][lr] = renamed
                if renamed:
                    ncache[fn][node] = renamed
                if lr >= endrev:
                    break
        if rev in rcache[fn]:
            return rcache[fn][rev]

        # If linkrev != rev (i.e. rev not found in rcache) fallback to
        # filectx logic.

        try:
            return repo.changectx(rev).filectx(fn).renamed()
        except revlog.LookupError:
            pass
        return None

    # df becomes a date-matching predicate when -d/--date is given.
    df = False
    if opts["date"]:
        df = util.matchdate(opts["date"])

    only_branches = opts['only_branch']

    displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
    # walkchangerevs yields ('add', rev) to queue a changeset and
    # ('iter', rev) when it should actually be flushed/printed.
    for st, rev, fns in changeiter:
        if st == 'add':
            changenode = repo.changelog.node(rev)
            parents = [p for p in repo.changelog.parentrevs(rev)
                       if p != nullrev]
            if opts['no_merges'] and len(parents) == 2:
                continue
            if opts['only_merges'] and len(parents) != 2:
                continue

            if only_branches:
                # changeset tuple index 5 is the extra dict
                revbranch = get(rev)[5]['branch']
                if revbranch not in only_branches:
                    continue

            if df:
                changes = get(rev)
                if not df(changes[2][0]):
                    continue

            if opts['keyword']:
                # -k: keep only changesets whose user, description or file
                # list contains every keyword (case-insensitive).
                changes = get(rev)
                miss = 0
                for k in [kw.lower() for kw in opts['keyword']]:
                    if not (k in changes[1].lower() or
                            k in changes[4].lower() or
                            k in " ".join(changes[3]).lower()):
                        miss = 1
                        break
                if miss:
                    continue

            copies = []
            if opts.get('copies') and rev:
                for fn in get(rev)[3]:
                    rename = getrenamed(fn, rev)
                    if rename:
                        copies.append((fn, rename[0]))
            displayer.show(rev, changenode, copies=copies)
        elif st == 'iter':
            if count == limit: break
            if displayer.flush(rev):
                count += 1
1796 1796
1797 1797 def manifest(ui, repo, node=None, rev=None):
1798 1798 """output the current or given revision of the project manifest
1799 1799
1800 1800 Print a list of version controlled files for the given revision.
1801 1801 If no revision is given, the parent of the working directory is used,
1802 1802 or tip if no revision is checked out.
1803 1803
1804 1804 The manifest is the list of files being version controlled. If no revision
1805 1805 is given then the first parent of the working directory is used.
1806 1806
1807 1807 With -v flag, print file permissions, symlink and executable bits. With
1808 1808 --debug flag, print file revision hashes.
1809 1809 """
1810 1810
1811 1811 if rev and node:
1812 1812 raise util.Abort(_("please specify just one revision"))
1813 1813
1814 1814 if not node:
1815 1815 node = rev
1816 1816
1817 1817 m = repo.changectx(node).manifest()
1818 1818 files = m.keys()
1819 1819 files.sort()
1820 1820
1821 1821 for f in files:
1822 1822 if ui.debugflag:
1823 1823 ui.write("%40s " % hex(m[f]))
1824 1824 if ui.verbose:
1825 1825 type = m.execf(f) and "*" or m.linkf(f) and "@" or " "
1826 1826 perm = m.execf(f) and "755" or "644"
1827 1827 ui.write("%3s %1s " % (perm, type))
1828 1828 ui.write("%s\n" % f)
1829 1829
def merge(ui, repo, node=None, force=None, rev=None):
    """merge working directory with another revision

    Merge the contents of the current working directory and the
    requested revision. Files that changed between either parent are
    marked as changed for the next commit and a commit must be
    performed before any further updates are allowed.

    If no revision is specified, the working directory's parent is a
    head revision, and the repository contains exactly one other head,
    the other head is merged with by default. Otherwise, an explicit
    revision to merge with must be provided.
    """

    if rev and node:
        raise util.Abort(_("please specify just one revision"))
    node = node or rev

    if not node:
        # No explicit target: only merge automatically when there are
        # exactly two heads and the working dir sits on one of them.
        heads = repo.heads()
        nheads = len(heads)
        if nheads > 2:
            raise util.Abort(_('repo has %d heads - '
                               'please merge with an explicit rev') %
                             nheads)
        parent = repo.dirstate.parents()[0]
        if nheads == 1:
            msg = _('there is nothing to merge')
            if parent != repo.lookup(repo.workingctx().branch()):
                msg = _('%s - use "hg update" instead') % msg
            raise util.Abort(msg)
        if parent not in heads:
            raise util.Abort(_('working dir not at a head rev - '
                               'use "hg update" or merge with an explicit rev'))
        # merge with whichever head is not the working dir parent
        if parent == heads[0]:
            node = heads[-1]
        else:
            node = heads[0]
    return hg.merge(repo, node, force=force)
1867 1867
def outgoing(ui, repo, dest=None, **opts):
    """show changesets not found in destination

    Show changesets not found in the specified destination repository or
    the default push location. These are the changesets that would be pushed
    if a push was requested.

    See pull for valid destination format details.
    """
    limit = cmdutil.loglimit(opts)
    dest, revs, checkout = hg.parseurl(
        ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev'])
    cmdutil.setremoteconfig(ui, opts)
    if revs:
        revs = [repo.lookup(rev) for rev in revs]

    other = hg.repository(ui, dest)
    ui.status(_('comparing with %s\n') % util.hidepassword(dest))
    nodes = repo.findoutgoing(other, force=opts['force'])
    if not nodes:
        ui.status(_("no changes found\n"))
        return 1

    nodes = repo.changelog.nodesbetween(nodes, revs)[0]
    if opts['newest_first']:
        nodes.reverse()

    displayer = cmdutil.show_changeset(ui, repo, opts)
    shown = 0
    for n in nodes:
        if shown >= limit:
            break
        parents = [p for p in repo.changelog.parents(n) if p != nullid]
        if opts['no_merges'] and len(parents) == 2:
            # --no-merges: skip changesets with two real parents
            continue
        shown += 1
        displayer.show(changenode=n)
1903 1903
def parents(ui, repo, file_=None, **opts):
    """show the parents of the working dir or revision

    Print the working directory's parent revisions. If a
    revision is given via --rev, the parent of that revision
    will be printed. If a file argument is given, revision in
    which the file was last changed (before the working directory
    revision or the argument to --rev if given) is printed.
    """
    rev = opts.get('rev')
    if rev:
        ctx = repo.changectx(rev)
    else:
        ctx = repo.workingctx()

    if not file_:
        nodes = [cp.node() for cp in ctx.parents()]
    else:
        files, match, anypats = cmdutil.matchpats(repo, (file_,), opts)
        if anypats or len(files) != 1:
            raise util.Abort(_('can only specify an explicit file name'))
        file_ = files[0]
        # Collect the file's revision in each parent changeset.
        filenodes = []
        for cp in ctx.parents():
            if not cp:
                continue
            try:
                filenodes.append(cp.filenode(file_))
            except revlog.LookupError:
                # file is absent from this parent
                pass
        if not filenodes:
            raise util.Abort(_("'%s' not found in manifest!") % file_)
        # Map each file revision back to the changeset that introduced it.
        fl = repo.file(file_)
        nodes = [repo.lookup(fl.linkrev(fn)) for fn in filenodes]

    displayer = cmdutil.show_changeset(ui, repo, opts)
    for n in nodes:
        if n != nullid:
            displayer.show(changenode=n)
1943 1943
def paths(ui, repo, search=None):
    """show definition of symbolic path names

    Show definition of symbolic path name NAME. If no name is given, show
    definition of available names.

    Path names are defined in the [paths] section of /etc/mercurial/hgrc
    and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
    """
    entries = ui.configitems("paths")
    if not search:
        # No name given: dump every configured path.
        for alias, loc in entries:
            ui.write("%s = %s\n" % (alias, loc))
        return
    for alias, loc in entries:
        if alias == search:
            ui.write("%s\n" % loc)
            return
    ui.warn(_("not found!\n"))
    return 1
1963 1963
def postincoming(ui, repo, modheads, optupdate, checkout):
    # Shared epilogue after changesets arrive: optionally update the
    # working dir, otherwise tell the user what to do next.
    if modheads == 0:
        return
    if optupdate:
        if checkout or modheads <= 1:
            return hg.update(repo, checkout)
        ui.status(_("not updating, since new heads added\n"))
    if modheads > 1:
        ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
    else:
        ui.status(_("(run 'hg update' to get a working copy)\n"))
1976 1976
def pull(ui, repo, source="default", **opts):
    """pull changes from the specified source

    Pull changes from a remote repository to a local one.

    This finds all changes from the repository at the specified path
    or URL and adds them to the local repository. By default, this
    does not update the copy of the project in the working directory.

    Valid URLs are of the form:

      local/filesystem/path (or file://local/filesystem/path)
      http://[user@]host[:port]/[path]
      https://[user@]host[:port]/[path]
      ssh://[user@]host[:port]/[path]
      static-http://host[:port]/[path]

    Paths in the local filesystem can either point to Mercurial
    repositories or to bundle files (as created by 'hg bundle' or
    'hg incoming --bundle'). The static-http:// protocol, albeit slow,
    allows access to a Mercurial repository where you simply use a web
    server to publish the .hg directory as static content.

    An optional identifier after # indicates a particular branch, tag,
    or changeset to pull.

    Some notes about using SSH with Mercurial:
    - SSH requires an accessible shell account on the destination machine
      and a copy of hg in the remote path or specified with as remotecmd.
    - path is relative to the remote user's home directory by default.
      Use an extra slash at the start of a path to specify an absolute path:
        ssh://example.com//tmp/repository
    - Mercurial doesn't use its own compression via SSH; the right thing
      to do is to configure it in your ~/.ssh/config, e.g.:
        Host *.mylocalnetwork.example.com
          Compression no
        Host *
          Compression yes
      Alternatively specify "ssh -C" as your ssh command in your hgrc or
      with the --ssh command line option.
    """
    source, revs, checkout = hg.parseurl(ui.expandpath(source), opts['rev'])
    cmdutil.setremoteconfig(ui, opts)

    remote = hg.repository(ui, source)
    ui.status(_('pulling from %s\n') % util.hidepassword(source))
    if revs:
        # Translate the requested revision names to nodes on the remote.
        try:
            revs = [remote.lookup(r) for r in revs]
        except repo.NoCapability:
            raise util.Abort(_("Other repository doesn't support revision "
                               "lookup, so a rev cannot be specified."))

    modheads = repo.pull(remote, heads=revs, force=opts['force'])
    return postincoming(ui, repo, modheads, opts['update'], checkout)
2033 2033
def push(ui, repo, dest=None, **opts):
    """push changes to the specified destination

    Push changes from the local repository to the given destination.

    This is the symmetrical operation for pull. It helps to move
    changes from the current repository to a different one. If the
    destination is local this is identical to a pull in that directory
    from the current one.

    By default, push will refuse to run if it detects the result would
    increase the number of remote heads. This generally indicates the
    client has forgotten to sync and merge before pushing.

    Valid URLs are of the form:

      local/filesystem/path (or file://local/filesystem/path)
      ssh://[user@]host[:port]/[path]
      http://[user@]host[:port]/[path]
      https://[user@]host[:port]/[path]

    An optional identifier after # indicates a particular branch, tag,
    or changeset to push.

    Look at the help text for the pull command for important details
    about ssh:// URLs.

    Pushing to http:// and https:// URLs is only possible, if this
    feature is explicitly enabled on the remote Mercurial server.
    """
    # resolve 'default-push' (falling back to 'default') plus any #rev suffix
    dest, revs, checkout = hg.parseurl(
        ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev'])
    cmdutil.setremoteconfig(ui, opts)

    other = hg.repository(ui, dest)
    # wrap the message in _() so it is translatable, matching pull()
    ui.status(_('pushing to %s\n') % util.hidepassword(dest))
    if revs:
        revs = [repo.lookup(rev) for rev in revs]
    r = repo.push(other, opts['force'], revs=revs)
    # r == 0 signals failure from repo.push; invert into a shell exit status
    return r == 0
2074 2074
def rawcommit(ui, repo, *pats, **opts):
    """raw commit interface (DEPRECATED)

    (DEPRECATED)
    Lowlevel commit, for use in helper scripts.

    This command is not intended to be used by normal users, as it is
    primarily useful for importing from other SCMs.

    This command is now deprecated and will be removed in a future
    release, please use debugsetparents and commit instead.
    """

    ui.warn(_("(the rawcommit command is deprecated)\n"))

    # commit message from -m/-l via the shared helper
    message = cmdutil.logmessage(opts)

    files, match, anypats = cmdutil.matchpats(repo, pats, opts)
    if opts['files']:
        # --files FILE: read extra file names, one per line
        files += open(opts['files']).read().splitlines()

    # explicit --parent values let callers graft a commit anywhere
    parents = [repo.lookup(p) for p in opts['parent']]

    try:
        repo.rawcommit(files, message, opts['user'], opts['date'], *parents)
    except ValueError, inst:
        # repo.rawcommit raises ValueError on bad input; surface as Abort
        raise util.Abort(str(inst))
2102 2102
def recover(ui, repo):
    """roll back an interrupted transaction

    Recover from an interrupted commit or pull.

    This command tries to fix the repository status after an interrupted
    operation. It should only be necessary when Mercurial suggests it.
    """
    # guard clause: nothing was recovered -> error exit status
    if not repo.recover():
        return 1
    # a successful recovery is followed by an integrity check
    return hg.verify(repo)
2114 2114
def remove(ui, repo, *pats, **opts):
    """remove the specified files on the next commit

    Schedule the indicated files for removal from the repository.

    This only removes files from the current branch, not from the
    entire project history. If the files still exist in the working
    directory, they will be deleted from it. If invoked with --after,
    files are marked as removed, but not actually unlinked unless --force
    is also given. Without exact file names, --after will only mark
    files as removed if they are no longer in the working directory.

    This command schedules the files to be removed at the next commit.
    To undo a remove before that, see hg revert.

    Modified files and added files are not removed by default. To
    remove them, use the -f/--force option.
    """
    if not opts['after'] and not pats:
        raise util.Abort(_('no files specified'))
    files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
    # dropped dead "exact = dict.fromkeys(files)" -- the walk loop below
    # rebinds `exact` on every iteration before anything could read it
    # keep the first five status lists: modified/added/removed/deleted/unknown
    mardu = map(dict.fromkeys, repo.status(files=files, match=matchfn))[:5]
    modified, added, removed, deleted, unknown = mardu
    remove, forget = [], []
    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
        reason = None
        if abs in modified and not opts['force']:
            reason = _('is modified (use -f to force removal)')
        elif abs in added:
            if opts['force']:
                # forced removal of an added file == just forget it
                forget.append(abs)
                continue
            reason = _('has been marked for add (use -f to force removal)')
            exact = 1 # force the message
        elif abs not in repo.dirstate:
            reason = _('is not managed')
        elif opts['after'] and not exact and abs not in deleted:
            # --after without an exact name only picks up missing files
            continue
        elif abs in removed:
            continue
        if reason:
            if exact:
                ui.warn(_('not removing %s: file %s\n') % (rel, reason))
        else:
            if ui.verbose or not exact:
                ui.status(_('removing %s\n') % rel)
            remove.append(abs)
    repo.forget(forget)
    repo.remove(remove, unlink=opts['force'] or not opts['after'])
2165 2165
def rename(ui, repo, *pats, **opts):
    """rename files; equivalent of copy + remove

    Mark dest as copies of sources; mark sources for deletion. If
    dest is a directory, copies are put in that directory. If dest is
    a file, there can only be one source.

    By default, this command copies the contents of files as they
    stand in the working directory. If invoked with --after, the
    operation is recorded, but no copying is performed.

    This command takes effect in the next commit. To undo a rename
    before that, see hg revert.
    """
    # hold the working-dir lock across the whole copy+remove operation
    wlock = repo.wlock(False)
    try:
        return cmdutil.copy(ui, repo, pats, opts, rename=True)
    finally:
        # NOTE(review): dropping the last reference presumably releases the
        # lock via its destructor -- confirm against repo.wlock semantics
        del wlock
2185 2185
def revert(ui, repo, *pats, **opts):
    """restore individual files or dirs to an earlier state

    (use update -r to check out earlier revisions, revert does not
    change the working dir parents)

    With no revision specified, revert the named files or directories
    to the contents they had in the parent of the working directory.
    This restores the contents of the affected files to an unmodified
    state and unschedules adds, removes, copies, and renames. If the
    working directory has two parents, you must explicitly specify the
    revision to revert to.

    Using the -r option, revert the given files or directories to their
    contents as of a specific revision. This can be helpful to "roll
    back" some or all of an earlier change.
    See 'hg help dates' for a list of formats valid for -d/--date.

    Revert modifies the working directory. It does not commit any
    changes, or change the parent of the working directory. If you
    revert to a revision other than the parent of the working
    directory, the reverted files will thus appear modified
    afterwards.

    If a file has been deleted, it is restored. If the executable
    mode of a file was changed, it is reset.

    If names are given, all files matching the names are reverted.
    If no arguments are given, no files are reverted.

    Modified files are saved with a .orig suffix before reverting.
    To disable these backups, use --no-backup.
    """

    if opts["date"]:
        if opts["rev"]:
            raise util.Abort(_("you can't specify a revision and a date"))
        # translate --date into the newest matching revision
        opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])

    if not pats and not opts['all']:
        raise util.Abort(_('no files or directories specified; '
                           'use --all to revert the whole repo'))

    parent, p2 = repo.dirstate.parents()
    if not opts['rev'] and p2 != nullid:
        raise util.Abort(_('uncommitted merge - please provide a '
                           'specific revision'))
    ctx = repo.changectx(opts['rev'])
    node = ctx.node()
    mf = ctx.manifest()
    if node == parent:
        pmf = mf
    else:
        # parent manifest is loaded lazily below, only if actually needed
        pmf = None

    # need all matching names in dirstate and manifest of target rev,
    # so have to walk both. do not print errors if files exist in one
    # but not other.

    # names maps abs path -> (relative path for display, exact match flag)
    names = {}

    wlock = repo.wlock()
    try:
        # walk dirstate.
        files = []
        for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
                                                 badmatch=mf.has_key):
            names[abs] = (rel, exact)
            if src != 'b':
                files.append(abs)

        # walk target manifest.

        def badmatch(path):
            # suppress errors for paths already seen in the dirstate walk,
            # or for directories that contain such paths
            if path in names:
                return True
            path_ = path + '/'
            for f in names:
                if f.startswith(path_):
                    return True
            return False

        for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
                                                 badmatch=badmatch):
            if abs in names or src == 'b':
                continue
            names[abs] = (rel, exact)

        changes = repo.status(files=files, match=names.has_key)[:4]
        modified, added, removed, deleted = map(dict.fromkeys, changes)

        # if f is a rename, also revert the source
        cwd = repo.getcwd()
        for f in added:
            src = repo.dirstate.copied(f)
            if src and src not in names and repo.dirstate[src] == 'r':
                removed[src] = None
                names[src] = (repo.pathto(src, cwd), True)

        def removeforget(abs):
            # message depends on whether the file is scheduled for add
            if repo.dirstate[abs] == 'a':
                return _('forgetting %s\n')
            return _('removing %s\n')

        # each action is (list of files, status message or message factory)
        revert = ([], _('reverting %s\n'))
        add = ([], _('adding %s\n'))
        remove = ([], removeforget)
        undelete = ([], _('undeleting %s\n'))

        disptable = (
            # dispatch table:
            #   file state
            #   action if in target manifest
            #   action if not in target manifest
            #   make backup if in target manifest
            #   make backup if not in target manifest
            (modified, revert, remove, True, True),
            (added, revert, remove, True, False),
            (removed, undelete, None, False, False),
            (deleted, revert, remove, False, False),
            )

        entries = names.items()
        entries.sort()

        for abs, (rel, exact) in entries:
            mfentry = mf.get(abs)
            target = repo.wjoin(abs)
            def handle(xlist, dobackup):
                # queue abs for the chosen action and print the status line
                xlist[0].append(abs)
                if dobackup and not opts['no_backup'] and util.lexists(target):
                    bakname = "%s.orig" % rel
                    ui.note(_('saving current version of %s as %s\n') %
                            (rel, bakname))
                    if not opts.get('dry_run'):
                        util.copyfile(target, bakname)
                if ui.verbose or not exact:
                    msg = xlist[1]
                    if not isinstance(msg, basestring):
                        msg = msg(abs)
                    ui.status(msg % rel)
            for table, hitlist, misslist, backuphit, backupmiss in disptable:
                if abs not in table: continue
                # file has changed in dirstate
                if mfentry:
                    handle(hitlist, backuphit)
                elif misslist is not None:
                    handle(misslist, backupmiss)
                break
            else:
                # for/else: no dispatch entry matched, i.e. the file is
                # unchanged in the dirstate
                if abs not in repo.dirstate:
                    if mfentry:
                        handle(add, True)
                    elif exact:
                        ui.warn(_('file not managed: %s\n') % rel)
                    continue
                # file has not changed in dirstate
                if node == parent:
                    if exact: ui.warn(_('no changes needed to %s\n') % rel)
                    continue
                if pmf is None:
                    # only need parent manifest in this unlikely case,
                    # so do not read by default
                    pmf = repo.changectx(parent).manifest()
                if abs in pmf:
                    if mfentry:
                        # if version of file is same in parent and target
                        # manifests, do nothing
                        if (pmf[abs] != mfentry or
                            pmf.flags(abs) != mf.flags(abs)):
                            handle(revert, False)
                    else:
                        handle(remove, False)

        if not opts.get('dry_run'):
            def checkout(f):
                # write the target revision's content into the working dir
                fc = ctx[f]
                repo.wwrite(f, fc.data(), fc.fileflags())

            audit_path = util.path_auditor(repo.root)
            for f in remove[0]:
                if repo.dirstate[f] == 'a':
                    repo.dirstate.forget(f)
                    continue
                audit_path(f)
                try:
                    util.unlink(repo.wjoin(f))
                except OSError:
                    # file already gone from the working dir -- fine
                    pass
                repo.dirstate.remove(f)

            for f in revert[0]:
                checkout(f)

            for f in add[0]:
                checkout(f)
                repo.dirstate.add(f)

            normal = repo.dirstate.normallookup
            if node == parent and p2 == nullid:
                normal = repo.dirstate.normal
            for f in undelete[0]:
                checkout(f)
                normal(f)

    finally:
        del wlock
2393 2393
def rollback(ui, repo):
    """roll back the last transaction

    This command should be used with care. There is only one level of
    rollback, and there is no way to undo a rollback. It will also
    restore the dirstate at the time of the last transaction, losing
    any dirstate changes since that time.

    Transactions are used to encapsulate the effects of all commands
    that create new changesets or propagate existing changesets into a
    repository. For example, the following commands are transactional,
    and their effects can be rolled back:

      commit
      import
      pull
      push (with this repository as destination)
      unbundle

    This command is not intended for use on public repositories. Once
    changes are visible for pull by other users, rolling a transaction
    back locally is ineffective (someone else may already have pulled
    the changes). Furthermore, a race is possible with readers of the
    repository; for example an in-progress pull from the repository
    may fail if a rollback is performed.
    """
    # all the actual work is delegated to the repository object
    repo.rollback()
2421 2421
def root(ui, repo):
    """print the root (top) of the current working dir

    Print the root directory of the current repository.
    """
    ui.write("%s\n" % repo.root)
2428 2428
def serve(ui, repo, **opts):
    """export the repository via HTTP

    Start a local HTTP repository browser and pull server.

    By default, the server logs accesses to stdout and errors to
    stderr. Use the "-A" and "-E" options to log to files.
    """

    # --stdio: serve the ssh wire protocol on stdin/stdout instead of HTTP
    if opts["stdio"]:
        if repo is None:
            raise hg.RepoError(_("There is no Mercurial repository here"
                                 " (.hg not found)"))
        s = sshserver.sshserver(ui, repo)
        s.serve_forever()

    parentui = ui.parentui or ui
    # mirror the relevant command-line options into the [web] config section
    optlist = ("name templates style address port prefix ipv6"
               " accesslog errorlog webdir_conf certificate")
    for o in optlist.split():
        if opts[o]:
            parentui.setconfig("web", o, str(opts[o]))
            if (repo is not None) and (repo.ui != parentui):
                repo.ui.setconfig("web", o, str(opts[o]))

    # without a repository we can still serve a multi-repo webdir_conf
    if repo is None and not ui.config("web", "webdir_conf"):
        raise hg.RepoError(_("There is no Mercurial repository here"
                             " (.hg not found)"))

    class service:
        def init(self):
            util.set_signal_handler()
            try:
                self.httpd = hgweb.server.create_server(parentui, repo)
            except socket.error, inst:
                raise util.Abort(_('cannot start server: ') + inst.args[1])

            if not ui.verbose: return

            if self.httpd.prefix:
                prefix = self.httpd.prefix.strip('/') + '/'
            else:
                prefix = ''

            # omit the port from the URL when serving on plain HTTP port 80
            if self.httpd.port != 80:
                ui.status(_('listening at http://%s:%d/%s\n') %
                          (self.httpd.addr, self.httpd.port, prefix))
            else:
                ui.status(_('listening at http://%s/%s\n') %
                          (self.httpd.addr, prefix))

        def run(self):
            self.httpd.serve_forever()

    service = service()

    # cmdutil.service handles daemonizing and running init/run
    cmdutil.service(opts, initfn=service.init, runfn=service.run)
2486 2486
def status(ui, repo, *pats, **opts):
    """show changed files in the working directory

    Show status of files in the repository. If names are given, only
    files that match are shown. Files that are clean or ignored or
    source of a copy/move operation, are not listed unless -c (clean),
    -i (ignored), -C (copies) or -A is given. Unless options described
    with "show only ..." are given, the options -mardu are used.

    Option -q/--quiet hides untracked (unknown and ignored) files
    unless explicitly requested with -u/--unknown or -i/-ignored.

    NOTE: status may appear to disagree with diff if permissions have
    changed or a merge has occurred. The standard diff format does not
    report permission changes and diff only reports changes relative
    to one merge parent.

    If one revision is given, it is used as the base revision.
    If two revisions are given, the difference between them is shown.

    The codes used to show the status of files are:
    M = modified
    A = added
    R = removed
    C = clean
    ! = deleted, but still tracked
    ? = not tracked
    I = ignored
      = the previous added file was copied from here
    """

    all = opts['all']
    node1, node2 = cmdutil.revpair(repo, opts.get('rev'))

    files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
    cwd = (pats and repo.getcwd()) or ''
    # only request the expensive ignored/clean/unknown lists when the
    # selected options actually need them
    modified, added, removed, deleted, unknown, ignored, clean = [
        n for n in repo.status(node1=node1, node2=node2, files=files,
                               match=matchfn,
                               list_ignored=opts['ignored']
                                            or all and not ui.quiet,
                               list_clean=opts['clean'] or all,
                               list_unknown=opts['unknown']
                                            or not (ui.quiet or
                                                    opts['modified'] or
                                                    opts['added'] or
                                                    opts['removed'] or
                                                    opts['deleted'] or
                                                    opts['ignored']))]

    changetypes = (('modified', 'M', modified),
                   ('added', 'A', added),
                   ('removed', 'R', removed),
                   ('deleted', '!', deleted),
                   ('unknown', '?', unknown),
                   ('ignored', 'I', ignored))

    # clean files are only listed when explicitly requested (or with -A)
    explicit_changetypes = changetypes + (('clean', 'C', clean),)

    # --print0 terminates entries with NUL for xargs-style consumers
    end = opts['print0'] and '\0' or '\n'

    for opt, char, changes in ([ct for ct in explicit_changetypes
                                if all or opts[ct[0]]]
                               or changetypes):

        if opts['no_status']:
            format = "%%s%s" % end
        else:
            format = "%s %%s%s" % (char, end)

        for f in changes:
            ui.write(format % repo.pathto(f, cwd))
            if ((all or opts.get('copies')) and not opts.get('no_status')):
                # show the copy source on its own line under the added file
                copied = repo.dirstate.copied(f)
                if copied:
                    ui.write(' %s%s' % (repo.pathto(copied, cwd), end))
2563 2563
def tag(ui, repo, name, rev_=None, **opts):
    """add a tag for the current or given revision

    Name a particular revision using <name>.

    Tags are used to name particular revisions of the repository and are
    very useful to compare different revision, to go back to significant
    earlier versions or to mark branch points as releases, etc.

    If no revision is given, the parent of the working directory is used,
    or tip if no revision is checked out.

    To facilitate version control, distribution, and merging of tags,
    they are stored as a file named ".hgtags" which is managed
    similarly to other project files and can be hand-edited if
    necessary. The file '.hg/localtags' is used for local tags (not
    shared among repositories).

    See 'hg help dates' for a list of formats valid for -d/--date.
    """
    # 'tip', '.' and 'null' are built-in revision names; refuse to shadow them
    if name in ['tip', '.', 'null']:
        raise util.Abort(_("the name '%s' is reserved") % name)
    if rev_ is not None:
        # positional REV is the deprecated spelling of -r REV
        ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
                  "please use 'hg tag [-r REV] NAME' instead\n"))
        if opts['rev']:
            raise util.Abort(_("use only one form to specify the revision"))
    if opts['rev'] and opts['remove']:
        raise util.Abort(_("--rev and --remove are incompatible"))
    if opts['rev']:
        rev_ = opts['rev']
    message = opts['message']
    if opts['remove']:
        tagtype = repo.tagtype(name)

        if not tagtype:
            raise util.Abort(_('tag %s does not exist') % name)
        # --local may only remove local tags, and vice versa
        if opts['local'] and tagtype == 'global':
            raise util.Abort(_('%s tag is global') % name)
        if not opts['local'] and tagtype == 'local':
            raise util.Abort(_('%s tag is local') % name)

        # removing a tag == re-tagging the null revision under that name
        rev_ = nullid
        if not message:
            message = _('Removed tag %s') % name
    elif name in repo.tags() and not opts['force']:
        raise util.Abort(_('a tag named %s already exists (use -f to force)')
                         % name)
    if not rev_ and repo.dirstate.parents()[1] != nullid:
        raise util.Abort(_('uncommitted merge - please provide a '
                           'specific revision'))
    r = repo.changectx(rev_).node()

    if not message:
        message = _('Added tag %s for changeset %s') % (name, short(r))

    repo.tag(name, r, message, opts['local'], opts['user'], opts['date'])
2621 2621
def tags(ui, repo):
    """list repository tags

    List the repository tags.

    This lists both regular and local tags. When the -v/--verbose switch
    is used, a third column "local" is printed for local tags.
    """

    l = repo.tagslist()
    l.reverse()
    hexfunc = ui.debugflag and hex or short
    tagtype = ""

    for t, n in l:
        if ui.quiet:
            ui.write("%s\n" % t)
            continue

        try:
            hn = hexfunc(n)
            r = "%5d:%s" % (repo.changelog.rev(n), hn)
        except revlog.LookupError:
            # node is unknown to the changelog; hn is already bound because
            # hexfunc() does not raise LookupError
            r = " ?:%s" % hn
        # the output used to sit in a try/else clause, so tags whose node
        # could not be resolved silently printed nothing even though the
        # "?" placeholder above had been prepared for them
        spaces = " " * (30 - util.locallen(t))
        if ui.verbose:
            if repo.tagtype(t) == 'local':
                tagtype = " local"
            else:
                tagtype = ""
        ui.write("%s%s %s%s\n" % (t, spaces, r, tagtype))
2654 2654
def tip(ui, repo, **opts):
    """show the tip revision

    Show the tip revision.
    """
    # the tip is the last revision: nullrev + changelog length
    displayer = cmdutil.show_changeset(ui, repo, opts)
    displayer.show(nullrev + repo.changelog.count())
2661 2661
def unbundle(ui, repo, fname1, *fnames, **opts):
    """apply one or more changegroup files

    Apply one or more compressed changegroup files generated by the
    bundle command.
    """
    fnames = (fname1,) + fnames

    lock = None
    try:
        # hold the repository lock across all bundle applications
        lock = repo.lock()
        for fname in fnames:
            # a bundle may live on the local filesystem or at a URL
            if os.path.exists(fname):
                f = open(fname, "rb")
            else:
                f = urllib.urlopen(fname)
            # NOTE(review): f is never closed explicitly; presumably the
            # changegroup reader keeps it alive for as long as needed
            gen = changegroup.readbundle(f, fname)
            modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
    finally:
        # NOTE(review): dropping the reference presumably releases the lock
        # via its destructor -- confirm against repo.lock semantics
        del lock

    # modheads from the last bundle drives the post-pull hint/update
    return postincoming(ui, repo, modheads, opts['update'], None)
2684 2684
def update(ui, repo, node=None, rev=None, clean=False, date=None):
    """update working directory

    Update the working directory to the specified revision, or the
    tip of the current branch if none is specified.
    See 'hg help dates' for a list of formats valid for -d/--date.

    If there are no outstanding changes in the working directory and
    there is a linear relationship between the current version and the
    requested version, the result is the requested version.

    To merge the working directory with another revision, use the
    merge command.

    By default, update will refuse to run if doing so would require
    discarding local changes.
    """
    # the positional NODE argument and -r REV are two spellings of one thing
    if rev and node:
        raise util.Abort(_("please specify just one revision"))
    rev = rev or node

    if date:
        if rev:
            raise util.Abort(_("you can't specify a revision and a date"))
        # map --date onto a concrete revision
        rev = cmdutil.finddate(ui, repo, date)

    if clean:
        return hg.clean(repo, rev)
    return hg.update(repo, rev)
2717 2717
def verify(ui, repo):
    """verify the integrity of the repository

    Verify the integrity of the current repository.

    This will perform an extensive check of the repository's
    integrity, validating the hashes and checksums of each entry in
    the changelog, manifest, and tracked files, as well as the
    integrity of their crosslinks and indices.
    """
    # delegate to the shared helper and propagate its result
    return hg.verify(repo)
2729 2729
def version_(ui):
    """output version and copyright information"""
    # the version banner is normal output; the license blurb is status-only
    banner = _("Mercurial Distributed SCM (version %s)\n")
    ui.write(banner % version.get_version())
    notice = _(
        "\nCopyright (C) 2005-2008 Matt Mackall <mpm@selenic.com> and others\n"
        "This is free software; see the source for copying conditions. "
        "There is NO\nwarranty; "
        "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
    )
    ui.status(notice)
2740 2740
# Command options and aliases are listed here, alphabetically

# options accepted by every command; each entry is
# (short flag, long flag, default value, help text)
globalopts = [
    ('R', 'repository', '',
     _('repository root directory or symbolic path name')),
    ('', 'cwd', '', _('change working directory')),
    ('y', 'noninteractive', None,
     _('do not prompt, assume \'yes\' for any required answers')),
    ('q', 'quiet', None, _('suppress output')),
    ('v', 'verbose', None, _('enable additional output')),
    ('', 'config', [], _('set/override config option')),
    ('', 'debug', None, _('enable debugging output')),
    ('', 'debugger', None, _('start debugger')),
    ('', 'encoding', util._encoding, _('set the charset encoding')),
    ('', 'encodingmode', util._encodingmode, _('set the charset encoding mode')),
    ('', 'lsprof', None, _('print improved command execution profile')),
    ('', 'traceback', None, _('print traceback on exception')),
    ('', 'time', None, _('time how long the command takes')),
    ('', 'profile', None, _('print command execution profile')),
    ('', 'version', None, _('output version information and exit')),
    ('h', 'help', None, _('display help and exit')),
]

# reusable option groups shared by several commands in the table below
dryrunopts = [('n', 'dry-run', None,
               _('do not perform actions, just print output'))]

remoteopts = [
    ('e', 'ssh', '', _('specify ssh command to use')),
    ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
]

walkopts = [
    ('I', 'include', [], _('include names matching the given patterns')),
    ('X', 'exclude', [], _('exclude names matching the given patterns')),
]

commitopts = [
    ('m', 'message', '', _('use <text> as commit message')),
    ('l', 'logfile', '', _('read commit message from <file>')),
]

commitopts2 = [
    ('d', 'date', '', _('record datecode as commit date')),
    ('u', 'user', '', _('record user as committer')),
]

templateopts = [
    ('', 'style', '', _('display using template map file')),
    ('', 'template', '', _('display with template')),
]

logopts = [
    ('p', 'patch', None, _('show patch')),
    ('l', 'limit', '', _('limit number of changes displayed')),
    ('M', 'no-merges', None, _('do not show merges')),
] + templateopts
2797 2797
2798 2798 table = {
2799 2799 "^add": (add, walkopts + dryrunopts, _('hg add [OPTION]... [FILE]...')),
2800 2800 "addremove":
2801 2801 (addremove,
2802 2802 [('s', 'similarity', '',
2803 2803 _('guess renamed files by similarity (0<=s<=100)')),
2804 2804 ] + walkopts + dryrunopts,
2805 2805 _('hg addremove [OPTION]... [FILE]...')),
2806 2806 "^annotate|blame":
2807 2807 (annotate,
2808 2808 [('r', 'rev', '', _('annotate the specified revision')),
2809 2809 ('f', 'follow', None, _('follow file copies and renames')),
2810 2810 ('a', 'text', None, _('treat all files as text')),
2811 2811 ('u', 'user', None, _('list the author (long with -v)')),
2812 2812 ('d', 'date', None, _('list the date (short with -q)')),
2813 2813 ('n', 'number', None, _('list the revision number (default)')),
2814 2814 ('c', 'changeset', None, _('list the changeset')),
2815 2815 ('l', 'line-number', None,
2816 2816 _('show line number at the first appearance'))
2817 2817 ] + walkopts,
2818 2818 _('hg annotate [-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')),
2819 2819 "archive":
2820 2820 (archive,
2821 2821 [('', 'no-decode', None, _('do not pass files through decoders')),
2822 2822 ('p', 'prefix', '', _('directory prefix for files in archive')),
2823 2823 ('r', 'rev', '', _('revision to distribute')),
2824 2824 ('t', 'type', '', _('type of distribution to create')),
2825 2825 ] + walkopts,
2826 2826 _('hg archive [OPTION]... DEST')),
2827 2827 "backout":
2828 2828 (backout,
2829 2829 [('', 'merge', None,
2830 2830 _('merge with old dirstate parent after backout')),
2831 2831 ('', 'parent', '', _('parent to choose when backing out merge')),
2832 2832 ('r', 'rev', '', _('revision to backout')),
2833 2833 ] + walkopts + commitopts + commitopts2,
2834 2834 _('hg backout [OPTION]... [-r] REV')),
2835 2835 "bisect":
2836 2836 (bisect,
2837 2837 [('r', 'reset', False, _('reset bisect state')),
2838 2838 ('g', 'good', False, _('mark changeset good')),
2839 2839 ('b', 'bad', False, _('mark changeset bad')),
2840 2840 ('s', 'skip', False, _('skip testing changeset')),
2841 2841 ('U', 'noupdate', False, _('do not update to target'))],
2842 2842 _("hg bisect [-gbsr] [REV]")),
2843 2843 "branch":
2844 2844 (branch,
2845 2845 [('f', 'force', None,
2846 2846 _('set branch name even if it shadows an existing branch'))],
2847 2847 _('hg branch [-f] [NAME]')),
2848 2848 "branches":
2849 2849 (branches,
2850 2850 [('a', 'active', False,
2851 2851 _('show only branches that have unmerged heads'))],
2852 2852 _('hg branches [-a]')),
2853 2853 "bundle":
2854 2854 (bundle,
2855 2855 [('f', 'force', None,
2856 2856 _('run even when remote repository is unrelated')),
2857 2857 ('r', 'rev', [],
2858 2858 _('a changeset you would like to bundle')),
2859 2859 ('', 'base', [],
2860 2860 _('a base changeset to specify instead of a destination')),
2861 2861 ('a', 'all', None,
2862 2862 _('bundle all changesets in the repository')),
2863 2863 ] + remoteopts,
2864 2864 _('hg bundle [-f] [-a] [-r REV]... [--base REV]... FILE [DEST]')),
2865 2865 "cat":
2866 2866 (cat,
2867 2867 [('o', 'output', '', _('print output to file with formatted name')),
2868 2868 ('r', 'rev', '', _('print the given revision')),
2869 2869 ('', 'decode', None, _('apply any matching decode filter')),
2870 2870 ] + walkopts,
2871 2871 _('hg cat [OPTION]... FILE...')),
2872 2872 "^clone":
2873 2873 (clone,
2874 2874 [('U', 'noupdate', None, _('do not update the new working directory')),
2875 2875 ('r', 'rev', [],
2876 2876 _('a changeset you would like to have after cloning')),
2877 2877 ('', 'pull', None, _('use pull protocol to copy metadata')),
2878 2878 ('', 'uncompressed', None,
2879 2879 _('use uncompressed transfer (fast over LAN)')),
2880 2880 ] + remoteopts,
2881 2881 _('hg clone [OPTION]... SOURCE [DEST]')),
2882 2882 "^commit|ci":
2883 2883 (commit,
2884 2884 [('A', 'addremove', None,
2885 2885 _('mark new/missing files as added/removed before committing')),
2886 2886 ] + walkopts + commitopts + commitopts2,
2887 2887 _('hg commit [OPTION]... [FILE]...')),
2888 2888 "copy|cp":
2889 2889 (copy,
2890 2890 [('A', 'after', None, _('record a copy that has already occurred')),
2891 2891 ('f', 'force', None,
2892 2892 _('forcibly copy over an existing managed file')),
2893 2893 ] + walkopts + dryrunopts,
2894 2894 _('hg copy [OPTION]... [SOURCE]... DEST')),
2895 2895 "debugancestor": (debugancestor, [],
2896 2896 _('hg debugancestor [INDEX] REV1 REV2')),
2897 2897 "debugcheckstate": (debugcheckstate, [], _('hg debugcheckstate')),
2898 2898 "debugcomplete":
2899 2899 (debugcomplete,
2900 2900 [('o', 'options', None, _('show the command options'))],
2901 2901 _('hg debugcomplete [-o] CMD')),
2902 2902 "debugdate":
2903 2903 (debugdate,
2904 2904 [('e', 'extended', None, _('try extended date formats'))],
2905 2905 _('hg debugdate [-e] DATE [RANGE]')),
2906 2906 "debugdata": (debugdata, [], _('hg debugdata FILE REV')),
2907 2907 "debugfsinfo": (debugfsinfo, [], _('hg debugfsinfo [PATH]')),
2908 2908 "debugindex": (debugindex, [], _('hg debugindex FILE')),
2909 2909 "debugindexdot": (debugindexdot, [], _('hg debugindexdot FILE')),
2910 2910 "debuginstall": (debuginstall, [], _('hg debuginstall')),
2911 2911 "debugrawcommit|rawcommit":
2912 2912 (rawcommit,
2913 2913 [('p', 'parent', [], _('parent')),
2914 2914 ('F', 'files', '', _('file list'))
2915 2915 ] + commitopts + commitopts2,
2916 2916 _('hg debugrawcommit [OPTION]... [FILE]...')),
2917 2917 "debugrebuildstate":
2918 2918 (debugrebuildstate,
2919 2919 [('r', 'rev', '', _('revision to rebuild to'))],
2920 2920 _('hg debugrebuildstate [-r REV] [REV]')),
2921 2921 "debugrename":
2922 2922 (debugrename,
2923 2923 [('r', 'rev', '', _('revision to debug'))],
2924 2924 _('hg debugrename [-r REV] FILE')),
2925 2925 "debugsetparents":
2926 2926 (debugsetparents,
2927 2927 [],
2928 2928 _('hg debugsetparents REV1 [REV2]')),
2929 2929 "debugstate": (debugstate, [], _('hg debugstate')),
2930 2930 "debugwalk": (debugwalk, walkopts, _('hg debugwalk [OPTION]... [FILE]...')),
2931 2931 "^diff":
2932 2932 (diff,
2933 2933 [('r', 'rev', [], _('revision')),
2934 2934 ('a', 'text', None, _('treat all files as text')),
2935 2935 ('p', 'show-function', None,
2936 2936 _('show which function each change is in')),
2937 2937 ('g', 'git', None, _('use git extended diff format')),
2938 2938 ('', 'nodates', None, _("don't include dates in diff headers")),
2939 2939 ('w', 'ignore-all-space', None,
2940 2940 _('ignore white space when comparing lines')),
2941 2941 ('b', 'ignore-space-change', None,
2942 2942 _('ignore changes in the amount of white space')),
2943 2943 ('B', 'ignore-blank-lines', None,
2944 2944 _('ignore changes whose lines are all blank')),
2945 2945 ('U', 'unified', 3,
2946 2946 _('number of lines of context to show'))
2947 2947 ] + walkopts,
2948 2948 _('hg diff [OPTION]... [-r REV1 [-r REV2]] [FILE]...')),
2949 2949 "^export":
2950 2950 (export,
2951 2951 [('o', 'output', '', _('print output to file with formatted name')),
2952 2952 ('a', 'text', None, _('treat all files as text')),
2953 2953 ('g', 'git', None, _('use git extended diff format')),
2954 2954 ('', 'nodates', None, _("don't include dates in diff headers")),
2955 2955 ('', 'switch-parent', None, _('diff against the second parent'))],
2956 2956 _('hg export [OPTION]... [-o OUTFILESPEC] REV...')),
2957 2957 "grep":
2958 2958 (grep,
2959 2959 [('0', 'print0', None, _('end fields with NUL')),
2960 2960 ('', 'all', None, _('print all revisions that match')),
2961 2961 ('f', 'follow', None,
2962 2962 _('follow changeset history, or file history across copies and renames')),
2963 2963 ('i', 'ignore-case', None, _('ignore case when matching')),
2964 2964 ('l', 'files-with-matches', None,
2965 2965 _('print only filenames and revs that match')),
2966 2966 ('n', 'line-number', None, _('print matching line numbers')),
2967 2967 ('r', 'rev', [], _('search in given revision range')),
2968 2968 ('u', 'user', None, _('list the author (long with -v)')),
2969 2969 ('d', 'date', None, _('list the date (short with -q)')),
2970 2970 ] + walkopts,
2971 2971 _('hg grep [OPTION]... PATTERN [FILE]...')),
2972 2972 "heads":
2973 2973 (heads,
2974 2974 [('r', 'rev', '', _('show only heads which are descendants of rev')),
2975 2975 ] + templateopts,
2976 2976 _('hg heads [-r REV] [REV]...')),
2977 2977 "help": (help_, [], _('hg help [COMMAND]')),
2978 2978 "identify|id":
2979 2979 (identify,
2980 2980 [('r', 'rev', '', _('identify the specified rev')),
2981 2981 ('n', 'num', None, _('show local revision number')),
2982 2982 ('i', 'id', None, _('show global revision id')),
2983 2983 ('b', 'branch', None, _('show branch')),
2984 2984 ('t', 'tags', None, _('show tags'))],
2985 2985 _('hg identify [-nibt] [-r REV] [SOURCE]')),
2986 2986 "import|patch":
2987 2987 (import_,
2988 2988 [('p', 'strip', 1,
2989 2989 _('directory strip option for patch. This has the same\n'
2990 2990 'meaning as the corresponding patch option')),
2991 2991 ('b', 'base', '', _('base path')),
2992 2992 ('f', 'force', None,
2993 2993 _('skip check for outstanding uncommitted changes')),
2994 2994 ('', 'no-commit', None, _("don't commit, just update the working directory")),
2995 2995 ('', 'exact', None,
2996 2996 _('apply patch to the nodes from which it was generated')),
2997 2997 ('', 'import-branch', None,
2998 2998 _('Use any branch information in patch (implied by --exact)'))] +
2999 2999 commitopts + commitopts2,
3000 3000 _('hg import [OPTION]... PATCH...')),
3001 3001 "incoming|in":
3002 3002 (incoming,
3003 3003 [('f', 'force', None,
3004 3004 _('run even when remote repository is unrelated')),
3005 3005 ('n', 'newest-first', None, _('show newest record first')),
3006 3006 ('', 'bundle', '', _('file to store the bundles into')),
3007 3007 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
3008 3008 ] + logopts + remoteopts,
3009 3009 _('hg incoming [-p] [-n] [-M] [-f] [-r REV]...'
3010 3010 ' [--bundle FILENAME] [SOURCE]')),
3011 3011 "^init":
3012 3012 (init,
3013 3013 remoteopts,
3014 3014 _('hg init [-e CMD] [--remotecmd CMD] [DEST]')),
3015 3015 "locate":
3016 3016 (locate,
3017 3017 [('r', 'rev', '', _('search the repository as it stood at rev')),
3018 3018 ('0', 'print0', None,
3019 3019 _('end filenames with NUL, for use with xargs')),
3020 3020 ('f', 'fullpath', None,
3021 3021 _('print complete paths from the filesystem root')),
3022 3022 ] + walkopts,
3023 3023 _('hg locate [OPTION]... [PATTERN]...')),
3024 3024 "^log|history":
3025 3025 (log,
3026 3026 [('f', 'follow', None,
3027 3027 _('follow changeset history, or file history across copies and renames')),
3028 3028 ('', 'follow-first', None,
3029 3029 _('only follow the first parent of merge changesets')),
3030 3030 ('d', 'date', '', _('show revs matching date spec')),
3031 3031 ('C', 'copies', None, _('show copied files')),
3032 3032 ('k', 'keyword', [], _('do case-insensitive search for a keyword')),
3033 3033 ('r', 'rev', [], _('show the specified revision or range')),
3034 3034 ('', 'removed', None, _('include revs where files were removed')),
3035 3035 ('m', 'only-merges', None, _('show only merges')),
3036 3036 ('b', 'only-branch', [],
3037 3037 _('show only changesets within the given named branch')),
3038 3038 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
3039 3039 ] + logopts + walkopts,
3040 3040 _('hg log [OPTION]... [FILE]')),
3041 3041 "manifest":
3042 3042 (manifest,
3043 3043 [('r', 'rev', '', _('revision to display'))],
3044 3044 _('hg manifest [-r REV]')),
3045 3045 "^merge":
3046 3046 (merge,
3047 3047 [('f', 'force', None, _('force a merge with outstanding changes')),
3048 3048 ('r', 'rev', '', _('revision to merge')),
3049 3049 ],
3050 3050 _('hg merge [-f] [[-r] REV]')),
3051 3051 "outgoing|out":
3052 3052 (outgoing,
3053 3053 [('f', 'force', None,
3054 3054 _('run even when remote repository is unrelated')),
3055 3055 ('r', 'rev', [], _('a specific revision you would like to push')),
3056 3056 ('n', 'newest-first', None, _('show newest record first')),
3057 3057 ] + logopts + remoteopts,
3058 3058 _('hg outgoing [-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
3059 3059 "^parents":
3060 3060 (parents,
3061 3061 [('r', 'rev', '', _('show parents from the specified rev')),
3062 3062 ] + templateopts,
3063 3063 _('hg parents [-r REV] [FILE]')),
3064 3064 "paths": (paths, [], _('hg paths [NAME]')),
3065 3065 "^pull":
3066 3066 (pull,
3067 3067 [('u', 'update', None,
3068 3068 _('update to new tip if changesets were pulled')),
3069 3069 ('f', 'force', None,
3070 3070 _('run even when remote repository is unrelated')),
3071 3071 ('r', 'rev', [],
3072 3072 _('a specific revision up to which you would like to pull')),
3073 3073 ] + remoteopts,
3074 3074 _('hg pull [-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
3075 3075 "^push":
3076 3076 (push,
3077 3077 [('f', 'force', None, _('force push')),
3078 3078 ('r', 'rev', [], _('a specific revision you would like to push')),
3079 3079 ] + remoteopts,
3080 3080 _('hg push [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
3081 3081 "recover": (recover, [], _('hg recover')),
3082 3082 "^remove|rm":
3083 3083 (remove,
3084 3084 [('A', 'after', None, _('record remove without deleting')),
3085 3085 ('f', 'force', None, _('remove file even if modified')),
3086 3086 ] + walkopts,
3087 3087 _('hg remove [OPTION]... FILE...')),
3088 3088 "rename|mv":
3089 3089 (rename,
3090 3090 [('A', 'after', None, _('record a rename that has already occurred')),
3091 3091 ('f', 'force', None,
3092 3092 _('forcibly copy over an existing managed file')),
3093 3093 ] + walkopts + dryrunopts,
3094 3094 _('hg rename [OPTION]... SOURCE... DEST')),
3095 3095 "revert":
3096 3096 (revert,
3097 3097 [('a', 'all', None, _('revert all changes when no arguments given')),
3098 3098 ('d', 'date', '', _('tipmost revision matching date')),
3099 3099 ('r', 'rev', '', _('revision to revert to')),
3100 3100 ('', 'no-backup', None, _('do not save backup copies of files')),
3101 3101 ] + walkopts + dryrunopts,
3102 3102 _('hg revert [OPTION]... [-r REV] [NAME]...')),
3103 3103 "rollback": (rollback, [], _('hg rollback')),
3104 3104 "root": (root, [], _('hg root')),
3105 3105 "^serve":
3106 3106 (serve,
3107 3107 [('A', 'accesslog', '', _('name of access log file to write to')),
3108 3108 ('d', 'daemon', None, _('run server in background')),
3109 3109 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3110 3110 ('E', 'errorlog', '', _('name of error log file to write to')),
3111 3111 ('p', 'port', 0, _('port to use (default: 8000)')),
3112 3112 ('a', 'address', '', _('address to use')),
3113 3113 ('', 'prefix', '', _('prefix path to serve from (default: server root)')),
3114 3114 ('n', 'name', '',
3115 3115 _('name to show in web pages (default: working dir)')),
3116 3116 ('', 'webdir-conf', '', _('name of the webdir config file'
3117 3117 ' (serve more than one repo)')),
3118 3118 ('', 'pid-file', '', _('name of file to write process ID to')),
3119 3119 ('', 'stdio', None, _('for remote clients')),
3120 3120 ('t', 'templates', '', _('web templates to use')),
3121 3121 ('', 'style', '', _('template style to use')),
3122 3122 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
3123 3123 ('', 'certificate', '', _('SSL certificate file'))],
3124 3124 _('hg serve [OPTION]...')),
3125 3125 "showconfig|debugconfig":
3126 3126 (showconfig,
3127 3127 [('u', 'untrusted', None, _('show untrusted configuration options'))],
3128 3128 _('hg showconfig [-u] [NAME]...')),
3129 3129 "^status|st":
3130 3130 (status,
3131 3131 [('A', 'all', None, _('show status of all files')),
3132 3132 ('m', 'modified', None, _('show only modified files')),
3133 3133 ('a', 'added', None, _('show only added files')),
3134 3134 ('r', 'removed', None, _('show only removed files')),
3135 3135 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3136 3136 ('c', 'clean', None, _('show only files without changes')),
3137 3137 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3138 3138 ('i', 'ignored', None, _('show only ignored files')),
3139 3139 ('n', 'no-status', None, _('hide status prefix')),
3140 3140 ('C', 'copies', None, _('show source of copied files')),
3141 3141 ('0', 'print0', None,
3142 3142 _('end filenames with NUL, for use with xargs')),
3143 3143 ('', 'rev', [], _('show difference from revision')),
3144 3144 ] + walkopts,
3145 3145 _('hg status [OPTION]... [FILE]...')),
3146 3146 "tag":
3147 3147 (tag,
3148 3148 [('f', 'force', None, _('replace existing tag')),
3149 3149 ('l', 'local', None, _('make the tag local')),
3150 3150 ('r', 'rev', '', _('revision to tag')),
3151 3151 ('', 'remove', None, _('remove a tag')),
3152 3152 # -l/--local is already there, commitopts cannot be used
3153 3153 ('m', 'message', '', _('use <text> as commit message')),
3154 3154 ] + commitopts2,
3155 3155 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
3156 3156 "tags": (tags, [], _('hg tags')),
3157 3157 "tip":
3158 3158 (tip,
3159 3159 [('p', 'patch', None, _('show patch')),
3160 3160 ] + templateopts,
3161 3161 _('hg tip [-p]')),
3162 3162 "unbundle":
3163 3163 (unbundle,
3164 3164 [('u', 'update', None,
3165 3165 _('update to new tip if changesets were unbundled'))],
3166 3166 _('hg unbundle [-u] FILE...')),
3167 3167 "^update|up|checkout|co":
3168 3168 (update,
3169 3169 [('C', 'clean', None, _('overwrite locally modified files')),
3170 3170 ('d', 'date', '', _('tipmost revision matching date')),
3171 3171 ('r', 'rev', '', _('revision'))],
3172 3172 _('hg update [-C] [-d DATE] [[-r] REV]')),
3173 3173 "verify": (verify, [], _('hg verify')),
3174 3174 "version": (version_, [], _('hg version')),
3175 3175 }
3176 3176
3177 3177 norepo = ("clone init version help debugcomplete debugdata"
3178 3178 " debugindex debugindexdot debugdate debuginstall debugfsinfo")
3179 3179 optionalrepo = ("identify paths serve showconfig debugancestor")
@@ -1,620 +1,620 b''
1 1 # context.py - changeset and file context objects for mercurial
2 2 #
3 3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import nullid, nullrev, short
9 9 from i18n import _
10 import ancestor, bdiff, repo, revlog, util, os, errno
10 import ancestor, bdiff, revlog, util, os, errno
11 11
12 12 class changectx(object):
13 13 """A changecontext object makes access to data related to a particular
14 14 changeset convenient."""
15 15 def __init__(self, repo, changeid=None):
16 16 """changeid is a revision number, node, or tag"""
17 17 self._repo = repo
18 18
19 19 if not changeid and changeid != 0:
20 20 p1, p2 = self._repo.dirstate.parents()
21 21 self._rev = self._repo.changelog.rev(p1)
22 22 if self._rev == -1:
23 23 changeid = 'tip'
24 24 else:
25 25 self._node = p1
26 26 return
27 27
28 28 self._node = self._repo.lookup(changeid)
29 29 self._rev = self._repo.changelog.rev(self._node)
30 30
31 31 def __str__(self):
32 32 return short(self.node())
33 33
34 34 def __repr__(self):
35 35 return "<changectx %s>" % str(self)
36 36
37 37 def __eq__(self, other):
38 38 try:
39 39 return self._rev == other._rev
40 40 except AttributeError:
41 41 return False
42 42
43 43 def __ne__(self, other):
44 44 return not (self == other)
45 45
46 46 def __nonzero__(self):
47 47 return self._rev != nullrev
48 48
49 49 def __getattr__(self, name):
50 50 if name == '_changeset':
51 51 self._changeset = self._repo.changelog.read(self.node())
52 52 return self._changeset
53 53 elif name == '_manifest':
54 54 self._manifest = self._repo.manifest.read(self._changeset[0])
55 55 return self._manifest
56 56 elif name == '_manifestdelta':
57 57 md = self._repo.manifest.readdelta(self._changeset[0])
58 58 self._manifestdelta = md
59 59 return self._manifestdelta
60 60 else:
61 61 raise AttributeError, name
62 62
63 63 def __contains__(self, key):
64 64 return key in self._manifest
65 65
66 66 def __getitem__(self, key):
67 67 return self.filectx(key)
68 68
69 69 def __iter__(self):
70 70 a = self._manifest.keys()
71 71 a.sort()
72 72 for f in a:
73 73 yield f
74 74
75 75 def changeset(self): return self._changeset
76 76 def manifest(self): return self._manifest
77 77
78 78 def rev(self): return self._rev
79 79 def node(self): return self._node
80 80 def user(self): return self._changeset[1]
81 81 def date(self): return self._changeset[2]
82 82 def files(self): return self._changeset[3]
83 83 def description(self): return self._changeset[4]
84 84 def branch(self): return self._changeset[5].get("branch")
85 85 def extra(self): return self._changeset[5]
86 86 def tags(self): return self._repo.nodetags(self._node)
87 87
88 88 def parents(self):
89 89 """return contexts for each parent changeset"""
90 90 p = self._repo.changelog.parents(self._node)
91 91 return [changectx(self._repo, x) for x in p]
92 92
93 93 def children(self):
94 94 """return contexts for each child changeset"""
95 95 c = self._repo.changelog.children(self._node)
96 96 return [changectx(self._repo, x) for x in c]
97 97
98 98 def _fileinfo(self, path):
99 99 if '_manifest' in self.__dict__:
100 100 try:
101 101 return self._manifest[path], self._manifest.flags(path)
102 102 except KeyError:
103 103 raise revlog.LookupError(path, _("'%s' not found in manifest") % path)
104 104 if '_manifestdelta' in self.__dict__ or path in self.files():
105 105 if path in self._manifestdelta:
106 106 return self._manifestdelta[path], self._manifestdelta.flags(path)
107 107 node, flag = self._repo.manifest.find(self._changeset[0], path)
108 108 if not node:
109 109 raise revlog.LookupError(path, _("'%s' not found in manifest") % path)
110 110
111 111 return node, flag
112 112
113 113 def filenode(self, path):
114 114 return self._fileinfo(path)[0]
115 115
116 116 def fileflags(self, path):
117 117 try:
118 118 return self._fileinfo(path)[1]
119 119 except revlog.LookupError:
120 120 return ''
121 121
122 122 def filectx(self, path, fileid=None, filelog=None):
123 123 """get a file context from this changeset"""
124 124 if fileid is None:
125 125 fileid = self.filenode(path)
126 126 return filectx(self._repo, path, fileid=fileid,
127 127 changectx=self, filelog=filelog)
128 128
129 129 def filectxs(self):
130 130 """generate a file context for each file in this changeset's
131 131 manifest"""
132 132 mf = self.manifest()
133 133 m = mf.keys()
134 134 m.sort()
135 135 for f in m:
136 136 yield self.filectx(f, fileid=mf[f])
137 137
138 138 def ancestor(self, c2):
139 139 """
140 140 return the ancestor context of self and c2
141 141 """
142 142 n = self._repo.changelog.ancestor(self._node, c2._node)
143 143 return changectx(self._repo, n)
144 144
145 145 class filectx(object):
146 146 """A filecontext object makes access to data related to a particular
147 147 filerevision convenient."""
148 148 def __init__(self, repo, path, changeid=None, fileid=None,
149 149 filelog=None, changectx=None):
150 150 """changeid can be a changeset revision, node, or tag.
151 151 fileid can be a file revision or node."""
152 152 self._repo = repo
153 153 self._path = path
154 154
155 155 assert (changeid is not None
156 156 or fileid is not None
157 157 or changectx is not None)
158 158
159 159 if filelog:
160 160 self._filelog = filelog
161 161
162 162 if changeid is not None:
163 163 self._changeid = changeid
164 164 if changectx is not None:
165 165 self._changectx = changectx
166 166 if fileid is not None:
167 167 self._fileid = fileid
168 168
169 169 def __getattr__(self, name):
170 170 if name == '_changectx':
171 171 self._changectx = changectx(self._repo, self._changeid)
172 172 return self._changectx
173 173 elif name == '_filelog':
174 174 self._filelog = self._repo.file(self._path)
175 175 return self._filelog
176 176 elif name == '_changeid':
177 177 if '_changectx' in self.__dict__:
178 178 self._changeid = self._changectx.rev()
179 179 else:
180 180 self._changeid = self._filelog.linkrev(self._filenode)
181 181 return self._changeid
182 182 elif name == '_filenode':
183 183 if '_fileid' in self.__dict__:
184 184 self._filenode = self._filelog.lookup(self._fileid)
185 185 else:
186 186 self._filenode = self._changectx.filenode(self._path)
187 187 return self._filenode
188 188 elif name == '_filerev':
189 189 self._filerev = self._filelog.rev(self._filenode)
190 190 return self._filerev
191 191 else:
192 192 raise AttributeError, name
193 193
194 194 def __nonzero__(self):
195 195 try:
196 196 n = self._filenode
197 197 return True
198 198 except revlog.LookupError:
199 199 # file is missing
200 200 return False
201 201
202 202 def __str__(self):
203 203 return "%s@%s" % (self.path(), short(self.node()))
204 204
205 205 def __repr__(self):
206 206 return "<filectx %s>" % str(self)
207 207
208 208 def __eq__(self, other):
209 209 try:
210 210 return (self._path == other._path
211 211 and self._fileid == other._fileid)
212 212 except AttributeError:
213 213 return False
214 214
215 215 def __ne__(self, other):
216 216 return not (self == other)
217 217
218 218 def filectx(self, fileid):
219 219 '''opens an arbitrary revision of the file without
220 220 opening a new filelog'''
221 221 return filectx(self._repo, self._path, fileid=fileid,
222 222 filelog=self._filelog)
223 223
224 224 def filerev(self): return self._filerev
225 225 def filenode(self): return self._filenode
226 226 def fileflags(self): return self._changectx.fileflags(self._path)
227 227 def isexec(self): return 'x' in self.fileflags()
228 228 def islink(self): return 'l' in self.fileflags()
229 229 def filelog(self): return self._filelog
230 230
231 231 def rev(self):
232 232 if '_changectx' in self.__dict__:
233 233 return self._changectx.rev()
234 234 if '_changeid' in self.__dict__:
235 235 return self._changectx.rev()
236 236 return self._filelog.linkrev(self._filenode)
237 237
238 238 def linkrev(self): return self._filelog.linkrev(self._filenode)
239 239 def node(self): return self._changectx.node()
240 240 def user(self): return self._changectx.user()
241 241 def date(self): return self._changectx.date()
242 242 def files(self): return self._changectx.files()
243 243 def description(self): return self._changectx.description()
244 244 def branch(self): return self._changectx.branch()
245 245 def manifest(self): return self._changectx.manifest()
246 246 def changectx(self): return self._changectx
247 247
248 248 def data(self): return self._filelog.read(self._filenode)
249 249 def path(self): return self._path
250 250 def size(self): return self._filelog.size(self._filerev)
251 251
252 252 def cmp(self, text): return self._filelog.cmp(self._filenode, text)
253 253
254 254 def renamed(self):
255 255 """check if file was actually renamed in this changeset revision
256 256
257 257 If rename logged in file revision, we report copy for changeset only
258 258 if file revisions linkrev points back to the changeset in question
259 259 or both changeset parents contain different file revisions.
260 260 """
261 261
262 262 renamed = self._filelog.renamed(self._filenode)
263 263 if not renamed:
264 264 return renamed
265 265
266 266 if self.rev() == self.linkrev():
267 267 return renamed
268 268
269 269 name = self.path()
270 270 fnode = self._filenode
271 271 for p in self._changectx.parents():
272 272 try:
273 273 if fnode == p.filenode(name):
274 274 return None
275 275 except revlog.LookupError:
276 276 pass
277 277 return renamed
278 278
279 279 def parents(self):
280 280 p = self._path
281 281 fl = self._filelog
282 282 pl = [(p, n, fl) for n in self._filelog.parents(self._filenode)]
283 283
284 284 r = self._filelog.renamed(self._filenode)
285 285 if r:
286 286 pl[0] = (r[0], r[1], None)
287 287
288 288 return [filectx(self._repo, p, fileid=n, filelog=l)
289 289 for p,n,l in pl if n != nullid]
290 290
291 291 def children(self):
292 292 # hard for renames
293 293 c = self._filelog.children(self._filenode)
294 294 return [filectx(self._repo, self._path, fileid=x,
295 295 filelog=self._filelog) for x in c]
296 296
297 297 def annotate(self, follow=False, linenumber=None):
298 298 '''returns a list of tuples of (ctx, line) for each line
299 299 in the file, where ctx is the filectx of the node where
300 300 that line was last changed.
301 301 This returns tuples of ((ctx, linenumber), line) for each line,
302 302 if "linenumber" parameter is NOT "None".
303 303 In such tuples, linenumber means one at the first appearance
304 304 in the managed file.
305 305 To reduce annotation cost,
306 306 this returns fixed value(False is used) as linenumber,
307 307 if "linenumber" parameter is "False".'''
308 308
309 309 def decorate_compat(text, rev):
310 310 return ([rev] * len(text.splitlines()), text)
311 311
312 312 def without_linenumber(text, rev):
313 313 return ([(rev, False)] * len(text.splitlines()), text)
314 314
315 315 def with_linenumber(text, rev):
316 316 size = len(text.splitlines())
317 317 return ([(rev, i) for i in xrange(1, size + 1)], text)
318 318
319 319 decorate = (((linenumber is None) and decorate_compat) or
320 320 (linenumber and with_linenumber) or
321 321 without_linenumber)
322 322
323 323 def pair(parent, child):
324 324 for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
325 325 child[0][b1:b2] = parent[0][a1:a2]
326 326 return child
327 327
328 328 getlog = util.cachefunc(lambda x: self._repo.file(x))
329 329 def getctx(path, fileid):
330 330 log = path == self._path and self._filelog or getlog(path)
331 331 return filectx(self._repo, path, fileid=fileid, filelog=log)
332 332 getctx = util.cachefunc(getctx)
333 333
334 334 def parents(f):
335 335 # we want to reuse filectx objects as much as possible
336 336 p = f._path
337 337 if f._filerev is None: # working dir
338 338 pl = [(n.path(), n.filerev()) for n in f.parents()]
339 339 else:
340 340 pl = [(p, n) for n in f._filelog.parentrevs(f._filerev)]
341 341
342 342 if follow:
343 343 r = f.renamed()
344 344 if r:
345 345 pl[0] = (r[0], getlog(r[0]).rev(r[1]))
346 346
347 347 return [getctx(p, n) for p, n in pl if n != nullrev]
348 348
349 349 # use linkrev to find the first changeset where self appeared
350 350 if self.rev() != self.linkrev():
351 351 base = self.filectx(self.filerev())
352 352 else:
353 353 base = self
354 354
355 355 # find all ancestors
356 356 needed = {base: 1}
357 357 visit = [base]
358 358 files = [base._path]
359 359 while visit:
360 360 f = visit.pop(0)
361 361 for p in parents(f):
362 362 if p not in needed:
363 363 needed[p] = 1
364 364 visit.append(p)
365 365 if p._path not in files:
366 366 files.append(p._path)
367 367 else:
368 368 # count how many times we'll use this
369 369 needed[p] += 1
370 370
371 371 # sort by revision (per file) which is a topological order
372 372 visit = []
373 373 for f in files:
374 374 fn = [(n.rev(), n) for n in needed.keys() if n._path == f]
375 375 visit.extend(fn)
376 376 visit.sort()
377 377 hist = {}
378 378
379 379 for r, f in visit:
380 380 curr = decorate(f.data(), f)
381 381 for p in parents(f):
382 382 if p != nullid:
383 383 curr = pair(hist[p], curr)
384 384 # trim the history of unneeded revs
385 385 needed[p] -= 1
386 386 if not needed[p]:
387 387 del hist[p]
388 388 hist[f] = curr
389 389
390 390 return zip(hist[f][0], hist[f][1].splitlines(1))
391 391
392 392 def ancestor(self, fc2):
393 393 """
394 394 find the common ancestor file context, if any, of self, and fc2
395 395 """
396 396
397 397 acache = {}
398 398
399 399 # prime the ancestor cache for the working directory
400 400 for c in (self, fc2):
401 401 if c._filerev == None:
402 402 pl = [(n.path(), n.filenode()) for n in c.parents()]
403 403 acache[(c._path, None)] = pl
404 404
405 405 flcache = {self._path:self._filelog, fc2._path:fc2._filelog}
406 406 def parents(vertex):
407 407 if vertex in acache:
408 408 return acache[vertex]
409 409 f, n = vertex
410 410 if f not in flcache:
411 411 flcache[f] = self._repo.file(f)
412 412 fl = flcache[f]
413 413 pl = [(f, p) for p in fl.parents(n) if p != nullid]
414 414 re = fl.renamed(n)
415 415 if re:
416 416 pl.append(re)
417 417 acache[vertex] = pl
418 418 return pl
419 419
420 420 a, b = (self._path, self._filenode), (fc2._path, fc2._filenode)
421 421 v = ancestor.ancestor(a, b, parents)
422 422 if v:
423 423 f, n = v
424 424 return filectx(self._repo, f, fileid=n, filelog=flcache[f])
425 425
426 426 return None
427 427
428 428 class workingctx(changectx):
429 429 """A workingctx object makes access to data related to
430 430 the current working directory convenient."""
431 431 def __init__(self, repo):
432 432 self._repo = repo
433 433 self._rev = None
434 434 self._node = None
435 435
436 436 def __str__(self):
437 437 return str(self._parents[0]) + "+"
438 438
439 439 def __nonzero__(self):
440 440 return True
441 441
442 442 def __getattr__(self, name):
443 443 if name == '_parents':
444 444 self._parents = self._repo.parents()
445 445 return self._parents
446 446 if name == '_status':
447 447 self._status = self._repo.status()
448 448 return self._status
449 449 if name == '_manifest':
450 450 self._buildmanifest()
451 451 return self._manifest
452 452 else:
453 453 raise AttributeError, name
454 454
455 455 def _buildmanifest(self):
456 456 """generate a manifest corresponding to the working directory"""
457 457
458 458 man = self._parents[0].manifest().copy()
459 459 copied = self._repo.dirstate.copies()
460 460 is_exec = util.execfunc(self._repo.root,
461 461 lambda p: man.execf(copied.get(p,p)))
462 462 is_link = util.linkfunc(self._repo.root,
463 463 lambda p: man.linkf(copied.get(p,p)))
464 464 modified, added, removed, deleted, unknown = self._status[:5]
465 465 for i, l in (("a", added), ("m", modified), ("u", unknown)):
466 466 for f in l:
467 467 man[f] = man.get(copied.get(f, f), nullid) + i
468 468 try:
469 469 man.set(f, is_exec(f), is_link(f))
470 470 except OSError:
471 471 pass
472 472
473 473 for f in deleted + removed:
474 474 if f in man:
475 475 del man[f]
476 476
477 477 self._manifest = man
478 478
479 479 def manifest(self): return self._manifest
480 480
481 481 def user(self): return self._repo.ui.username()
482 482 def date(self): return util.makedate()
483 483 def description(self): return ""
484 484 def files(self):
485 485 f = self.modified() + self.added() + self.removed()
486 486 f.sort()
487 487 return f
488 488
489 489 def modified(self): return self._status[0]
490 490 def added(self): return self._status[1]
491 491 def removed(self): return self._status[2]
492 492 def deleted(self): return self._status[3]
493 493 def unknown(self): return self._status[4]
494 494 def clean(self): return self._status[5]
495 495 def branch(self): return self._repo.dirstate.branch()
496 496
497 497 def tags(self):
498 498 t = []
499 499 [t.extend(p.tags()) for p in self.parents()]
500 500 return t
501 501
502 502 def parents(self):
503 503 """return contexts for each parent changeset"""
504 504 return self._parents
505 505
506 506 def children(self):
507 507 return []
508 508
509 509 def fileflags(self, path):
510 510 if '_manifest' in self.__dict__:
511 511 try:
512 512 return self._manifest.flags(path)
513 513 except KeyError:
514 514 return ''
515 515
516 516 pnode = self._parents[0].changeset()[0]
517 517 orig = self._repo.dirstate.copies().get(path, path)
518 518 node, flag = self._repo.manifest.find(pnode, orig)
519 519 is_link = util.linkfunc(self._repo.root, lambda p: 'l' in flag)
520 520 is_exec = util.execfunc(self._repo.root, lambda p: 'x' in flag)
521 521 try:
522 522 return (is_link(path) and 'l' or '') + (is_exec(path) and 'e' or '')
523 523 except OSError:
524 524 pass
525 525
526 526 if not node or path in self.deleted() or path in self.removed():
527 527 return ''
528 528 return flag
529 529
530 530 def filectx(self, path, filelog=None):
531 531 """get a file context from the working directory"""
532 532 return workingfilectx(self._repo, path, workingctx=self,
533 533 filelog=filelog)
534 534
535 535 def ancestor(self, c2):
536 536 """return the ancestor context of self and c2"""
537 537 return self._parents[0].ancestor(c2) # punt on two parents for now
538 538
class workingfilectx(filectx):
    """A workingfilectx object makes access to data related to a particular
    file in the working directory convenient."""
    def __init__(self, repo, path, filelog=None, workingctx=None):
        """changeid can be a changeset revision, node, or tag.
        fileid can be a file revision or node."""
        self._repo = repo
        self._path = path
        self._changeid = None
        self._filerev = self._filenode = None

        if filelog:
            self._filelog = filelog
        if workingctx:
            self._changectx = workingctx

    def __getattr__(self, name):
        # attributes not supplied to __init__ are computed lazily here
        if name == '_changectx':
            self._changectx = workingctx(self._repo)
            return self._changectx
        elif name == '_repopath':
            # path in the repository: the copy source when this file was
            # copied/renamed in the working directory
            self._repopath = (self._repo.dirstate.copied(self._path)
                              or self._path)
            return self._repopath
        elif name == '_filelog':
            self._filelog = self._repo.file(self._repopath)
            return self._filelog
        else:
            raise AttributeError, name

    def __nonzero__(self):
        return True

    def __str__(self):
        return "%s@%s" % (self.path(), self._changectx)

    def filectx(self, fileid):
        '''opens an arbitrary revision of the file without
        opening a new filelog'''
        return filectx(self._repo, self._repopath, fileid=fileid,
                       filelog=self._filelog)

    def rev(self):
        # prefer the attached changeset's revision when available
        if '_changectx' in self.__dict__:
            return self._changectx.rev()
        return self._filelog.linkrev(self._filenode)

    def data(self): return self._repo.wread(self._path)
    def renamed(self):
        # return (copy source path, its filenode in the first parent),
        # or None when the working file is not a copy
        rp = self._repopath
        if rp == self._path:
            return None
        return rp, self._changectx._parents[0]._manifest.get(rp, nullid)

    def parents(self):
        '''return parent filectxs, following copies if necessary'''
        p = self._path
        rp = self._repopath
        pcl = self._changectx._parents
        fl = self._filelog
        pl = [(rp, pcl[0]._manifest.get(rp, nullid), fl)]
        if len(pcl) > 1:
            if rp != p:
                # the copy source's filelog does not apply to the
                # second parent's (uncopied) path
                fl = None
            pl.append((p, pcl[1]._manifest.get(p, nullid), fl))

        return [filectx(self._repo, p, fileid=n, filelog=l)
                for p,n,l in pl if n != nullid]

    def children(self):
        return []

    def size(self): return os.stat(self._repo.wjoin(self._path)).st_size
    def date(self):
        # mtime of the working file; falls back to the changeset's date
        # when the file is missing. NOTE(review): relies on 'errno'
        # being imported at module top (not visible in this chunk) --
        # confirm.
        t, tz = self._changectx.date()
        try:
            return (int(os.lstat(self._repo.wjoin(self._path)).st_mtime), tz)
        except OSError, err:
            if err.errno != errno.ENOENT: raise
            return (t, tz)

    # NOTE(review): this cmp() returns True when the contents are EQUAL,
    # while filelog.cmp (below in this changeset) returns True when they
    # DIFFER -- confirm callers expect the inverted sense.
    def cmp(self, text): return self._repo.wread(self._path) == text
@@ -1,598 +1,598 b''
1 1 """
2 2 dirstate.py - working directory tracking for mercurial
3 3
4 4 Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 5
6 6 This software may be used and distributed according to the terms
7 7 of the GNU General Public License, incorporated herein by reference.
8 8 """
9 9
10 10 from node import nullid
11 11 from i18n import _
12 import struct, os, time, bisect, stat, strutil, util, re, errno, ignore
12 import struct, os, bisect, stat, strutil, util, errno, ignore
13 13 import cStringIO, osutil
14 14
15 15 _unknown = ('?', 0, 0, 0)
16 16 _format = ">cllll"
17 17
class dirstate(object):
    """Track working-directory state: a map of tracked file ->
    (state char, mode, size, mtime) plus the working directory's
    parents, branch and copy records, persisted in '.hg/dirstate'."""

    def __init__(self, opener, ui, root):
        # opener: callable that opens files under .hg ('dirstate', 'branch')
        self._opener = opener
        self._root = root
        self._dirty = False       # unsaved changes to the map/copymap?
        self._dirtypl = False     # parents changed in memory (setparents)?
        self._ui = ui

    def __getattr__(self, name):
        # expensive attributes are computed lazily on first access;
        # invalidate() deletes them so they are rebuilt on demand
        if name == '_map':
            self._read()
            return self._map
        elif name == '_copymap':
            self._read()
            return self._copymap
        elif name == '_branch':
            try:
                self._branch = (self._opener("branch").read().strip()
                                or "default")
            except IOError:
                self._branch = "default"
            return self._branch
        elif name == '_pl':
            # working dir parents: two 20-byte nodeids at the head of
            # the dirstate file
            self._pl = [nullid, nullid]
            try:
                st = self._opener("dirstate").read(40)
                if len(st) == 40:
                    self._pl = st[:20], st[20:40]
            except IOError, err:
                if err.errno != errno.ENOENT: raise
            return self._pl
        elif name == '_dirs':
            # per-directory refcount of tracked (non-removed) files,
            # maintained by _incpath/_decpath
            self._dirs = {}
            for f in self._map:
                if self[f] != 'r':
                    self._incpath(f)
            return self._dirs
        elif name == '_ignore':
            # ignore matcher built from .hgignore plus ui 'ignore' entries
            files = [self._join('.hgignore')]
            for name, path in self._ui.configitems("ui"):
                if name == 'ignore' or name.startswith('ignore.'):
                    files.append(os.path.expanduser(path))
            self._ignore = ignore.ignore(self._root, files, self._ui.warn)
            return self._ignore
        elif name == '_slash':
            # should pathto() normalize backslashes to slashes?
            self._slash = self._ui.configbool('ui', 'slash') and os.sep != '/'
            return self._slash
        else:
            raise AttributeError, name
68 68
    def _join(self, f):
        # join a repo-relative name with the repository root
        return os.path.join(self._root, f)

    def getcwd(self):
        '''return the cwd relative to the repository root, or an
        absolute path when the cwd lies outside the repository'''
        cwd = os.getcwd()
        if cwd == self._root: return ''
        # self._root ends with a path separator if self._root is '/' or 'C:\'
        rootsep = self._root
        if not util.endswithsep(rootsep):
            rootsep += os.sep
        if cwd.startswith(rootsep):
            return cwd[len(rootsep):]
        else:
            # we're outside the repo. return an absolute path.
            return cwd

    def pathto(self, f, cwd=None):
        # render repo-relative f relative to cwd, honoring ui.slash
        if cwd is None:
            cwd = self.getcwd()
        path = util.pathto(self._root, cwd, f)
        if self._slash:
            return util.normpath(path)
        return path

    def __getitem__(self, key):
        ''' current states:
        n  normal
        m  needs merging
        r  marked for removal
        a  marked for addition
        ?  not tracked'''
        return self._map.get(key, ("?",))[0]

    def __contains__(self, key):
        return key in self._map

    def __iter__(self):
        # yield tracked names in sorted order
        a = self._map.keys()
        a.sort()
        for x in a:
            yield x

    def parents(self):
        return self._pl

    def branch(self):
        return self._branch

    def setparents(self, p1, p2=nullid):
        # _dirtypl prevents a later _read() from clobbering the
        # in-memory parents before write() persists them
        self._dirty = self._dirtypl = True
        self._pl = p1, p2

    def setbranch(self, branch):
        # branch is written immediately, unlike the dirstate map
        self._branch = branch
        self._opener("branch", "w").write(branch + '\n')
124 124
    def _read(self):
        '''parse the on-disk dirstate file into _map/_copymap/_pl'''
        self._map = {}
        self._copymap = {}
        if not self._dirtypl:
            self._pl = [nullid, nullid]
        try:
            st = self._opener("dirstate").read()
        except IOError, err:
            if err.errno != errno.ENOENT: raise
            return
        if not st:
            return

        if not self._dirtypl:
            self._pl = [st[:20], st[20: 40]]

        # deref fields so they will be local in loop
        dmap = self._map
        copymap = self._copymap
        unpack = struct.unpack
        e_size = struct.calcsize(_format)
        pos1 = 40
        l = len(st)

        # the inner loop
        while pos1 < l:
            pos2 = pos1 + e_size
            e = unpack(">cllll", st[pos1:pos2]) # a literal here is faster
            # e[4] is the length of the variable-size name that follows
            pos1 = pos2 + e[4]
            f = st[pos2:pos1]
            if '\0' in f:
                # copies are stored as "dest\0source" in the name field
                f, c = f.split('\0')
                copymap[f] = c
            dmap[f] = e # we hold onto e[4] because making a subtuple is slow

    def invalidate(self):
        '''drop all cached state so it is reloaded from disk on demand'''
        for a in "_map _copymap _branch _pl _dirs _ignore".split():
            if a in self.__dict__:
                delattr(self, a)
        self._dirty = False

    def copy(self, source, dest):
        '''record that dest was copied from source'''
        self._dirty = True
        self._copymap[dest] = source

    def copied(self, file):
        # copy source of file, or None when it is not a copy
        return self._copymap.get(file, None)

    def copies(self):
        return self._copymap
175 175
    def _incpath(self, path):
        # increment the tracked-file refcount of each ancestor directory
        c = path.rfind('/')
        if c >= 0:
            dirs = self._dirs
            base = path[:c]
            if base not in dirs:
                # first file in this directory: recurse for its parents
                self._incpath(base)
                dirs[base] = 1
            else:
                dirs[base] += 1

    def _decpath(self, path):
        # decrement ancestor-directory refcounts, pruning empty entries
        c = path.rfind('/')
        if c >= 0:
            base = path[:c]
            dirs = self._dirs
            if dirs[base] == 1:
                del dirs[base]
                self._decpath(base)
            else:
                dirs[base] -= 1

    def _incpathcheck(self, f):
        # like _incpath, but first reject names that are invalid or
        # would collide with an existing file or directory
        if '\r' in f or '\n' in f:
            raise util.Abort(_("'\\n' and '\\r' disallowed in filenames: %r")
                             % f)
        # shadows
        if f in self._dirs:
            raise util.Abort(_('directory %r already in dirstate') % f)
        for c in strutil.rfindall(f, '/'):
            d = f[:c]
            if d in self._dirs:
                break
            if d in self._map and self[d] != 'r':
                raise util.Abort(_('file %r in dirstate clashes with %r') %
                                 (d, f))
        self._incpath(f)

    def _changepath(self, f, newstate, relaxed=False):
        # handle upcoming path changes: keep the _dirs refcounts in sync
        # when f moves between tracked ('nma') and untracked ('?r') states
        oldstate = self[f]
        if oldstate not in "?r" and newstate in "?r":
            if "_dirs" in self.__dict__:
                self._decpath(f)
            return
        if oldstate in "?r" and newstate not in "?r":
            if relaxed and oldstate == '?':
                # XXX
                # in relaxed mode we assume the caller knows
                # what it is doing, workaround for updating
                # dir-to-file revisions
                if "_dirs" in self.__dict__:
                    self._incpath(f)
                return
            self._incpathcheck(f)
            return
232 232
    def normal(self, f):
        'mark a file normal and clean'
        self._dirty = True
        self._changepath(f, 'n', True)
        s = os.lstat(self._join(f))
        self._map[f] = ('n', s.st_mode, s.st_size, s.st_mtime, 0)
        if f in self._copymap:
            del self._copymap[f]

    def normallookup(self, f):
        'mark a file normal, but possibly dirty'
        # size/mtime of -1 send the entry to 'lookup' on the next
        # status() so its contents get compared
        self._dirty = True
        self._changepath(f, 'n', True)
        self._map[f] = ('n', 0, -1, -1, 0)
        if f in self._copymap:
            del self._copymap[f]

    def normaldirty(self, f):
        'mark a file normal, but dirty'
        # size of -2 makes status() report the entry as modified
        # unconditionally
        self._dirty = True
        self._changepath(f, 'n', True)
        self._map[f] = ('n', 0, -2, -1, 0)
        if f in self._copymap:
            del self._copymap[f]

    def add(self, f):
        'mark a file added'
        self._dirty = True
        self._changepath(f, 'a')
        self._map[f] = ('a', 0, -1, -1, 0)
        if f in self._copymap:
            del self._copymap[f]

    def remove(self, f):
        'mark a file removed'
        self._dirty = True
        self._changepath(f, 'r')
        self._map[f] = ('r', 0, 0, 0, 0)
        if f in self._copymap:
            del self._copymap[f]

    def merge(self, f):
        'mark a file merged'
        self._dirty = True
        s = os.lstat(self._join(f))
        self._changepath(f, 'm', True)
        self._map[f] = ('m', s.st_mode, s.st_size, s.st_mtime, 0)
        if f in self._copymap:
            del self._copymap[f]

    def forget(self, f):
        'forget a file'
        self._dirty = True
        try:
            self._changepath(f, '?')
            del self._map[f]
        except KeyError:
            self._ui.warn(_("not in dirstate: %s\n") % f)
291 291
292 292 def clear(self):
293 293 self._map = {}
294 294 if "_dirs" in self.__dict__:
295 295 delattr(self, "_dirs");
296 296 self._copymap = {}
297 297 self._pl = [nullid, nullid]
298 298 self._dirty = True
299 299
300 300 def rebuild(self, parent, files):
301 301 self.clear()
302 302 for f in files:
303 303 if files.execf(f):
304 304 self._map[f] = ('n', 0777, -1, 0, 0)
305 305 else:
306 306 self._map[f] = ('n', 0666, -1, 0, 0)
307 307 self._pl = (parent, nullid)
308 308 self._dirty = True
309 309
310 310 def write(self):
311 311 if not self._dirty:
312 312 return
313 313 cs = cStringIO.StringIO()
314 314 copymap = self._copymap
315 315 pack = struct.pack
316 316 write = cs.write
317 317 write("".join(self._pl))
318 318 for f, e in self._map.iteritems():
319 319 if f in copymap:
320 320 f = "%s\0%s" % (f, copymap[f])
321 321 e = pack(_format, e[0], e[1], e[2], e[3], len(f))
322 322 write(e)
323 323 write(f)
324 324 st = self._opener("dirstate", "w", atomictemp=True)
325 325 st.write(cs.getvalue())
326 326 st.rename()
327 327 self._dirty = self._dirtypl = False
328 328
    def _filter(self, files):
        # return the subset of _map named by `files`, expanding any name
        # that is a directory prefix to everything beneath it
        ret = {}
        unknown = []

        for x in files:
            if x == '.':
                return self._map.copy()
            if x not in self._map:
                unknown.append(x)
            else:
                ret[x] = self._map[x]

        if not unknown:
            return ret

        b = self._map.keys()
        b.sort()
        blen = len(b)

        for x in unknown:
            # entries under directory x are contiguous in the sorted
            # key list, starting right after "x/"
            bs = bisect.bisect(b, "%s%s" % (x, '/'))
            while bs < blen:
                s = b[bs]
                if len(s) > len(x) and s.startswith(x):
                    ret[s] = self._map[s]
                else:
                    break
                bs += 1
        return ret

    def _supported(self, f, mode, verbose=False):
        # only regular files and symlinks can be tracked; optionally
        # warn about other file types
        if stat.S_ISREG(mode) or stat.S_ISLNK(mode):
            return True
        if verbose:
            kind = 'unknown'
            if stat.S_ISCHR(mode): kind = _('character device')
            elif stat.S_ISBLK(mode): kind = _('block device')
            elif stat.S_ISFIFO(mode): kind = _('fifo')
            elif stat.S_ISSOCK(mode): kind = _('socket')
            elif stat.S_ISDIR(mode): kind = _('directory')
            self._ui.warn(_('%s: unsupported file type (type is %s)\n')
                          % (self.pathto(f), kind))
        return False

    def _dirignore(self, f):
        # True when f or any of its parent directories is ignored
        if self._ignore(f):
            return True
        for c in strutil.findall(f, '/'):
            if self._ignore(f[:c]):
                return True
        return False
380 380
    def walk(self, files=None, match=util.always, badmatch=None):
        # filter out the stat
        for src, f, st in self.statwalk(files, match, badmatch=badmatch):
            yield src, f

    def statwalk(self, files=None, match=util.always, unknown=True,
                 ignored=False, badmatch=None, directories=False):
        '''
        walk recursively through the directory tree, finding all files
        matched by the match function

        results are yielded in a tuple (src, filename, st), where src
        is one of:
        'f' the file was found in the directory tree
        'd' the file is a directory of the tree
        'm' the file was only in the dirstate and not in the tree
        'b' file was not found and matched badmatch

        and st is the stat result if the file was found in the directory.
        '''

        # walk all files by default
        if not files:
            files = ['.']
            dc = self._map.copy()
        else:
            files = util.unique(files)
            dc = self._filter(files)

        def imatch(file_):
            if file_ not in dc and self._ignore(file_):
                return False
            return match(file_)

        # TODO: don't walk unknown directories if unknown and ignored are False
        ignore = self._ignore
        dirignore = self._dirignore
        if ignored:
            imatch = match
            ignore = util.never
            dirignore = util.never

        # self._root may end with a path separator when self._root == '/'
        common_prefix_len = len(self._root)
        if not util.endswithsep(self._root):
            common_prefix_len += 1

        # bind frequently-used names to locals: this walk is a hot path
        normpath = util.normpath
        listdir = osutil.listdir
        lstat = os.lstat
        bisect_left = bisect.bisect_left
        isdir = os.path.isdir
        pconvert = util.pconvert
        join = os.path.join
        s_isdir = stat.S_ISDIR
        supported = self._supported
        _join = self._join
        known = {'.hg': 1}

        # recursion free walker, faster than os.walk.
        def findfiles(s):
            work = [s]
            wadd = work.append
            found = []
            add = found.append
            if directories:
                add((normpath(s[common_prefix_len:]), 'd', lstat(s)))
            while work:
                top = work.pop()
                entries = listdir(top, stat=True)
                # nd is the top of the repository dir tree
                nd = normpath(top[common_prefix_len:])
                if nd == '.':
                    nd = ''
                else:
                    # do not recurse into a repo contained in this
                    # one. use bisect to find .hg directory so speed
                    # is good on big directory.
                    names = [e[0] for e in entries]
                    hg = bisect_left(names, '.hg')
                    if hg < len(names) and names[hg] == '.hg':
                        if isdir(join(top, '.hg')):
                            continue
                for f, kind, st in entries:
                    np = pconvert(join(nd, f))
                    if np in known:
                        continue
                    known[np] = 1
                    p = join(top, f)
                    # don't trip over symlinks
                    if kind == stat.S_IFDIR:
                        if not ignore(np):
                            wadd(p)
                        if directories:
                            add((np, 'd', st))
                        if np in dc and match(np):
                            # tracked name that is now a directory
                            add((np, 'm', st))
                    elif imatch(np):
                        if supported(np, st.st_mode):
                            add((np, 'f', st))
                        elif np in dc:
                            # tracked but of an unsupported type now
                            add((np, 'm', st))
            found.sort()
            return found

        # step one, find all files that match our criteria
        files.sort()
        for ff in files:
            nf = normpath(ff)
            f = _join(ff)
            try:
                st = lstat(f)
            except OSError, inst:
                # missing on disk: only an error if nothing tracked
                # lives at or under this name
                found = False
                for fn in dc:
                    if nf == fn or (fn.startswith(nf) and fn[len(nf)] == '/'):
                        found = True
                        break
                if not found:
                    if inst.errno != errno.ENOENT or not badmatch:
                        self._ui.warn('%s: %s\n' %
                                      (self.pathto(ff), inst.strerror))
                    elif badmatch and badmatch(ff) and imatch(nf):
                        yield 'b', ff, None
                continue
            if s_isdir(st.st_mode):
                if not dirignore(nf):
                    for f, src, st in findfiles(f):
                        yield src, f, st
            else:
                if nf in known:
                    continue
                known[nf] = 1
                if match(nf):
                    if supported(ff, st.st_mode, verbose=True):
                        yield 'f', nf, st
                    elif ff in dc:
                        yield 'm', nf, st

        # step two run through anything left in the dc hash and yield
        # if we haven't already seen it
        ks = dc.keys()
        ks.sort()
        for k in ks:
            if k in known:
                continue
            known[k] = 1
            if imatch(k):
                yield 'm', k, None
530 530
    def status(self, files, match, list_ignored, list_clean, list_unknown=True):
        '''classify every walked file into one of the status buckets and
        return them as a tuple (lookup, modified, added, removed,
        deleted, unknown, ignored, clean); "lookup" entries need a
        content comparison by the caller'''
        lookup, modified, added, unknown, ignored = [], [], [], [], []
        removed, deleted, clean = [], [], []

        files = files or []
        # bind hot names to locals: this loop runs once per walked file
        _join = self._join
        lstat = os.lstat
        cmap = self._copymap
        dmap = self._map
        ladd = lookup.append
        madd = modified.append
        aadd = added.append
        uadd = unknown.append
        iadd = ignored.append
        radd = removed.append
        dadd = deleted.append
        cadd = clean.append

        for src, fn, st in self.statwalk(files, match, unknown=list_unknown,
                                         ignored=list_ignored):
            if fn in dmap:
                type_, mode, size, time, foo = dmap[fn]
            else:
                # untracked file: ignored or unknown
                if (list_ignored or fn in files) and self._dirignore(fn):
                    if list_ignored:
                        iadd(fn)
                elif list_unknown:
                    uadd(fn)
                continue
            if src == 'm':
                # tracked but not seen on disk by the walker
                nonexistent = True
                if not st:
                    try:
                        st = lstat(_join(fn))
                    except OSError, inst:
                        if inst.errno not in (errno.ENOENT, errno.ENOTDIR):
                            raise
                        st = None
                # We need to re-check that it is a valid file
                if st and self._supported(fn, st.st_mode):
                    nonexistent = False
                # XXX: what to do with file no longer present in the fs
                # who are not removed in the dirstate ?
                if nonexistent and type_ in "nma":
                    dadd(fn)
                    continue
            # check the common case first
            if type_ == 'n':
                if not st:
                    st = lstat(_join(fn))
                # size -2 (normaldirty) and copies are always modified;
                # a changed size or exec bit is modified too
                if (size >= 0 and (size != st.st_size
                                   or (mode ^ st.st_mode) & 0100)
                    or size == -2
                    or fn in self._copymap):
                    madd(fn)
                elif time != int(st.st_mtime):
                    # same size but newer mtime: needs a content check
                    ladd(fn)
                elif list_clean:
                    cadd(fn)
            elif type_ == 'm':
                madd(fn)
            elif type_ == 'a':
                aadd(fn)
            elif type_ == 'r':
                radd(fn)

        return (lookup, modified, added, removed, deleted, unknown, ignored,
                clean)
@@ -1,89 +1,89 b''
1 1 # extensions.py - extension handling for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 import imp, os
9 import util, sys
9 import util
10 10 from i18n import _
11 11
# registry of loaded extensions: shortname -> module (None while loading
# or after a failed load), plus the order in which they were loaded
_extensions = {}
_order = []

def extensions():
    '''yield (shortname, module) for every successfully loaded extension,
    in load order'''
    for ext in _order:
        mod = _extensions[ext]
        if mod:
            yield ext, mod
20 20
def find(name):
    '''return module with given extension name'''
    # exact shortname first
    if name in _extensions:
        return _extensions[name]
    # otherwise match the last dotted or slashed component
    for fullname, mod in _extensions.iteritems():
        if fullname.endswith('.' + name) or fullname.endswith('/' + name):
            return mod
    raise KeyError(name)
30 30
def load(ui, name, path):
    '''load one extension, either from an explicit file/directory path
    or by module name (tried as hgext.<name> first, then <name>);
    registers it and calls its uisetup hook if present'''
    if name.startswith('hgext.'):
        shortname = name[6:]
    else:
        shortname = name
    if shortname in _extensions:
        return
    # placeholder so a failing load is not retried
    _extensions[shortname] = None
    if path:
        # the module will be loaded in sys.modules
        # choose an unique name so that it doesn't
        # conflicts with other modules
        module_name = "hgext_%s" % name.replace('.', '_')
        if os.path.isdir(path):
            # module/__init__.py style
            d, f = os.path.split(path)
            fd, fpath, desc = imp.find_module(f, [d])
            mod = imp.load_module(module_name, fd, fpath, desc)
        else:
            mod = imp.load_source(module_name, path)
    else:
        def importh(name):
            # __import__ returns the top-level package; walk down to
            # the requested submodule
            mod = __import__(name)
            components = name.split('.')
            for comp in components[1:]:
                mod = getattr(mod, comp)
            return mod
        try:
            mod = importh("hgext.%s" % name)
        except ImportError:
            mod = importh(name)
    _extensions[shortname] = mod
    _order.append(shortname)

    # give the extension a chance to install its ui hooks
    uisetup = getattr(mod, 'uisetup', None)
    if uisetup:
        uisetup(ui)
68 68
def loadall(ui):
    '''load every extension listed in the [extensions] config section;
    a path value starting with "!" disables that extension'''
    result = ui.configitems("extensions")
    for i, (name, path) in enumerate(result):
        if path:
            if path[0] == '!':
                continue
            path = os.path.expanduser(path)
        try:
            load(ui, name, path)
        except (util.SignalInterrupt, KeyboardInterrupt):
            raise
        except Exception, inst:
            # a broken extension must not abort startup: warn and keep
            # going, unless ui.print_exc() asks us to stop
            if path:
                ui.warn(_("*** failed to import extension %s from %s: %s\n")
                        % (name, path, inst))
            else:
                ui.warn(_("*** failed to import extension %s: %s\n")
                        % (name, inst))
            if ui.print_exc():
                return 1
89 89
@@ -1,84 +1,83 b''
1 1 # filelog.py - file history class for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import bin, nullid
9 9 from revlog import revlog
10 import os
11 10
class filelog(revlog):
    """Per-file revision storage.

    File data lives in a revlog under data/<encoded path>.i; a revision
    may carry a metadata header framed by '\\1\\n' markers, used for
    copy/rename tracking."""
    def __init__(self, opener, path):
        revlog.__init__(self, opener,
                        "/".join(("data", self.encodedir(path + ".i"))))

    # This avoids a collision between a file named foo and a dir named
    # foo.i or foo.d
    def encodedir(self, path):
        return (path
                .replace(".hg/", ".hg.hg/")
                .replace(".i/", ".i.hg/")
                .replace(".d/", ".d.hg/"))

    def decodedir(self, path):
        # inverse of encodedir; replacements run in the reverse order
        return (path
                .replace(".d.hg/", ".d/")
                .replace(".i.hg/", ".i/")
                .replace(".hg.hg/", ".hg/"))

    def read(self, node):
        # file data with any '\1\n'-framed metadata header stripped
        t = self.revision(node)
        if not t.startswith('\1\n'):
            return t
        s = t.index('\1\n', 2)
        return t[s+2:]

    def _readmeta(self, node):
        # metadata dict parsed from "key: value" lines of the header,
        # or {} when the revision has no header
        t = self.revision(node)
        if not t.startswith('\1\n'):
            return {}
        s = t.index('\1\n', 2)
        mt = t[2:s]
        m = {}
        for l in mt.splitlines():
            k, v = l.split(": ", 1)
            m[k] = v
        return m

    def add(self, text, meta, transaction, link, p1=None, p2=None):
        # prepend a metadata header when meta is given, or when the text
        # itself starts with the marker and would be misread as a header
        if meta or text.startswith('\1\n'):
            mt = ""
            if meta:
                mt = [ "%s: %s\n" % (k, v) for k,v in meta.items() ]
            text = "\1\n%s\1\n%s" % ("".join(mt), text)
        return self.addrevision(text, transaction, link, p1, p2)

    def renamed(self, node):
        # return (source path, source filenode) when this revision has
        # copy metadata, else False; copy metadata is only consulted
        # when the first parent is null
        if self.parents(node)[0] != nullid:
            return False
        m = self._readmeta(node)
        if m and "copy" in m:
            return (m["copy"], bin(m["copyrev"]))
        return False

    def size(self, rev):
        """return the size of a given revision"""

        # for revisions with renames, we have to go the slow way
        node = self.node(rev)
        if self.renamed(node):
            return len(self.read(node))

        return revlog.size(self, rev)

    def cmp(self, node, text):
        """compare text with a given file revision
        (returns a true value when they differ)"""

        # for renames, we have to go the slow way
        if self.renamed(node):
            t2 = self.read(node)
            return t2 != text

        return revlog.cmp(self, node, text)
@@ -1,217 +1,217 b''
1 1 # filemerge.py - file-level merge handling for Mercurial
2 2 #
3 3 # Copyright 2006, 2007, 2008 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import nullrev
9 9 from i18n import _
10 import util, os, tempfile, context, simplemerge, re, filecmp
10 import util, os, tempfile, simplemerge, re, filecmp
11 11
12 12 def _toolstr(ui, tool, part, default=""):
13 13 return ui.config("merge-tools", tool + "." + part, default)
14 14
15 15 def _toolbool(ui, tool, part, default=False):
16 16 return ui.configbool("merge-tools", tool + "." + part, default)
17 17
18 18 def _findtool(ui, tool):
19 19 k = _toolstr(ui, tool, "regkey")
20 20 if k:
21 21 p = util.lookup_reg(k, _toolstr(ui, tool, "regname"))
22 22 if p:
23 23 p = util.find_exe(p + _toolstr(ui, tool, "regappend"))
24 24 if p:
25 25 return p
26 26 return util.find_exe(_toolstr(ui, tool, "executable", tool))
27 27
28 28 def _picktool(repo, ui, path, binary, symlink):
29 29 def check(tool, pat, symlink, binary):
30 30 tmsg = tool
31 31 if pat:
32 32 tmsg += " specified for " + pat
33 33 if pat and not _findtool(ui, tool): # skip search if not matching
34 34 ui.warn(_("couldn't find merge tool %s\n") % tmsg)
35 35 elif symlink and not _toolbool(ui, tool, "symlink"):
36 36 ui.warn(_("tool %s can't handle symlinks\n") % tmsg)
37 37 elif binary and not _toolbool(ui, tool, "binary"):
38 38 ui.warn(_("tool %s can't handle binary\n") % tmsg)
39 39 elif not util.gui() and _toolbool(ui, tool, "gui"):
40 40 ui.warn(_("tool %s requires a GUI\n") % tmsg)
41 41 else:
42 42 return True
43 43 return False
44 44
45 45 # HGMERGE takes precedence
46 46 hgmerge = os.environ.get("HGMERGE")
47 47 if hgmerge:
48 48 return (hgmerge, hgmerge)
49 49
50 50 # then patterns
51 51 for pat, tool in ui.configitems("merge-patterns"):
52 52 mf = util.matcher(repo.root, "", [pat], [], [])[1]
53 53 if mf(path) and check(tool, pat, symlink, False):
54 54 toolpath = _findtool(ui, tool)
55 55 return (tool, '"' + toolpath + '"')
56 56
57 57 # then merge tools
58 58 tools = {}
59 59 for k,v in ui.configitems("merge-tools"):
60 60 t = k.split('.')[0]
61 61 if t not in tools:
62 62 tools[t] = int(_toolstr(ui, t, "priority", "0"))
63 63 names = tools.keys()
64 64 tools = [(-p,t) for t,p in tools.items()]
65 65 tools.sort()
66 66 uimerge = ui.config("ui", "merge")
67 67 if uimerge:
68 68 if uimerge not in names:
69 69 return (uimerge, uimerge)
70 70 tools.insert(0, (None, uimerge)) # highest priority
71 71 tools.append((None, "hgmerge")) # the old default, if found
72 72 for p,t in tools:
73 73 toolpath = _findtool(ui, t)
74 74 if toolpath and check(t, None, symlink, binary):
75 75 return (t, '"' + toolpath + '"')
76 76 # internal merge as last resort
77 77 return (not (symlink or binary) and "internal:merge" or None, None)
78 78
79 79 def _eoltype(data):
80 80 "Guess the EOL type of a file"
81 81 if '\0' in data: # binary
82 82 return None
83 83 if '\r\n' in data: # Windows
84 84 return '\r\n'
85 85 if '\r' in data: # Old Mac
86 86 return '\r'
87 87 if '\n' in data: # UNIX
88 88 return '\n'
89 89 return None # unknown
90 90
91 91 def _matcheol(file, origfile):
92 92 "Convert EOL markers in a file to match origfile"
93 93 tostyle = _eoltype(open(origfile, "rb").read())
94 94 if tostyle:
95 95 data = open(file, "rb").read()
96 96 style = _eoltype(data)
97 97 if style:
98 98 newdata = data.replace(style, tostyle)
99 99 if newdata != data:
100 100 open(file, "wb").write(newdata)
101 101
102 102 def filemerge(repo, fw, fd, fo, wctx, mctx):
103 103 """perform a 3-way merge in the working directory
104 104
105 105 fw = original filename in the working directory
106 106 fd = destination filename in the working directory
107 107 fo = filename in other parent
108 108 wctx, mctx = working and merge changecontexts
109 109 """
110 110
111 111 def temp(prefix, ctx):
112 112 pre = "%s~%s." % (os.path.basename(ctx.path()), prefix)
113 113 (fd, name) = tempfile.mkstemp(prefix=pre)
114 114 data = repo.wwritedata(ctx.path(), ctx.data())
115 115 f = os.fdopen(fd, "wb")
116 116 f.write(data)
117 117 f.close()
118 118 return name
119 119
120 120 def isbin(ctx):
121 121 try:
122 122 return util.binary(ctx.data())
123 123 except IOError:
124 124 return False
125 125
126 126 fco = mctx.filectx(fo)
127 127 if not fco.cmp(wctx.filectx(fd).data()): # files identical?
128 128 return None
129 129
130 130 ui = repo.ui
131 131 fcm = wctx.filectx(fw)
132 132 fca = fcm.ancestor(fco) or repo.filectx(fw, fileid=nullrev)
133 133 binary = isbin(fcm) or isbin(fco) or isbin(fca)
134 134 symlink = fcm.islink() or fco.islink()
135 135 tool, toolpath = _picktool(repo, ui, fw, binary, symlink)
136 136 ui.debug(_("picked tool '%s' for %s (binary %s symlink %s)\n") %
137 137 (tool, fw, binary, symlink))
138 138
139 139 if not tool:
140 140 tool = "internal:local"
141 141 if ui.prompt(_(" no tool found to merge %s\n"
142 142 "keep (l)ocal or take (o)ther?") % fw,
143 143 _("[lo]"), _("l")) != _("l"):
144 144 tool = "internal:other"
145 145 if tool == "internal:local":
146 146 return 0
147 147 if tool == "internal:other":
148 148 repo.wwrite(fd, fco.data(), fco.fileflags())
149 149 return 0
150 150 if tool == "internal:fail":
151 151 return 1
152 152
153 153 # do the actual merge
154 154 a = repo.wjoin(fd)
155 155 b = temp("base", fca)
156 156 c = temp("other", fco)
157 157 out = ""
158 158 back = a + ".orig"
159 159 util.copyfile(a, back)
160 160
161 161 if fw != fo:
162 162 repo.ui.status(_("merging %s and %s\n") % (fw, fo))
163 163 else:
164 164 repo.ui.status(_("merging %s\n") % fw)
165 165 repo.ui.debug(_("my %s other %s ancestor %s\n") % (fcm, fco, fca))
166 166
167 167 # do we attempt to simplemerge first?
168 168 if _toolbool(ui, tool, "premerge", not (binary or symlink)):
169 169 r = simplemerge.simplemerge(a, b, c, quiet=True)
170 170 if not r:
171 171 ui.debug(_(" premerge successful\n"))
172 172 os.unlink(back)
173 173 os.unlink(b)
174 174 os.unlink(c)
175 175 return 0
176 176 util.copyfile(back, a) # restore from backup and try again
177 177
178 178 env = dict(HG_FILE=fd,
179 179 HG_MY_NODE=str(wctx.parents()[0]),
180 180 HG_OTHER_NODE=str(mctx),
181 181 HG_MY_ISLINK=fcm.islink(),
182 182 HG_OTHER_ISLINK=fco.islink(),
183 183 HG_BASE_ISLINK=fca.islink())
184 184
185 185 if tool == "internal:merge":
186 186 r = simplemerge.simplemerge(a, b, c, label=['local', 'other'])
187 187 else:
188 188 args = _toolstr(ui, tool, "args", '$local $base $other')
189 189 if "$output" in args:
190 190 out, a = a, back # read input from backup, write to original
191 191 replace = dict(local=a, base=b, other=c, output=out)
192 192 args = re.sub("\$(local|base|other|output)",
193 193 lambda x: '"%s"' % replace[x.group()[1:]], args)
194 194 r = util.system(toolpath + ' ' + args, cwd=repo.root, environ=env)
195 195
196 196 if not r and _toolbool(ui, tool, "checkconflicts"):
197 197 if re.match("^(<<<<<<< .*|=======|>>>>>>> .*)$", fcm.data()):
198 198 r = 1
199 199
200 200 if not r and _toolbool(ui, tool, "checkchanged"):
201 201 if filecmp.cmp(repo.wjoin(fd), back):
202 202 if ui.prompt(_(" output file %s appears unchanged\n"
203 203 "was merge successful (yn)?") % fd,
204 204 _("[yn]"), _("n")) != _("y"):
205 205 r = 1
206 206
207 207 if _toolbool(ui, tool, "fixeol"):
208 208 _matcheol(repo.wjoin(fd), back)
209 209
210 210 if r:
211 211 repo.ui.warn(_("merging %s failed!\n") % fd)
212 212 else:
213 213 os.unlink(back)
214 214
215 215 os.unlink(b)
216 216 os.unlink(c)
217 217 return r
@@ -1,221 +1,220 b''
1 1 #
2 2 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 import cStringIO, zlib, bz2, tempfile, errno, os, sys
8 import cStringIO, zlib, tempfile, errno, os, sys
9 9 from mercurial import util, streamclone
10 from mercurial.i18n import gettext as _
11 10 from mercurial.node import bin, hex
12 11 from mercurial import changegroup as changegroupmod
13 12 from common import HTTP_OK, HTTP_NOT_FOUND, HTTP_SERVER_ERROR
14 13
15 14 # __all__ is populated with the allowed commands. Be sure to add to it if
16 15 # you're adding a new command, or the new command won't work.
17 16
18 17 __all__ = [
19 18 'lookup', 'heads', 'branches', 'between', 'changegroup',
20 19 'changegroupsubset', 'capabilities', 'unbundle', 'stream_out',
21 20 ]
22 21
23 22 HGTYPE = 'application/mercurial-0.1'
24 23
25 24 def lookup(web, req):
26 25 try:
27 26 r = hex(web.repo.lookup(req.form['key'][0]))
28 27 success = 1
29 28 except Exception,inst:
30 29 r = str(inst)
31 30 success = 0
32 31 resp = "%s %s\n" % (success, r)
33 32 req.respond(HTTP_OK, HGTYPE, length=len(resp))
34 33 req.write(resp)
35 34
36 35 def heads(web, req):
37 36 resp = " ".join(map(hex, web.repo.heads())) + "\n"
38 37 req.respond(HTTP_OK, HGTYPE, length=len(resp))
39 38 req.write(resp)
40 39
41 40 def branches(web, req):
42 41 nodes = []
43 42 if 'nodes' in req.form:
44 43 nodes = map(bin, req.form['nodes'][0].split(" "))
45 44 resp = cStringIO.StringIO()
46 45 for b in web.repo.branches(nodes):
47 46 resp.write(" ".join(map(hex, b)) + "\n")
48 47 resp = resp.getvalue()
49 48 req.respond(HTTP_OK, HGTYPE, length=len(resp))
50 49 req.write(resp)
51 50
52 51 def between(web, req):
53 52 if 'pairs' in req.form:
54 53 pairs = [map(bin, p.split("-"))
55 54 for p in req.form['pairs'][0].split(" ")]
56 55 resp = cStringIO.StringIO()
57 56 for b in web.repo.between(pairs):
58 57 resp.write(" ".join(map(hex, b)) + "\n")
59 58 resp = resp.getvalue()
60 59 req.respond(HTTP_OK, HGTYPE, length=len(resp))
61 60 req.write(resp)
62 61
63 62 def changegroup(web, req):
64 63 req.respond(HTTP_OK, HGTYPE)
65 64 nodes = []
66 65 if not web.allowpull:
67 66 return
68 67
69 68 if 'roots' in req.form:
70 69 nodes = map(bin, req.form['roots'][0].split(" "))
71 70
72 71 z = zlib.compressobj()
73 72 f = web.repo.changegroup(nodes, 'serve')
74 73 while 1:
75 74 chunk = f.read(4096)
76 75 if not chunk:
77 76 break
78 77 req.write(z.compress(chunk))
79 78
80 79 req.write(z.flush())
81 80
82 81 def changegroupsubset(web, req):
83 82 req.respond(HTTP_OK, HGTYPE)
84 83 bases = []
85 84 heads = []
86 85 if not web.allowpull:
87 86 return
88 87
89 88 if 'bases' in req.form:
90 89 bases = [bin(x) for x in req.form['bases'][0].split(' ')]
91 90 if 'heads' in req.form:
92 91 heads = [bin(x) for x in req.form['heads'][0].split(' ')]
93 92
94 93 z = zlib.compressobj()
95 94 f = web.repo.changegroupsubset(bases, heads, 'serve')
96 95 while 1:
97 96 chunk = f.read(4096)
98 97 if not chunk:
99 98 break
100 99 req.write(z.compress(chunk))
101 100
102 101 req.write(z.flush())
103 102
104 103 def capabilities(web, req):
105 104 resp = ' '.join(web.capabilities())
106 105 req.respond(HTTP_OK, HGTYPE, length=len(resp))
107 106 req.write(resp)
108 107
109 108 def unbundle(web, req):
110 109 def bail(response, headers={}):
111 110 length = int(req.env['CONTENT_LENGTH'])
112 111 for s in util.filechunkiter(req, limit=length):
113 112 # drain incoming bundle, else client will not see
114 113 # response when run outside cgi script
115 114 pass
116 115 req.header(headers.items())
117 116 req.respond(HTTP_OK, HGTYPE)
118 117 req.write('0\n')
119 118 req.write(response)
120 119
121 120 # require ssl by default, auth info cannot be sniffed and
122 121 # replayed
123 122 ssl_req = web.configbool('web', 'push_ssl', True)
124 123 if ssl_req:
125 124 if req.env.get('wsgi.url_scheme') != 'https':
126 125 bail('ssl required\n')
127 126 return
128 127 proto = 'https'
129 128 else:
130 129 proto = 'http'
131 130
132 131 # do not allow push unless explicitly allowed
133 132 if not web.check_perm(req, 'push', False):
134 133 bail('push not authorized\n',
135 134 headers={'status': '401 Unauthorized'})
136 135 return
137 136
138 137 their_heads = req.form['heads'][0].split(' ')
139 138
140 139 def check_heads():
141 140 heads = map(hex, web.repo.heads())
142 141 return their_heads == [hex('force')] or their_heads == heads
143 142
144 143 # fail early if possible
145 144 if not check_heads():
146 145 bail('unsynced changes\n')
147 146 return
148 147
149 148 req.respond(HTTP_OK, HGTYPE)
150 149
151 150 # do not lock repo until all changegroup data is
152 151 # streamed. save to temporary file.
153 152
154 153 fd, tempname = tempfile.mkstemp(prefix='hg-unbundle-')
155 154 fp = os.fdopen(fd, 'wb+')
156 155 try:
157 156 length = int(req.env['CONTENT_LENGTH'])
158 157 for s in util.filechunkiter(req, limit=length):
159 158 fp.write(s)
160 159
161 160 try:
162 161 lock = web.repo.lock()
163 162 try:
164 163 if not check_heads():
165 164 req.write('0\n')
166 165 req.write('unsynced changes\n')
167 166 return
168 167
169 168 fp.seek(0)
170 169 header = fp.read(6)
171 170 if header.startswith('HG') and not header.startswith('HG10'):
172 171 raise ValueError('unknown bundle version')
173 172 elif header not in changegroupmod.bundletypes:
174 173 raise ValueError('unknown bundle compression type')
175 174 gen = changegroupmod.unbundle(header, fp)
176 175
177 176 # send addchangegroup output to client
178 177
179 178 old_stdout = sys.stdout
180 179 sys.stdout = cStringIO.StringIO()
181 180
182 181 try:
183 182 url = 'remote:%s:%s' % (proto,
184 183 req.env.get('REMOTE_HOST', ''))
185 184 try:
186 185 ret = web.repo.addchangegroup(gen, 'serve', url)
187 186 except util.Abort, inst:
188 187 sys.stdout.write("abort: %s\n" % inst)
189 188 ret = 0
190 189 finally:
191 190 val = sys.stdout.getvalue()
192 191 sys.stdout = old_stdout
193 192 req.write('%d\n' % ret)
194 193 req.write(val)
195 194 finally:
196 195 del lock
197 196 except ValueError, inst:
198 197 req.write('0\n')
199 198 req.write(str(inst) + '\n')
200 199 except (OSError, IOError), inst:
201 200 req.write('0\n')
202 201 filename = getattr(inst, 'filename', '')
203 202 # Don't send our filesystem layout to the client
204 203 if filename.startswith(web.repo.root):
205 204 filename = filename[len(web.repo.root)+1:]
206 205 else:
207 206 filename = ''
208 207 error = getattr(inst, 'strerror', 'Unknown error')
209 208 if inst.errno == errno.ENOENT:
210 209 code = HTTP_NOT_FOUND
211 210 else:
212 211 code = HTTP_SERVER_ERROR
213 212 req.respond(code)
214 213 req.write('%s: %s\n' % (error, filename))
215 214 finally:
216 215 fp.close()
217 216 os.unlink(tempname)
218 217
219 218 def stream_out(web, req):
220 219 req.respond(HTTP_OK, HGTYPE)
221 220 streamclone.stream_out(web.repo, req, untrusted=True)
@@ -1,102 +1,101 b''
1 1 # hgweb/request.py - An http request from either CGI or the standalone server.
2 2 #
3 3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
4 4 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
5 5 #
6 6 # This software may be used and distributed according to the terms
7 7 # of the GNU General Public License, incorporated herein by reference.
8 8
9 9 import socket, cgi, errno
10 from mercurial.i18n import gettext as _
11 10 from common import ErrorResponse, statusmessage
12 11
13 12 class wsgirequest(object):
14 13 def __init__(self, wsgienv, start_response):
15 14 version = wsgienv['wsgi.version']
16 15 if (version < (1, 0)) or (version >= (2, 0)):
17 16 raise RuntimeError("Unknown and unsupported WSGI version %d.%d"
18 17 % version)
19 18 self.inp = wsgienv['wsgi.input']
20 19 self.err = wsgienv['wsgi.errors']
21 20 self.threaded = wsgienv['wsgi.multithread']
22 21 self.multiprocess = wsgienv['wsgi.multiprocess']
23 22 self.run_once = wsgienv['wsgi.run_once']
24 23 self.env = wsgienv
25 24 self.form = cgi.parse(self.inp, self.env, keep_blank_values=1)
26 25 self._start_response = start_response
27 26 self.server_write = None
28 27 self.headers = []
29 28
30 29 def __iter__(self):
31 30 return iter([])
32 31
33 32 def read(self, count=-1):
34 33 return self.inp.read(count)
35 34
36 35 def respond(self, status, type=None, filename=None, length=0):
37 36 if self._start_response is not None:
38 37
39 38 self.httphdr(type, filename, length)
40 39 if not self.headers:
41 40 raise RuntimeError("request.write called before headers sent")
42 41
43 42 for k, v in self.headers:
44 43 if not isinstance(v, str):
45 44 raise TypeError('header value must be string: %r' % v)
46 45
47 46 if isinstance(status, ErrorResponse):
48 47 status = statusmessage(status.code)
49 48 elif status == 200:
50 49 status = '200 Script output follows'
51 50 elif isinstance(status, int):
52 51 status = statusmessage(status)
53 52
54 53 self.server_write = self._start_response(status, self.headers)
55 54 self._start_response = None
56 55 self.headers = []
57 56
58 57 def write(self, thing):
59 58 if hasattr(thing, "__iter__"):
60 59 for part in thing:
61 60 self.write(part)
62 61 else:
63 62 thing = str(thing)
64 63 try:
65 64 self.server_write(thing)
66 65 except socket.error, inst:
67 66 if inst[0] != errno.ECONNRESET:
68 67 raise
69 68
70 69 def writelines(self, lines):
71 70 for line in lines:
72 71 self.write(line)
73 72
74 73 def flush(self):
75 74 return None
76 75
77 76 def close(self):
78 77 return None
79 78
80 79 def header(self, headers=[('Content-Type','text/html')]):
81 80 self.headers.extend(headers)
82 81
83 82 def httphdr(self, type=None, filename=None, length=0, headers={}):
84 83 headers = headers.items()
85 84 if type is not None:
86 85 headers.append(('Content-Type', type))
87 86 if filename:
88 87 filename = (filename.split('/')[-1]
89 88 .replace('\\', '\\\\').replace('"', '\\"'))
90 89 headers.append(('Content-Disposition',
91 90 'inline; filename="%s"' % filename))
92 91 if length:
93 92 headers.append(('Content-Length', str(length)))
94 93 self.header(headers)
95 94
96 95 def wsgiapplication(app_maker):
97 96 '''For compatibility with old CGI scripts. A plain hgweb() or hgwebdir()
98 97 can and should now be used as a WSGI application.'''
99 98 application = app_maker()
100 99 def run_wsgi(env, respond):
101 100 return application(env, respond)
102 101 return run_wsgi
@@ -1,294 +1,294 b''
1 1 # hgweb/server.py - The standalone hg web server.
2 2 #
3 3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
4 4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 5 #
6 6 # This software may be used and distributed according to the terms
7 7 # of the GNU General Public License, incorporated herein by reference.
8 8
9 9 import os, sys, errno, urllib, BaseHTTPServer, socket, SocketServer, traceback
10 from mercurial import ui, hg, util, templater
10 from mercurial import hg, util
11 11 from hgweb_mod import hgweb
12 12 from hgwebdir_mod import hgwebdir
13 13 from mercurial.i18n import gettext as _
14 14
15 15 def _splitURI(uri):
16 16 """ Return path and query splited from uri
17 17
18 18 Just like CGI environment, the path is unquoted, the query is
19 19 not.
20 20 """
21 21 if '?' in uri:
22 22 path, query = uri.split('?', 1)
23 23 else:
24 24 path, query = uri, ''
25 25 return urllib.unquote(path), query
26 26
27 27 class _error_logger(object):
28 28 def __init__(self, handler):
29 29 self.handler = handler
30 30 def flush(self):
31 31 pass
32 32 def write(self, str):
33 33 self.writelines(str.split('\n'))
34 34 def writelines(self, seq):
35 35 for msg in seq:
36 36 self.handler.log_error("HG error: %s", msg)
37 37
38 38 class _hgwebhandler(object, BaseHTTPServer.BaseHTTPRequestHandler):
39 39
40 40 url_scheme = 'http'
41 41
42 42 def __init__(self, *args, **kargs):
43 43 self.protocol_version = 'HTTP/1.1'
44 44 BaseHTTPServer.BaseHTTPRequestHandler.__init__(self, *args, **kargs)
45 45
46 46 def _log_any(self, fp, format, *args):
47 47 fp.write("%s - - [%s] %s\n" % (self.client_address[0],
48 48 self.log_date_time_string(),
49 49 format % args))
50 50 fp.flush()
51 51
52 52 def log_error(self, format, *args):
53 53 self._log_any(self.server.errorlog, format, *args)
54 54
55 55 def log_message(self, format, *args):
56 56 self._log_any(self.server.accesslog, format, *args)
57 57
58 58 def do_write(self):
59 59 try:
60 60 self.do_hgweb()
61 61 except socket.error, inst:
62 62 if inst[0] != errno.EPIPE:
63 63 raise
64 64
65 65 def do_POST(self):
66 66 try:
67 67 self.do_write()
68 68 except StandardError, inst:
69 69 self._start_response("500 Internal Server Error", [])
70 70 self._write("Internal Server Error")
71 71 tb = "".join(traceback.format_exception(*sys.exc_info()))
72 72 self.log_error("Exception happened during processing request '%s':\n%s",
73 73 self.path, tb)
74 74
75 75 def do_GET(self):
76 76 self.do_POST()
77 77
78 78 def do_hgweb(self):
79 79 path, query = _splitURI(self.path)
80 80
81 81 env = {}
82 82 env['GATEWAY_INTERFACE'] = 'CGI/1.1'
83 83 env['REQUEST_METHOD'] = self.command
84 84 env['SERVER_NAME'] = self.server.server_name
85 85 env['SERVER_PORT'] = str(self.server.server_port)
86 86 env['REQUEST_URI'] = self.path
87 87 env['SCRIPT_NAME'] = self.server.prefix
88 88 env['PATH_INFO'] = path[len(self.server.prefix):]
89 89 env['REMOTE_HOST'] = self.client_address[0]
90 90 env['REMOTE_ADDR'] = self.client_address[0]
91 91 if query:
92 92 env['QUERY_STRING'] = query
93 93
94 94 if self.headers.typeheader is None:
95 95 env['CONTENT_TYPE'] = self.headers.type
96 96 else:
97 97 env['CONTENT_TYPE'] = self.headers.typeheader
98 98 length = self.headers.getheader('content-length')
99 99 if length:
100 100 env['CONTENT_LENGTH'] = length
101 101 for header in [h for h in self.headers.keys()
102 102 if h not in ('content-type', 'content-length')]:
103 103 hkey = 'HTTP_' + header.replace('-', '_').upper()
104 104 hval = self.headers.getheader(header)
105 105 hval = hval.replace('\n', '').strip()
106 106 if hval:
107 107 env[hkey] = hval
108 108 env['SERVER_PROTOCOL'] = self.request_version
109 109 env['wsgi.version'] = (1, 0)
110 110 env['wsgi.url_scheme'] = self.url_scheme
111 111 env['wsgi.input'] = self.rfile
112 112 env['wsgi.errors'] = _error_logger(self)
113 113 env['wsgi.multithread'] = isinstance(self.server,
114 114 SocketServer.ThreadingMixIn)
115 115 env['wsgi.multiprocess'] = isinstance(self.server,
116 116 SocketServer.ForkingMixIn)
117 117 env['wsgi.run_once'] = 0
118 118
119 119 self.close_connection = True
120 120 self.saved_status = None
121 121 self.saved_headers = []
122 122 self.sent_headers = False
123 123 self.length = None
124 124 self.server.application(env, self._start_response)
125 125
126 126 def send_headers(self):
127 127 if not self.saved_status:
128 128 raise AssertionError("Sending headers before start_response() called")
129 129 saved_status = self.saved_status.split(None, 1)
130 130 saved_status[0] = int(saved_status[0])
131 131 self.send_response(*saved_status)
132 132 should_close = True
133 133 for h in self.saved_headers:
134 134 self.send_header(*h)
135 135 if h[0].lower() == 'content-length':
136 136 should_close = False
137 137 self.length = int(h[1])
138 138 # The value of the Connection header is a list of case-insensitive
139 139 # tokens separated by commas and optional whitespace.
140 140 if 'close' in [token.strip().lower() for token in
141 141 self.headers.get('connection', '').split(',')]:
142 142 should_close = True
143 143 if should_close:
144 144 self.send_header('Connection', 'close')
145 145 self.close_connection = should_close
146 146 self.end_headers()
147 147 self.sent_headers = True
148 148
149 149 def _start_response(self, http_status, headers, exc_info=None):
150 150 code, msg = http_status.split(None, 1)
151 151 code = int(code)
152 152 self.saved_status = http_status
153 153 bad_headers = ('connection', 'transfer-encoding')
154 154 self.saved_headers = [h for h in headers
155 155 if h[0].lower() not in bad_headers]
156 156 return self._write
157 157
158 158 def _write(self, data):
159 159 if not self.saved_status:
160 160 raise AssertionError("data written before start_response() called")
161 161 elif not self.sent_headers:
162 162 self.send_headers()
163 163 if self.length is not None:
164 164 if len(data) > self.length:
165 165 raise AssertionError("Content-length header sent, but more bytes than specified are being written.")
166 166 self.length = self.length - len(data)
167 167 self.wfile.write(data)
168 168 self.wfile.flush()
169 169
170 170 class _shgwebhandler(_hgwebhandler):
171 171
172 172 url_scheme = 'https'
173 173
174 174 def setup(self):
175 175 self.connection = self.request
176 176 self.rfile = socket._fileobject(self.request, "rb", self.rbufsize)
177 177 self.wfile = socket._fileobject(self.request, "wb", self.wbufsize)
178 178
179 179 def do_write(self):
180 180 from OpenSSL.SSL import SysCallError
181 181 try:
182 182 super(_shgwebhandler, self).do_write()
183 183 except SysCallError, inst:
184 184 if inst.args[0] != errno.EPIPE:
185 185 raise
186 186
187 187 def handle_one_request(self):
188 188 from OpenSSL.SSL import SysCallError, ZeroReturnError
189 189 try:
190 190 super(_shgwebhandler, self).handle_one_request()
191 191 except (SysCallError, ZeroReturnError):
192 192 self.close_connection = True
193 193 pass
194 194
195 195 def create_server(ui, repo):
196 196 use_threads = True
197 197
198 198 def openlog(opt, default):
199 199 if opt and opt != '-':
200 200 return open(opt, 'a')
201 201 return default
202 202
203 203 if repo is None:
204 204 myui = ui
205 205 else:
206 206 myui = repo.ui
207 207 address = myui.config("web", "address", "")
208 208 port = int(myui.config("web", "port", 8000))
209 209 prefix = myui.config("web", "prefix", "")
210 210 if prefix:
211 211 prefix = "/" + prefix.strip("/")
212 212 use_ipv6 = myui.configbool("web", "ipv6")
213 213 webdir_conf = myui.config("web", "webdir_conf")
214 214 ssl_cert = myui.config("web", "certificate")
215 215 accesslog = openlog(myui.config("web", "accesslog", "-"), sys.stdout)
216 216 errorlog = openlog(myui.config("web", "errorlog", "-"), sys.stderr)
217 217
218 218 if use_threads:
219 219 try:
220 220 from threading import activeCount
221 221 except ImportError:
222 222 use_threads = False
223 223
224 224 if use_threads:
225 225 _mixin = SocketServer.ThreadingMixIn
226 226 else:
227 227 if hasattr(os, "fork"):
228 228 _mixin = SocketServer.ForkingMixIn
229 229 else:
230 230 class _mixin:
231 231 pass
232 232
233 233 class MercurialHTTPServer(object, _mixin, BaseHTTPServer.HTTPServer):
234 234
235 235 # SO_REUSEADDR has broken semantics on windows
236 236 if os.name == 'nt':
237 237 allow_reuse_address = 0
238 238
239 239 def __init__(self, *args, **kargs):
240 240 BaseHTTPServer.HTTPServer.__init__(self, *args, **kargs)
241 241 self.accesslog = accesslog
242 242 self.errorlog = errorlog
243 243 self.daemon_threads = True
244 244 def make_handler():
245 245 if webdir_conf:
246 246 hgwebobj = hgwebdir(webdir_conf, ui)
247 247 elif repo is not None:
248 248 hgwebobj = hgweb(hg.repository(repo.ui, repo.root))
249 249 else:
250 250 raise hg.RepoError(_("There is no Mercurial repository here"
251 251 " (.hg not found)"))
252 252 return hgwebobj
253 253 self.application = make_handler()
254 254
255 255 addr = address
256 256 if addr in ('', '::'):
257 257 addr = socket.gethostname()
258 258
259 259 self.addr, self.port = addr, port
260 260 self.prefix = prefix
261 261
262 262 if ssl_cert:
263 263 try:
264 264 from OpenSSL import SSL
265 265 ctx = SSL.Context(SSL.SSLv23_METHOD)
266 266 except ImportError:
267 267 raise util.Abort("SSL support is unavailable")
268 268 ctx.use_privatekey_file(ssl_cert)
269 269 ctx.use_certificate_file(ssl_cert)
270 270 sock = socket.socket(self.address_family, self.socket_type)
271 271 self.socket = SSL.Connection(ctx, sock)
272 272 self.server_bind()
273 273 self.server_activate()
274 274
275 275 class IPv6HTTPServer(MercurialHTTPServer):
276 276 address_family = getattr(socket, 'AF_INET6', None)
277 277
278 278 def __init__(self, *args, **kwargs):
279 279 if self.address_family is None:
280 280 raise hg.RepoError(_('IPv6 not available on this system'))
281 281 super(IPv6HTTPServer, self).__init__(*args, **kwargs)
282 282
283 283 if ssl_cert:
284 284 handler = _shgwebhandler
285 285 else:
286 286 handler = _hgwebhandler
287 287
288 288 try:
289 289 if use_ipv6:
290 290 return IPv6HTTPServer((address, port), handler)
291 291 else:
292 292 return MercurialHTTPServer((address, port), handler)
293 293 except socket.error, inst:
294 294 raise util.Abort(_('cannot start server: %s') % inst.args[1])
@@ -1,458 +1,458 b''
1 1 # httprepo.py - HTTP repository proxy classes for mercurial
2 2 #
3 3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 5 #
6 6 # This software may be used and distributed according to the terms
7 7 # of the GNU General Public License, incorporated herein by reference.
8 8
9 9 from node import bin, hex
10 10 from remoterepo import remoterepository
11 11 from i18n import _
12 12 import repo, os, urllib, urllib2, urlparse, zlib, util, httplib
13 import errno, keepalive, tempfile, socket, changegroup
13 import errno, keepalive, socket, changegroup
14 14
class passwordmgr(urllib2.HTTPPasswordMgrWithDefaultRealm):
    """Password manager that falls back to prompting on the ui."""

    def __init__(self, ui):
        urllib2.HTTPPasswordMgrWithDefaultRealm.__init__(self)
        self.ui = ui

    def find_user_password(self, realm, authuri):
        # first see whether credentials were already registered
        user, passwd = urllib2.HTTPPasswordMgrWithDefaultRealm.find_user_password(
            self, realm, authuri)
        if user and passwd:
            return (user, passwd)

        # nothing (complete) stored; we have to ask the user
        if not self.ui.interactive:
            raise util.Abort(_('http authorization required'))

        self.ui.write(_("http authorization required\n"))
        self.ui.status(_("realm: %s\n") % realm)
        if user:
            self.ui.status(_("user: %s\n") % user)
        else:
            user = self.ui.prompt(_("user:"), default=None)
        if not passwd:
            passwd = self.ui.getpass()

        # remember the answer so the same realm is not prompted for again
        self.add_password(realm, authuri, user, passwd)
        return (user, passwd)
42 42
def netlocsplit(netloc):
    '''split [user[:passwd]@]host[:port] into 4-tuple.'''

    user = passwd = None
    if '@' in netloc:
        # credentials are everything before the first '@'
        userpass, netloc = netloc.split('@', 1)
        if ':' in userpass:
            user, passwd = userpass.split(':', 1)
            user = urllib.unquote(user)
            passwd = urllib.unquote(passwd)
        else:
            user = urllib.unquote(userpass)
    if ':' in netloc:
        host, port = netloc.split(':', 1)
    else:
        host, port = netloc, None
    return host, port, user, passwd
63 63
def netlocunsplit(host, port, user=None, passwd=None):
    '''turn host, port, user, passwd into [user[:passwd]@]host[:port].'''
    # rebuild the host[:port] part first
    hostport = host
    if port:
        hostport = host + ':' + port
    if not user:
        return hostport
    # percent-encode credentials before re-embedding them
    if passwd:
        cred = urllib.quote(user) + ':' + urllib.quote(passwd)
    else:
        cred = urllib.quote(user)
    return cred + '@' + hostport
77 77
78 78 # work around a bug in Python < 2.4.2
79 79 # (it leaves a "\n" at the end of Proxy-authorization headers)
class request(urllib2.Request):
    def add_header(self, key, val):
        # strip the trailing "\n" the buggy urllib2 leaves behind on
        # Proxy-authorization headers (Python < 2.4.2)
        if key.lower() == 'proxy-authorization':
            return urllib2.Request.add_header(self, key, val.strip())
        return urllib2.Request.add_header(self, key, val)
85 85
class httpsendfile(file):
    # file subclass that reports its on-disk size via len(); urllib2 uses
    # __len__ to set the Content-length header when POSTing the bundle
    def __len__(self):
        return os.fstat(self.fileno()).st_size
89 89
def _gen_sendfile(connection):
    """Return a send() method for *connection* that can stream a file.

    The regular send() takes a string; the returned variant additionally
    accepts an httpsendfile and streams it in chunks.
    """
    def _sendfile(self, data):
        if not isinstance(data, httpsendfile):
            connection.send(self, data)
            return
        # if auth required, some data sent twice, so rewind here
        data.seek(0)
        for chunk in util.filechunkiter(data):
            connection.send(self, chunk)
    return _sendfile
101 101
class httpconnection(keepalive.HTTPConnection):
    # keep-alive connection whose send() can stream an httpsendfile;
    # must be able to send big bundle as stream.
    send = _gen_sendfile(keepalive.HTTPConnection)
105 105
class httphandler(keepalive.HTTPHandler):
    # urllib2 handler that opens plain HTTP URLs over our streaming,
    # keep-alive connection class
    def http_open(self, req):
        return self.do_open(httpconnection, req)

    def __del__(self):
        # drop any cached keep-alive connections when the handler goes away
        self.close_all()
112 112
113 113 has_https = hasattr(urllib2, 'HTTPSHandler')
114 114 if has_https:
115 115 class httpsconnection(httplib.HTTPSConnection):
116 116 response_class = keepalive.HTTPResponse
117 117 # must be able to send big bundle as stream.
118 118 send = _gen_sendfile(httplib.HTTPSConnection)
119 119
120 120 class httpshandler(keepalive.KeepAliveHandler, urllib2.HTTPSHandler):
121 121 def https_open(self, req):
122 122 return self.do_open(httpsconnection, req)
123 123
# In python < 2.5 AbstractDigestAuthHandler raises a ValueError if
# it doesn't know about the auth type requested. This can happen if
# somebody is using BasicAuth and types a bad password.
class httpdigestauthhandler(urllib2.HTTPDigestAuthHandler):
    def http_error_auth_reqed(self, auth_header, host, req, headers):
        try:
            return urllib2.HTTPDigestAuthHandler.http_error_auth_reqed(
                        self, auth_header, host, req, headers)
        except ValueError, inst:
            arg = inst.args[0]
            # deliberately swallow only the "unknown auth type" error so
            # another handler (e.g. BasicAuth) gets a chance to run
            if arg.startswith("AbstractDigestAuthHandler doesn't know "):
                return
            raise
137 137
def zgenerator(f):
    """Decompress a zlib stream read from file-like object *f*.

    Yields decompressed chunks as they arrive; the final flush of the
    decompressor is yielded last.  A dropped HTTP connection is reported
    as a plain IOError instead of leaking an httplib exception.
    """
    zd = zlib.decompressobj()
    try:
        for chunk in util.filechunkiter(f):
            yield zd.decompress(chunk)
    except httplib.HTTPException:
        # the original exception detail is of no use to callers; signal a
        # generic truncated-stream error (unused binding removed)
        raise IOError(None, _('connection ended unexpectedly'))
    yield zd.flush()
146 146
# characters that never need quoting in a URL path
_safe = ('abcdefghijklmnopqrstuvwxyz'
         'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
         '0123456789' '_.-/')
_safeset = None
_hex = None
def quotepath(path):
    '''quote the path part of a URL

    This is similar to urllib.quote, but it also tries to avoid
    quoting things twice (inspired by wget):

    >>> quotepath('abc def')
    'abc%20def'
    >>> quotepath('abc%20def')
    'abc%20def'
    >>> quotepath('abc%20 def')
    'abc%20%20def'
    >>> quotepath('abc def%20')
    'abc%20def%20'
    >>> quotepath('abc def%2')
    'abc%20def%252'
    >>> quotepath('abc def%')
    'abc%20def%25'
    '''
    global _safeset, _hex
    if _safeset is None:
        # build the lookup tables lazily, exactly once
        _safeset = util.set(_safe)
        _hex = util.set('abcdefABCDEF0123456789')
    chars = list(path)
    out = []
    for i, c in enumerate(chars):
        if (c == '%' and i + 2 < len(chars) and
            chars[i + 1] in _hex and chars[i + 2] in _hex):
            # looks like an already-quoted escape; keep it as-is
            out.append(c)
        elif c in _safeset:
            out.append(c)
        else:
            out.append('%%%02X' % ord(c))
    return ''.join(out)
183 183
class httprepository(remoterepository):
    """Repository proxy that speaks the hg wire protocol over HTTP."""

    def __init__(self, ui, path):
        # parse and normalize the URL; credentials must be stripped out
        # because urllib cannot handle them embedded in the netloc
        self.path = path
        self.caps = None
        self.handler = None
        scheme, netloc, urlpath, query, frag = urlparse.urlsplit(path)
        if query or frag:
            raise util.Abort(_('unsupported URL component: "%s"') %
                             (query or frag))
        if not urlpath:
            urlpath = '/'
        urlpath = quotepath(urlpath)
        host, port, user, passwd = netlocsplit(netloc)

        # urllib cannot handle URLs with embedded user or passwd
        self._url = urlparse.urlunsplit((scheme, netlocunsplit(host, port),
                                         urlpath, '', ''))
        self.ui = ui
        self.ui.debug(_('using %s\n') % self._url)

        # proxy configuration: hgrc takes precedence over the environment
        proxyurl = ui.config("http_proxy", "host") or os.getenv('http_proxy')
        # XXX proxyauthinfo = None
        handlers = [httphandler()]
        if has_https:
            handlers.append(httpshandler())

        if proxyurl:
            # proxy can be proper url or host[:port]
            if not (proxyurl.startswith('http:') or
                    proxyurl.startswith('https:')):
                proxyurl = 'http://' + proxyurl + '/'
            snpqf = urlparse.urlsplit(proxyurl)
            proxyscheme, proxynetloc, proxypath, proxyquery, proxyfrag = snpqf
            hpup = netlocsplit(proxynetloc)

            proxyhost, proxyport, proxyuser, proxypasswd = hpup
            if not proxyuser:
                proxyuser = ui.config("http_proxy", "user")
                proxypasswd = ui.config("http_proxy", "passwd")

            # see if we should use a proxy for this url
            no_list = [ "localhost", "127.0.0.1" ]
            no_list.extend([p.lower() for
                            p in ui.configlist("http_proxy", "no")])
            no_list.extend([p.strip().lower() for
                            p in os.getenv("no_proxy", '').split(',')
                            if p.strip()])
            # "http_proxy.always" config is for running tests on localhost
            if (not ui.configbool("http_proxy", "always") and
                host.lower() in no_list):
                # avoid auto-detection of proxy settings by appending
                # a ProxyHandler with no proxies defined.
                handlers.append(urllib2.ProxyHandler({}))
                ui.debug(_('disabling proxy for %s\n') % host)
            else:
                # re-embed any proxy credentials into the proxy URL
                proxyurl = urlparse.urlunsplit((
                    proxyscheme, netlocunsplit(proxyhost, proxyport,
                                               proxyuser, proxypasswd or ''),
                    proxypath, proxyquery, proxyfrag))
                handlers.append(urllib2.ProxyHandler({scheme: proxyurl}))
                ui.debug(_('proxying through http://%s:%s\n') %
                         (proxyhost, proxyport))

        # urllib2 takes proxy values from the environment and those
        # will take precedence if found, so drop them
        for env in ["HTTP_PROXY", "http_proxy", "no_proxy"]:
            try:
                if env in os.environ:
                    del os.environ[env]
            except OSError:
                pass

        # authentication: seed the password manager with URL credentials
        passmgr = passwordmgr(ui)
        if user:
            ui.debug(_('http auth: user %s, password %s\n') %
                     (user, passwd and '*' * len(passwd) or 'not set'))
            netloc = host
            if port:
                netloc += ':' + port
            # Python < 2.4.3 uses only the netloc to search for a password
            passmgr.add_password(None, (self._url, netloc), user, passwd or '')

        handlers.extend((urllib2.HTTPBasicAuthHandler(passmgr),
                         httpdigestauthhandler(passmgr)))
        opener = urllib2.build_opener(*handlers)

        # 1.0 here is the _protocol_ version
        opener.addheaders = [('User-agent', 'mercurial/proto-1.0')]
        # NOTE(review): installs the opener process-wide; presumably assumes
        # one remote repo per process -- verify before adding concurrency
        urllib2.install_opener(opener)

    def url(self):
        # original URL as given, including any credentials
        return self.path

    # look up capabilities only when needed

    def get_caps(self):
        # fetch and cache the server capability set; an old server without
        # the "capabilities" command just yields an empty set
        if self.caps is None:
            try:
                self.caps = util.set(self.do_read('capabilities').split())
            except repo.RepoError:
                self.caps = util.set()
            self.ui.debug(_('capabilities: %s\n') %
                          (' '.join(self.caps or ['none'])))
        return self.caps

    capabilities = property(get_caps)

    def lock(self):
        # remote locking is impossible over stateless HTTP
        raise util.Abort(_('operation not supported over http'))

    def do_cmd(self, cmd, **args):
        """Issue wire-protocol command *cmd* and return the open response.

        'data' and 'headers' kwargs are passed through to the request;
        all other kwargs become query-string parameters.
        """
        data = args.pop('data', None)
        headers = args.pop('headers', {})
        self.ui.debug(_("sending %s command\n") % cmd)
        q = {"cmd": cmd}
        q.update(args)
        qs = '?%s' % urllib.urlencode(q)
        cu = "%s%s" % (self._url, qs)
        try:
            if data:
                self.ui.debug(_("sending %s bytes\n") % len(data))
            resp = urllib2.urlopen(request(cu, data, headers))
        except urllib2.HTTPError, inst:
            if inst.code == 401:
                raise util.Abort(_('authorization failed'))
            raise
        except httplib.HTTPException, inst:
            self.ui.debug(_('http error while sending %s command\n') % cmd)
            self.ui.print_exc()
            raise IOError(None, inst)
        except IndexError:
            # this only happens with Python 2.3, later versions raise URLError
            raise util.Abort(_('http error, possibly caused by proxy setting'))
        # record the url we got redirected to
        resp_url = resp.geturl()
        if resp_url.endswith(qs):
            resp_url = resp_url[:-len(qs)]
        if self._url != resp_url:
            self.ui.status(_('real URL is %s\n') % resp_url)
            self._url = resp_url
        try:
            proto = resp.getheader('content-type')
        except AttributeError:
            proto = resp.headers['content-type']

        # accept old "text/plain" and "application/hg-changegroup" for now
        if not (proto.startswith('application/mercurial-') or
                proto.startswith('text/plain') or
                proto.startswith('application/hg-changegroup')):
            self.ui.debug(_("Requested URL: '%s'\n") % cu)
            raise repo.RepoError(_("'%s' does not appear to be an hg repository")
                                 % self._url)

        # reject responses from a future protocol version we cannot parse
        if proto.startswith('application/mercurial-'):
            try:
                version = proto.split('-', 1)[1]
                version_info = tuple([int(n) for n in version.split('.')])
            except ValueError:
                raise repo.RepoError(_("'%s' sent a broken Content-Type "
                                       "header (%s)") % (self._url, proto))
            if version_info > (0, 1):
                raise repo.RepoError(_("'%s' uses newer protocol %s") %
                                     (self._url, version))

        return resp

    def do_read(self, cmd, **args):
        # convenience wrapper: run a command and return the whole body
        fp = self.do_cmd(cmd, **args)
        try:
            return fp.read()
        finally:
            # if using keepalive, allow connection to be reused
            fp.close()

    def lookup(self, key):
        # resolve a symbolic revision name to a binary node on the server
        self.requirecap('lookup', _('look up remote revision'))
        d = self.do_cmd("lookup", key = key).read()
        success, data = d[:-1].split(' ', 1)
        if int(success):
            return bin(data)
        raise repo.RepoError(data)

    def heads(self):
        # space-separated hex nodes, with a trailing newline stripped
        d = self.do_read("heads")
        try:
            return map(bin, d[:-1].split(" "))
        except:
            raise util.UnexpectedOutput(_("unexpected response:"), d)

    def branches(self, nodes):
        # one line of hex nodes per requested branch
        n = " ".join(map(hex, nodes))
        d = self.do_read("branches", nodes=n)
        try:
            br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
            return br
        except:
            raise util.UnexpectedOutput(_("unexpected response:"), d)

    def between(self, pairs):
        # each request pair becomes "hex-hex"; each reply line may be empty
        n = "\n".join(["-".join(map(hex, p)) for p in pairs])
        d = self.do_read("between", pairs=n)
        try:
            p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
            return p
        except:
            raise util.UnexpectedOutput(_("unexpected response:"), d)

    def changegroup(self, nodes, kind):
        # stream a zlib-compressed changegroup rooted at the given nodes
        n = " ".join(map(hex, nodes))
        f = self.do_cmd("changegroup", roots=n)
        return util.chunkbuffer(zgenerator(f))

    def changegroupsubset(self, bases, heads, source):
        # like changegroup but bounded by explicit bases and heads
        self.requirecap('changegroupsubset', _('look up remote changes'))
        baselst = " ".join([hex(n) for n in bases])
        headlst = " ".join([hex(n) for n in heads])
        f = self.do_cmd("changegroupsubset", bases=baselst, heads=headlst)
        return util.chunkbuffer(zgenerator(f))

    def unbundle(self, cg, heads, source):
        """Push changegroup *cg* to the server; returns the server's int
        status (first line of the reply)."""
        # have to stream bundle to a temp file because we do not have
        # http 1.1 chunked transfer.

        type = ""
        types = self.capable('unbundle')
        # servers older than d1b16a746db6 will send 'unbundle' as a
        # boolean capability
        try:
            types = types.split(',')
        except AttributeError:
            types = [""]
        if types:
            # pick the first bundle type both sides understand
            for x in types:
                if x in changegroup.bundletypes:
                    type = x
                    break

        tempname = changegroup.writebundle(cg, None, type)
        fp = httpsendfile(tempname, "rb")
        try:
            try:
                rfp = self.do_cmd(
                    'unbundle', data=fp,
                    headers={'Content-Type': 'application/octet-stream'},
                    heads=' '.join(map(hex, heads)))
                try:
                    ret = int(rfp.readline())
                    self.ui.write(rfp.read())
                    return ret
                finally:
                    rfp.close()
            except socket.error, err:
                # a reset/broken pipe usually means the server rejected us
                if err[0] in (errno.ECONNRESET, errno.EPIPE):
                    raise util.Abort(_('push failed: %s') % err[1])
                raise util.Abort(err[1])
        finally:
            # always remove the temporary bundle file
            fp.close()
            os.unlink(tempname)

    def stream_out(self):
        # raw streaming clone; caller consumes the response directly
        return self.do_cmd('stream_out')
445 445
class httpsrepository(httprepository):
    # identical to httprepository; exists only to fail early when the
    # Python build lacks SSL support
    def __init__(self, ui, path):
        if not has_https:
            raise util.Abort(_('Python support for SSL and HTTPS '
                               'is not installed'))
        httprepository.__init__(self, ui, path)
452 452
def instance(ui, path, create):
    """Repository factory entry point for http:// and https:// paths."""
    if create:
        raise util.Abort(_('cannot create new http repository'))
    # choose the concrete class from the URL scheme
    cls = httprepository
    if path.startswith('https:'):
        cls = httpsrepository
    return cls(ui, path)
@@ -1,2126 +1,2126 b''
1 1 # localrepo.py - read/write repository class for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import bin, hex, nullid, nullrev, short
9 9 from i18n import _
10 10 import repo, changegroup
11 11 import changelog, dirstate, filelog, manifest, context, weakref
12 import re, lock, transaction, tempfile, stat, errno, ui
12 import lock, transaction, stat, errno, ui
13 13 import os, revlog, time, util, extensions, hook, inspect
14 14
class localrepository(repo.repository):
    # wire-protocol capabilities this repository class can serve
    capabilities = util.set(('lookup', 'changegroupsubset'))
    # on-disk requirements this code knows how to read
    supported = ('revlogv1', 'store')
18 18
    def __init__(self, parentui, path=None, create=0):
        """Open (or, with create=1, initialize) the repository at *path*."""
        repo.repository.__init__(self)
        self.root = os.path.realpath(path)
        self.path = os.path.join(self.root, ".hg")
        self.origroot = path
        self.opener = util.opener(self.path)
        self.wopener = util.opener(self.root)

        if not os.path.isdir(self.path):
            if create:
                if not os.path.exists(path):
                    os.mkdir(path)
                os.mkdir(self.path)
                requirements = ["revlogv1"]
                if parentui.configbool('format', 'usestore', True):
                    os.mkdir(os.path.join(self.path, "store"))
                    requirements.append("store")
                # create an invalid changelog
                self.opener("00changelog.i", "a").write(
                    '\0\0\0\2' # represents revlogv2
                    ' dummy changelog to prevent using the old repo layout'
                )
                reqfile = self.opener("requires", "w")
                for r in requirements:
                    reqfile.write("%s\n" % r)
                reqfile.close()
            else:
                raise repo.RepoError(_("repository %s not found") % path)
        elif create:
            raise repo.RepoError(_("repository %s already exists") % path)
        else:
            # find requirements
            try:
                requirements = self.opener("requires").read().splitlines()
            except IOError, inst:
                # a missing requires file just means an old-format repo
                if inst.errno != errno.ENOENT:
                    raise
                requirements = []
            # check them
            for r in requirements:
                if r not in self.supported:
                    raise repo.RepoError(_("requirement '%s' not supported") % r)

        # setup store: with "store" the filenames are encoded on disk
        if "store" in requirements:
            self.encodefn = util.encodefilename
            self.decodefn = util.decodefilename
            self.spath = os.path.join(self.path, "store")
        else:
            self.encodefn = lambda x: x
            self.decodefn = lambda x: x
            self.spath = self.path

        try:
            # files in .hg/ will be created using this mode
            mode = os.stat(self.spath).st_mode
            # avoid some useless chmods
            if (0777 & ~util._umask) == (0777 & mode):
                mode = None
        except OSError:
            mode = None

        self._createmode = mode
        self.opener.createmode = mode
        sopener = util.opener(self.spath)
        sopener.createmode = mode
        self.sopener = util.encodedopener(sopener, self.encodefn)

        # per-repo ui layered on the global one; a missing hgrc is fine
        self.ui = ui.ui(parentui=parentui)
        try:
            self.ui.readconfig(self.join("hgrc"), self.root)
            extensions.loadall(self.ui)
        except IOError:
            pass

        # lazily-populated caches
        self.tagscache = None
        self._tagstypecache = None
        self.branchcache = None
        self._ubranchcache = None # UTF-8 version of branchcache
        self._branchcachetip = None
        self.nodetagscache = None
        self.filterpats = {}
        self._datafilters = {}
        self._transref = self._lockref = self._wlockref = None
103 103
    def __getattr__(self, name):
        # lazily create the expensive changelog/manifest/dirstate objects
        # on first access; after creation the instance attribute shadows
        # this hook so it only runs once per attribute
        if name == 'changelog':
            self.changelog = changelog.changelog(self.sopener)
            self.sopener.defversion = self.changelog.version
            return self.changelog
        if name == 'manifest':
            # touch self.changelog first so defversion is set
            self.changelog
            self.manifest = manifest.manifest(self.sopener)
            return self.manifest
        if name == 'dirstate':
            self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
            return self.dirstate
        else:
            raise AttributeError, name
118 118
119 119 def url(self):
120 120 return 'file:' + self.root
121 121
    def hook(self, name, throw=False, **args):
        # thin wrapper so repository code can fire hooks in one call
        return hook.hook(self.ui, self, name, throw, **args)

    tag_disallowed = ':\r\n'  # characters that may not appear in a tag name
126 126
127 127 def _tag(self, name, node, message, local, user, date, parent=None,
128 128 extra={}):
129 129 use_dirstate = parent is None
130 130
131 131 for c in self.tag_disallowed:
132 132 if c in name:
133 133 raise util.Abort(_('%r cannot be used in a tag name') % c)
134 134
135 135 self.hook('pretag', throw=True, node=hex(node), tag=name, local=local)
136 136
137 137 def writetag(fp, name, munge, prevtags):
138 138 fp.seek(0, 2)
139 139 if prevtags and prevtags[-1] != '\n':
140 140 fp.write('\n')
141 141 fp.write('%s %s\n' % (hex(node), munge and munge(name) or name))
142 142 fp.close()
143 143
144 144 prevtags = ''
145 145 if local:
146 146 try:
147 147 fp = self.opener('localtags', 'r+')
148 148 except IOError, err:
149 149 fp = self.opener('localtags', 'a')
150 150 else:
151 151 prevtags = fp.read()
152 152
153 153 # local tags are stored in the current charset
154 154 writetag(fp, name, None, prevtags)
155 155 self.hook('tag', node=hex(node), tag=name, local=local)
156 156 return
157 157
158 158 if use_dirstate:
159 159 try:
160 160 fp = self.wfile('.hgtags', 'rb+')
161 161 except IOError, err:
162 162 fp = self.wfile('.hgtags', 'ab')
163 163 else:
164 164 prevtags = fp.read()
165 165 else:
166 166 try:
167 167 prevtags = self.filectx('.hgtags', parent).data()
168 168 except revlog.LookupError:
169 169 pass
170 170 fp = self.wfile('.hgtags', 'wb')
171 171 if prevtags:
172 172 fp.write(prevtags)
173 173
174 174 # committed tags are stored in UTF-8
175 175 writetag(fp, name, util.fromlocal, prevtags)
176 176
177 177 if use_dirstate and '.hgtags' not in self.dirstate:
178 178 self.add(['.hgtags'])
179 179
180 180 tagnode = self.commit(['.hgtags'], message, user, date, p1=parent,
181 181 extra=extra)
182 182
183 183 self.hook('tag', node=hex(node), tag=name, local=local)
184 184
185 185 return tagnode
186 186
    def tag(self, name, node, message, local, user, date):
        '''tag a revision with a symbolic name.

        if local is True, the tag is stored in a per-repository file.
        otherwise, it is stored in the .hgtags file, and a new
        changeset is committed with the change.

        keyword arguments:

        local: whether to store tag in non-version-controlled file
        (default False)

        message: commit message to use if committing

        user: name of user to use if committing

        date: date tuple to use if committing'''

        date = util.parsedate(date)
        # refuse to auto-commit an .hgtags that has uncommitted edits;
        # status()[:5] covers modified/added/removed/deleted/unknown
        for x in self.status()[:5]:
            if '.hgtags' in x:
                raise util.Abort(_('working copy of .hgtags is changed '
                                   '(please commit .hgtags manually)'))


        self._tag(name, node, message, local, user, date)
213 213
    def tags(self):
        '''return a mapping of tag to node'''
        if self.tagscache:
            return self.tagscache

        globaltags = {}
        tagtypes = {}

        def readtags(lines, fn, tagtype):
            # parse one tags file and merge its entries into globaltags;
            # fn is only used for warning messages
            filetags = {}
            count = 0

            def warn(msg):
                self.ui.warn(_("%s, line %s: %s\n") % (fn, count, msg))

            for l in lines:
                count += 1
                if not l:
                    continue
                s = l.split(" ", 1)
                if len(s) != 2:
                    warn(_("cannot parse entry"))
                    continue
                node, key = s
                key = util.tolocal(key.strip()) # stored in UTF-8
                try:
                    bin_n = bin(node)
                except TypeError:
                    warn(_("node '%s' is not well formed") % node)
                    continue
                if bin_n not in self.changelog.nodemap:
                    warn(_("tag '%s' refers to unknown node") % key)
                    continue

                # h accumulates the nodes this tag previously pointed at
                # (its history) within this file; later lines win
                h = []
                if key in filetags:
                    n, h = filetags[key]
                    h.append(n)
                filetags[key] = (bin_n, h)

            for k, nh in filetags.items():
                if k not in globaltags:
                    globaltags[k] = nh
                    tagtypes[k] = tagtype
                    continue

                # we prefer the global tag if:
                #  it supercedes us OR
                #  mutual supercedes and it has a higher rank
                # otherwise we win because we're tip-most
                an, ah = nh
                bn, bh = globaltags[k]
                if (bn != an and an in bh and
                    (bn not in ah or len(bh) > len(ah))):
                    an = bn
                ah.extend([n for n in bh if n not in ah])
                globaltags[k] = an, ah
                tagtypes[k] = tagtype

        # read the tags file from each head, ending with the tip
        f = None
        for rev, node, fnode in self._hgtagsnodes():
            f = (f and f.filectx(fnode) or
                 self.filectx('.hgtags', fileid=fnode))
            readtags(f.data().splitlines(), f, "global")

        try:
            data = util.fromlocal(self.opener("localtags").read())
            # localtags are stored in the local character set
            # while the internal tag table is stored in UTF-8
            readtags(data.splitlines(), "localtags", "local")
        except IOError:
            pass

        # publish the merged result; nullid entries mean "tag deleted"
        self.tagscache = {}
        self._tagstypecache = {}
        for k,nh in globaltags.items():
            n = nh[0]
            if n != nullid:
                self.tagscache[k] = n
            self._tagstypecache[k] = tagtypes[k]
        self.tagscache['tip'] = self.changelog.tip()

        return self.tagscache
298 298
    def tagtype(self, tagname):
        '''
        return the type of the given tag. result can be:

        'local'  : a local tag
        'global' : a global tag
        None     : tag does not exist
        '''

        # calling tags() populates _tagstypecache as a side effect
        self.tags()

        return self._tagstypecache.get(tagname)
311 311
    def _hgtagsnodes(self):
        # return (rev, node, .hgtags-filenode) for each head that has an
        # .hgtags file, oldest first, keeping only the last head seen for
        # each distinct filenode (duplicates are nulled out then dropped)
        heads = self.heads()
        heads.reverse()
        last = {}
        ret = []
        for node in heads:
            c = self.changectx(node)
            rev = c.rev()
            try:
                fnode = c.filenode('.hgtags')
            except revlog.LookupError:
                continue
            ret.append((rev, node, fnode))
            if fnode in last:
                ret[last[fnode]] = None
            last[fnode] = len(ret) - 1
        return [item for item in ret if item]
329 329
330 330 def tagslist(self):
331 331 '''return a list of tags ordered by revision'''
332 332 l = []
333 333 for t, n in self.tags().items():
334 334 try:
335 335 r = self.changelog.rev(n)
336 336 except:
337 337 r = -2 # sort to the beginning of the list if unknown
338 338 l.append((r, t, n))
339 339 l.sort()
340 340 return [(t, n) for r, t, n in l]
341 341
342 342 def nodetags(self, node):
343 343 '''return the tags associated with a node'''
344 344 if not self.nodetagscache:
345 345 self.nodetagscache = {}
346 346 for t, n in self.tags().items():
347 347 self.nodetagscache.setdefault(n, []).append(t)
348 348 return self.nodetagscache.get(node, [])
349 349
    def _branchtags(self, partial, lrev):
        # bring a possibly-stale branch map up to the current tip and
        # persist the refreshed cache; lrev is the last rev it covered
        tiprev = self.changelog.count() - 1
        if lrev != tiprev:
            self._updatebranchcache(partial, lrev+1, tiprev+1)
            self._writebranchcache(partial, self.changelog.tip(), tiprev)

        return partial

    def branchtags(self):
        """Return a map of branch name -> tip-most node of that branch."""
        tip = self.changelog.tip()
        if self.branchcache is not None and self._branchcachetip == tip:
            return self.branchcache

        oldtip = self._branchcachetip
        self._branchcachetip = tip
        if self.branchcache is None:
            self.branchcache = {} # avoid recursion in changectx
        else:
            self.branchcache.clear() # keep using the same dict
        if oldtip is None or oldtip not in self.changelog.nodemap:
            # no usable in-memory state (e.g. after a strip): reload from disk
            partial, last, lrev = self._readbranchcache()
        else:
            lrev = self.changelog.rev(oldtip)
            partial = self._ubranchcache

        self._branchtags(partial, lrev)

        # the branch cache is stored on disk as UTF-8, but in the local
        # charset internally
        for k, v in partial.items():
            self.branchcache[util.tolocal(k)] = v
        self._ubranchcache = partial
        return self.branchcache
383 383
    def _readbranchcache(self):
        # load .hg/branch.cache; first line is "<tiphex> <tiprev>", the
        # rest are "<nodehex> <branchname>" entries.  Returns
        # (branchmap, tipnode, tiprev); any problem yields an empty cache.
        partial = {}
        try:
            f = self.opener("branch.cache")
            lines = f.read().split('\n')
            f.close()
        except (IOError, OSError):
            return {}, nullid, nullrev

        try:
            last, lrev = lines.pop(0).split(" ", 1)
            last, lrev = bin(last), int(lrev)
            if not (lrev < self.changelog.count() and
                    self.changelog.node(lrev) == last): # sanity check
                # invalidate the cache
                raise ValueError('invalidating branch cache (tip differs)')
            for l in lines:
                if not l: continue
                node, label = l.split(" ", 1)
                partial[label.strip()] = bin(node)
        except (KeyboardInterrupt, util.SignalInterrupt):
            raise
        except Exception, inst:
            # a corrupt cache is never fatal; fall back to an empty one
            if self.ui.debugflag:
                self.ui.warn(str(inst), '\n')
            partial, last, lrev = {}, nullid, nullrev
        return partial, last, lrev

    def _writebranchcache(self, branches, tip, tiprev):
        # best-effort write of the cache; write failures are ignored
        # (e.g. read-only repository)
        try:
            f = self.opener("branch.cache", "w", atomictemp=True)
            f.write("%s %s\n" % (hex(tip), tiprev))
            for label, node in branches.iteritems():
                f.write("%s %s\n" % (hex(node), label))
            f.rename()
        except (IOError, OSError):
            pass
421 421
422 422 def _updatebranchcache(self, partial, start, end):
423 423 for r in xrange(start, end):
424 424 c = self.changectx(r)
425 425 b = c.branch()
426 426 partial[b] = c.node()
427 427
    def lookup(self, key):
        """Resolve *key* (rev number, hex prefix, tag, branch, '.', 'null')
        to a binary changelog node; raises RepoError if nothing matches.
        The resolution order below is significant."""
        if key == '.':
            # first parent of the working directory
            key, second = self.dirstate.parents()
            if key == nullid:
                raise repo.RepoError(_("no revision checked out"))
            if second != nullid:
                self.ui.warn(_("warning: working directory has two parents, "
                               "tag '.' uses the first\n"))
        elif key == 'null':
            return nullid
        n = self.changelog._match(key)
        if n:
            return n
        if key in self.tags():
            return self.tags()[key]
        if key in self.branchtags():
            return self.branchtags()[key]
        n = self.changelog._partialmatch(key)
        if n:
            return n
        # report binary nodes in readable hex form
        try:
            if len(key) == 20:
                key = hex(key)
        except:
            pass
        raise repo.RepoError(_("unknown revision '%s'") % key)
454 454
455 455 def dev(self):
456 456 return os.lstat(self.path).st_dev
457 457
458 458 def local(self):
459 459 return True
460 460
461 461 def join(self, f):
462 462 return os.path.join(self.path, f)
463 463
464 464 def sjoin(self, f):
465 465 f = self.encodefn(f)
466 466 return os.path.join(self.spath, f)
467 467
468 468 def wjoin(self, f):
469 469 return os.path.join(self.root, f)
470 470
471 471 def file(self, f):
472 472 if f[0] == '/':
473 473 f = f[1:]
474 474 return filelog.filelog(self.sopener, f)
475 475
476 476 def changectx(self, changeid=None):
477 477 return context.changectx(self, changeid)
478 478
479 479 def workingctx(self):
480 480 return context.workingctx(self)
481 481
482 482 def parents(self, changeid=None):
483 483 '''
484 484 get list of changectxs for parents of changeid or working directory
485 485 '''
486 486 if changeid is None:
487 487 pl = self.dirstate.parents()
488 488 else:
489 489 n = self.changelog.lookup(changeid)
490 490 pl = self.changelog.parents(n)
491 491 if pl[1] == nullid:
492 492 return [self.changectx(pl[0])]
493 493 return [self.changectx(pl[0]), self.changectx(pl[1])]
494 494
    def filectx(self, path, changeid=None, fileid=None):
        """Return a file context for path.

        changeid can be a changeset revision, node, or tag.
        fileid can be a file revision or node."""
        return context.filectx(self, path, changeid, fileid)
499 499
500 500 def getcwd(self):
501 501 return self.dirstate.getcwd()
502 502
503 503 def pathto(self, f, cwd=None):
504 504 return self.dirstate.pathto(f, cwd)
505 505
506 506 def wfile(self, f, mode='r'):
507 507 return self.wopener(f, mode)
508 508
509 509 def _link(self, f):
510 510 return os.path.islink(self.wjoin(f))
511 511
    def _filter(self, filter, filename, data):
        # Run data through the first configured filter (config section
        # "encode" or "decode") whose file pattern matches filename.
        if filter not in self.filterpats:
            # lazily build and cache the (matcher, function, params) list
            l = []
            for pat, cmd in self.ui.configitems(filter):
                mf = util.matcher(self.root, "", [pat], [], [])[1]
                fn = None
                params = cmd
                for name, filterfn in self._datafilters.iteritems():
                    if cmd.startswith(name):
                        # in-process filter registered via adddatafilter
                        fn = filterfn
                        params = cmd[len(name):].lstrip()
                        break
                if not fn:
                    # fall back to piping through the external command
                    fn = lambda s, c, **kwargs: util.filter(s, c)
                # Wrap old filters not supporting keyword arguments
                if not inspect.getargspec(fn)[2]:
                    oldfn = fn
                    fn = lambda s, c, **kwargs: oldfn(s, c)
                l.append((mf, fn, params))
            self.filterpats[filter] = l

        for mf, fn, cmd in self.filterpats[filter]:
            if mf(filename):
                # first matching pattern wins
                self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
                data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
                break

        return data
540 540
    def adddatafilter(self, name, filter):
        # register an in-process data filter; _filter dispatches to it when
        # a configured filter command starts with this name
        self._datafilters[name] = filter
543 543
544 544 def wread(self, filename):
545 545 if self._link(filename):
546 546 data = os.readlink(self.wjoin(filename))
547 547 else:
548 548 data = self.wopener(filename, 'r').read()
549 549 return self._filter("encode", filename, data)
550 550
    def wwrite(self, filename, data, flags):
        """Write data into the working directory, applying decode filters
        and then setting the file's flags ('l' symlink / 'x' exec)."""
        data = self._filter("decode", filename, data)
        try:
            # remove any existing file first so we do not write through a
            # stale symlink
            os.unlink(self.wjoin(filename))
        except OSError:
            pass
        self.wopener(filename, 'w').write(data)
        util.set_flags(self.wjoin(filename), flags)
559 559
    def wwritedata(self, filename, data):
        """Return data as it would be decoded on write to the working dir."""
        return self._filter("decode", filename, data)
562 562
    def transaction(self):
        """Open a store transaction, or nest into the running one.

        The dirstate and branch are journaled so rollback() can restore
        them; aftertrans renames journal.* to undo.* on close.
        """
        if self._transref and self._transref():
            # a transaction is already running; nest inside it
            return self._transref().nest()

        # abort here if the journal already exists
        if os.path.exists(self.sjoin("journal")):
            raise repo.RepoError(_("journal already exists - run hg recover"))

        # save dirstate for rollback
        try:
            ds = self.opener("dirstate").read()
        except IOError:
            ds = ""
        self.opener("journal.dirstate", "w").write(ds)
        self.opener("journal.branch", "w").write(self.dirstate.branch())

        renames = [(self.sjoin("journal"), self.sjoin("undo")),
                   (self.join("journal.dirstate"), self.join("undo.dirstate")),
                   (self.join("journal.branch"), self.join("undo.branch"))]
        tr = transaction.transaction(self.ui.warn, self.sopener,
                                     self.sjoin("journal"),
                                     aftertrans(renames),
                                     self._createmode)
        # weakref: transaction lifetime is controlled by the caller
        self._transref = weakref.ref(tr)
        return tr
588 588
    def recover(self):
        """Roll back an interrupted transaction.

        Returns True if a journal was found and rolled back, else False.
        """
        l = self.lock()
        try:
            if os.path.exists(self.sjoin("journal")):
                self.ui.status(_("rolling back interrupted transaction\n"))
                transaction.rollback(self.sopener, self.sjoin("journal"))
                # caches may now be stale; force a reread
                self.invalidate()
                return True
            else:
                self.ui.warn(_("no interrupted transaction available\n"))
                return False
        finally:
            del l
602 602
    def rollback(self):
        """Undo the last committed transaction, restoring the journaled
        dirstate and branch written by transaction()."""
        wlock = lock = None
        try:
            wlock = self.wlock()
            lock = self.lock()
            if os.path.exists(self.sjoin("undo")):
                self.ui.status(_("rolling back last transaction\n"))
                transaction.rollback(self.sopener, self.sjoin("undo"))
                util.rename(self.join("undo.dirstate"), self.join("dirstate"))
                try:
                    branch = self.opener("undo.branch").read()
                    self.dirstate.setbranch(branch)
                except IOError:
                    # undo.branch may be missing (older journal format)
                    self.ui.warn(_("Named branch could not be reset, "
                                   "current branch still is: %s\n")
                                 % util.tolocal(self.dirstate.branch()))
                self.invalidate()
                self.dirstate.invalidate()
            else:
                self.ui.warn(_("no rollback information available\n"))
        finally:
            del lock, wlock
625 625
626 626 def invalidate(self):
627 627 for a in "changelog manifest".split():
628 628 if hasattr(self, a):
629 629 self.__delattr__(a)
630 630 self.tagscache = None
631 631 self._tagstypecache = None
632 632 self.nodetagscache = None
633 633 self.branchcache = None
634 634 self._ubranchcache = None
635 635 self._branchcachetip = None
636 636
    def _lock(self, lockname, wait, releasefn, acquirefn, desc):
        # Try a non-blocking acquire first; on contention either raise
        # immediately (wait=False) or retry with the configured timeout.
        try:
            l = lock.lock(lockname, 0, releasefn, desc=desc)
        except lock.LockHeld, inst:
            if not wait:
                raise
            self.ui.warn(_("waiting for lock on %s held by %r\n") %
                         (desc, inst.locker))
            # default to 600 seconds timeout
            l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
                          releasefn, desc=desc)
        if acquirefn:
            acquirefn()
        return l
651 651
652 652 def lock(self, wait=True):
653 653 if self._lockref and self._lockref():
654 654 return self._lockref()
655 655
656 656 l = self._lock(self.sjoin("lock"), wait, None, self.invalidate,
657 657 _('repository %s') % self.origroot)
658 658 self._lockref = weakref.ref(l)
659 659 return l
660 660
661 661 def wlock(self, wait=True):
662 662 if self._wlockref and self._wlockref():
663 663 return self._wlockref()
664 664
665 665 l = self._lock(self.join("wlock"), wait, self.dirstate.write,
666 666 self.dirstate.invalidate, _('working directory of %s') %
667 667 self.origroot)
668 668 self._wlockref = weakref.ref(l)
669 669 return l
670 670
    def filecommit(self, fn, manifest1, manifest2, linkrev, tr, changelist):
        """
        commit an individual file as part of a larger transaction

        Appends fn to changelist when a new filelog revision is created
        and returns the new file node (or the existing parent node when
        the file is unmodified).
        """

        t = self.wread(fn)
        fl = self.file(fn)
        fp1 = manifest1.get(fn, nullid)
        fp2 = manifest2.get(fn, nullid)

        meta = {}
        cp = self.dirstate.copied(fn)
        if cp:
            # Mark the new revision of this file as a copy of another
            # file.  This copy data will effectively act as a parent
            # of this new revision.  If this is a merge, the first
            # parent will be the nullid (meaning "look up the copy data")
            # and the second one will be the other parent.  For example:
            #
            # 0 --- 1 --- 3   rev1 changes file foo
            #   \       /     rev2 renames foo to bar and changes it
            #    \- 2 -/      rev3 should have bar with all changes and
            #                      should record that bar descends from
            #                      bar in rev2 and foo in rev1
            #
            # this allows this merge to succeed:
            #
            # 0 --- 1 --- 3   rev4 reverts the content change from rev2
            #   \       /     merging rev3 and rev4 should use bar@rev2
            #    \- 2 --- 4        as the merge base
            #
            meta["copy"] = cp
            if not manifest2: # not a branch merge
                meta["copyrev"] = hex(manifest1.get(cp, nullid))
                fp2 = nullid
            elif fp2 != nullid: # copied on remote side
                meta["copyrev"] = hex(manifest1.get(cp, nullid))
            elif fp1 != nullid: # copied on local side, reversed
                meta["copyrev"] = hex(manifest2.get(cp))
                fp2 = fp1
            elif cp in manifest2: # directory rename on local side
                meta["copyrev"] = hex(manifest2[cp])
            else: # directory rename on remote side
                meta["copyrev"] = hex(manifest1.get(cp, nullid))
            self.ui.debug(_(" %s: copy %s:%s\n") %
                          (fn, cp, meta["copyrev"]))
            fp1 = nullid
        elif fp2 != nullid:
            # is one parent an ancestor of the other?
            fpa = fl.ancestor(fp1, fp2)
            if fpa == fp1:
                fp1, fp2 = fp2, nullid
            elif fpa == fp2:
                fp2 = nullid

        # is the file unmodified from the parent? report existing entry
        if fp2 == nullid and not fl.cmp(fp1, t) and not meta:
            return fp1

        changelist.append(fn)
        return fl.add(t, meta, tr, linkrev, fp1, fp2)
732 732
733 733 def rawcommit(self, files, text, user, date, p1=None, p2=None, extra={}):
734 734 if p1 is None:
735 735 p1, p2 = self.dirstate.parents()
736 736 return self.commit(files=files, text=text, user=user, date=date,
737 737 p1=p1, p2=p2, extra=extra, empty_ok=True)
738 738
    def commit(self, files=None, text="", user=None, date=None,
               match=util.always, force=False, force_editor=False,
               p1=None, p2=None, extra={}, empty_ok=False):
        """Commit changes and return the new changeset node, or None when
        nothing changed.

        When p1 is None (the normal path) the dirstate supplies parents
        and the file lists; otherwise the caller's parents and file list
        are trusted (the rawcommit path) and the dirstate is left alone.
        """
        wlock = lock = tr = None
        valid = 0 # don't save the dirstate if this isn't set
        if files:
            files = util.unique(files)
        try:
            commit = []
            remove = []
            changed = []
            use_dirstate = (p1 is None) # not rawcommit
            extra = extra.copy()

            if use_dirstate:
                if files:
                    # explicit file list: partition by dirstate status
                    for f in files:
                        s = self.dirstate[f]
                        if s in 'nma':
                            commit.append(f)
                        elif s == 'r':
                            remove.append(f)
                        else:
                            self.ui.warn(_("%s not tracked!\n") % f)
                else:
                    changes = self.status(match=match)[:5]
                    modified, added, removed, deleted, unknown = changes
                    commit = modified + added
                    remove = removed
            else:
                commit = files

            if use_dirstate:
                p1, p2 = self.dirstate.parents()
                update_dirstate = True
            else:
                p1, p2 = p1, p2 or nullid
                update_dirstate = (self.dirstate.parents()[0] == p1)

            c1 = self.changelog.read(p1)
            c2 = self.changelog.read(p2)
            m1 = self.manifest.read(c1[0]).copy()
            m2 = self.manifest.read(c2[0])

            if use_dirstate:
                branchname = self.workingctx().branch()
                try:
                    branchname = branchname.decode('UTF-8').encode('UTF-8')
                except UnicodeDecodeError:
                    raise util.Abort(_('branch name not in UTF-8!'))
            else:
                branchname = ""

            if use_dirstate:
                oldname = c1[5].get("branch") # stored in UTF-8
                if (not commit and not remove and not force and p2 == nullid
                    and branchname == oldname):
                    self.ui.status(_("nothing changed\n"))
                    return None

            xp1 = hex(p1)
            if p2 == nullid: xp2 = ''
            else: xp2 = hex(p2)

            self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)

            wlock = self.wlock()
            lock = self.lock()
            tr = self.transaction()
            # proxy so helpers cannot accidentally extend the tr lifetime
            trp = weakref.proxy(tr)

            # check in files
            new = {}
            linkrev = self.changelog.count()
            commit.sort()
            is_exec = util.execfunc(self.root, m1.execf)
            is_link = util.linkfunc(self.root, m1.linkf)
            for f in commit:
                self.ui.note(f + "\n")
                try:
                    new[f] = self.filecommit(f, m1, m2, linkrev, trp, changed)
                    new_exec = is_exec(f)
                    new_link = is_link(f)
                    if ((not changed or changed[-1] != f) and
                        m2.get(f) != new[f]):
                        # mention the file in the changelog if some
                        # flag changed, even if there was no content
                        # change.
                        old_exec = m1.execf(f)
                        old_link = m1.linkf(f)
                        if old_exec != new_exec or old_link != new_link:
                            changed.append(f)
                    m1.set(f, new_exec, new_link)
                    if use_dirstate:
                        self.dirstate.normal(f)

                except (OSError, IOError):
                    if use_dirstate:
                        self.ui.warn(_("trouble committing %s!\n") % f)
                        raise
                    else:
                        remove.append(f)

            # update manifest
            m1.update(new)
            remove.sort()
            removed = []

            for f in remove:
                if f in m1:
                    del m1[f]
                    removed.append(f)
                elif f in m2:
                    removed.append(f)
            mn = self.manifest.add(m1, trp, linkrev, c1[0], c2[0],
                                   (new, removed))

            # add changeset
            new = new.keys()
            new.sort()

            user = user or self.ui.username()
            if (not empty_ok and not text) or force_editor:
                edittext = []
                if text:
                    edittext.append(text)
                edittext.append("")
                edittext.append(_("HG: Enter commit message."
                                  " Lines beginning with 'HG:' are removed."))
                edittext.append("HG: --")
                edittext.append("HG: user: %s" % user)
                if p2 != nullid:
                    edittext.append("HG: branch merge")
                if branchname:
                    edittext.append("HG: branch '%s'" % util.tolocal(branchname))
                edittext.extend(["HG: changed %s" % f for f in changed])
                edittext.extend(["HG: removed %s" % f for f in removed])
                if not changed and not remove:
                    edittext.append("HG: no files changed")
                edittext.append("")
                # run editor in the repository root
                olddir = os.getcwd()
                os.chdir(self.root)
                text = self.ui.edit("\n".join(edittext), user)
                os.chdir(olddir)

            if branchname:
                extra["branch"] = branchname

            if use_dirstate:
                # strip leading blank lines and trailing whitespace
                lines = [line.rstrip() for line in text.rstrip().splitlines()]
                while lines and not lines[0]:
                    del lines[0]
                if not lines:
                    raise util.Abort(_("empty commit message"))
                text = '\n'.join(lines)

            n = self.changelog.add(mn, changed + removed, text, trp, p1, p2,
                                   user, date, extra)
            self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
                      parent2=xp2)
            tr.close()

            if self.branchcache:
                self.branchtags()

            if use_dirstate or update_dirstate:
                self.dirstate.setparents(n)
                if use_dirstate:
                    for f in removed:
                        self.dirstate.forget(f)
            valid = 1 # our dirstate updates are complete

            self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
            return n
        finally:
            if not valid: # don't save our updated dirstate
                self.dirstate.invalidate()
            del tr, lock, wlock
918 918
    def walk(self, node=None, files=[], match=util.always, badmatch=None):
        '''
        walk recursively through the directory tree or a given
        changeset, finding all files matched by the match
        function

        results are yielded in a tuple (src, filename), where src
        is one of:
        'f' the file was found in the directory tree
        'm' the file was only in the dirstate and not in the tree
        'b' file was not found and matched badmatch
        '''

        if node:
            fdict = dict.fromkeys(files)
            # for dirstate.walk, files=['.'] means "walk the whole tree".
            # follow that here, too
            fdict.pop('.', None)
            mdict = self.manifest.read(self.changelog.read(node)[0])
            mfiles = mdict.keys()
            mfiles.sort()
            for fn in mfiles:
                for ffn in fdict:
                    # match if the file is the exact name or a directory
                    if ffn == fn or fn.startswith("%s/" % ffn):
                        # safe: we break immediately after mutating fdict
                        del fdict[ffn]
                        break
                if match(fn):
                    yield 'm', fn
            # whatever remains in fdict matched nothing in the manifest
            ffiles = fdict.keys()
            ffiles.sort()
            for fn in ffiles:
                if badmatch and badmatch(fn):
                    if match(fn):
                        yield 'b', fn
                else:
                    self.ui.warn(_('%s: No such file in rev %s\n')
                                 % (self.pathto(fn), short(node)))
        else:
            for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
                yield src, fn
960 960
    def status(self, node1=None, node2=None, files=[], match=util.always,
               list_ignored=False, list_clean=False, list_unknown=True):
        """return status of files between two nodes or node and working directory

        If node1 is None, use the first dirstate parent instead.
        If node2 is None, compare node1 with working directory.

        Returns a 7-tuple of sorted lists:
        (modified, added, removed, deleted, unknown, ignored, clean)
        """

        def fcmp(fn, getnode):
            # compare working-directory content with the stored revision
            t1 = self.wread(fn)
            return self.file(fn).cmp(getnode(fn), t1)

        def mfmatches(node):
            # manifest of node, restricted to files accepted by match
            change = self.changelog.read(node)
            mf = self.manifest.read(change[0]).copy()
            for fn in mf.keys():
                if not match(fn):
                    del mf[fn]
            return mf

        modified, added, removed, deleted, unknown = [], [], [], [], []
        ignored, clean = [], []

        compareworking = False
        if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
            compareworking = True

        if not compareworking:
            # read the manifest from node1 before the manifest from node2,
            # so that we'll hit the manifest cache if we're going through
            # all the revisions in parent->child order.
            mf1 = mfmatches(node1)

        # are we comparing the working directory?
        if not node2:
            (lookup, modified, added, removed, deleted, unknown,
             ignored, clean) = self.dirstate.status(files, match,
                                                    list_ignored, list_clean,
                                                    list_unknown)

            # are we comparing working dir against its parent?
            if compareworking:
                if lookup:
                    fixup = []
                    # do a full compare of any files that might have changed
                    ctx = self.changectx()
                    mexec = lambda f: 'x' in ctx.fileflags(f)
                    mlink = lambda f: 'l' in ctx.fileflags(f)
                    is_exec = util.execfunc(self.root, mexec)
                    is_link = util.linkfunc(self.root, mlink)
                    def flags(f):
                        return is_link(f) and 'l' or is_exec(f) and 'x' or ''
                    for f in lookup:
                        if (f not in ctx or flags(f) != ctx.fileflags(f)
                            or ctx[f].cmp(self.wread(f))):
                            modified.append(f)
                        else:
                            fixup.append(f)
                            if list_clean:
                                clean.append(f)

                    # update dirstate for files that are actually clean
                    if fixup:
                        wlock = None
                        try:
                            try:
                                # best effort: a held lock just skips the fixup
                                wlock = self.wlock(False)
                            except lock.LockException:
                                pass
                            if wlock:
                                for f in fixup:
                                    self.dirstate.normal(f)
                        finally:
                            del wlock
            else:
                # we are comparing working dir against non-parent
                # generate a pseudo-manifest for the working dir
                # XXX: create it in dirstate.py ?
                mf2 = mfmatches(self.dirstate.parents()[0])
                is_exec = util.execfunc(self.root, mf2.execf)
                is_link = util.linkfunc(self.root, mf2.linkf)
                for f in lookup + modified + added:
                    mf2[f] = ""
                    mf2.set(f, is_exec(f), is_link(f))
                for f in removed:
                    if f in mf2:
                        del mf2[f]

        else:
            # we are comparing two revisions
            mf2 = mfmatches(node2)

        if not compareworking:
            # flush lists from dirstate before comparing manifests
            modified, added, clean = [], [], []

            # make sure to sort the files so we talk to the disk in a
            # reasonable order
            mf2keys = mf2.keys()
            mf2keys.sort()
            getnode = lambda fn: mf1.get(fn, nullid)
            for fn in mf2keys:
                if fn in mf1:
                    if (mf1.flags(fn) != mf2.flags(fn) or
                        (mf1[fn] != mf2[fn] and
                         (mf2[fn] != "" or fcmp(fn, getnode)))):
                        modified.append(fn)
                    elif list_clean:
                        clean.append(fn)
                    del mf1[fn]
                else:
                    added.append(fn)

            # anything left in mf1 is missing from mf2, i.e. removed
            removed = mf1.keys()

        # sort and return results:
        for l in modified, added, removed, deleted, unknown, ignored, clean:
            l.sort()
        return (modified, added, removed, deleted, unknown, ignored, clean)
1080 1080
1081 1081 def add(self, list):
1082 1082 wlock = self.wlock()
1083 1083 try:
1084 1084 rejected = []
1085 1085 for f in list:
1086 1086 p = self.wjoin(f)
1087 1087 try:
1088 1088 st = os.lstat(p)
1089 1089 except:
1090 1090 self.ui.warn(_("%s does not exist!\n") % f)
1091 1091 rejected.append(f)
1092 1092 continue
1093 1093 if st.st_size > 10000000:
1094 1094 self.ui.warn(_("%s: files over 10MB may cause memory and"
1095 1095 " performance problems\n"
1096 1096 "(use 'hg revert %s' to unadd the file)\n")
1097 1097 % (f, f))
1098 1098 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1099 1099 self.ui.warn(_("%s not added: only files and symlinks "
1100 1100 "supported currently\n") % f)
1101 1101 rejected.append(p)
1102 1102 elif self.dirstate[f] in 'amn':
1103 1103 self.ui.warn(_("%s already tracked!\n") % f)
1104 1104 elif self.dirstate[f] == 'r':
1105 1105 self.dirstate.normallookup(f)
1106 1106 else:
1107 1107 self.dirstate.add(f)
1108 1108 return rejected
1109 1109 finally:
1110 1110 del wlock
1111 1111
1112 1112 def forget(self, list):
1113 1113 wlock = self.wlock()
1114 1114 try:
1115 1115 for f in list:
1116 1116 if self.dirstate[f] != 'a':
1117 1117 self.ui.warn(_("%s not added!\n") % f)
1118 1118 else:
1119 1119 self.dirstate.forget(f)
1120 1120 finally:
1121 1121 del wlock
1122 1122
    def remove(self, list, unlink=False):
        """Schedule files for removal; with unlink=True also delete the
        working copies first."""
        wlock = None
        try:
            if unlink:
                for f in list:
                    try:
                        util.unlink(self.wjoin(f))
                    except OSError, inst:
                        # a file that is already gone is fine
                        if inst.errno != errno.ENOENT:
                            raise
            wlock = self.wlock()
            for f in list:
                if unlink and os.path.exists(self.wjoin(f)):
                    # unlink failed to take effect; refuse to mark removed
                    self.ui.warn(_("%s still exists!\n") % f)
                elif self.dirstate[f] == 'a':
                    self.dirstate.forget(f)
                elif f not in self.dirstate:
                    self.ui.warn(_("%s not tracked!\n") % f)
                else:
                    self.dirstate.remove(f)
        finally:
            del wlock
1145 1145
1146 1146 def undelete(self, list):
1147 1147 wlock = None
1148 1148 try:
1149 1149 manifests = [self.manifest.read(self.changelog.read(p)[0])
1150 1150 for p in self.dirstate.parents() if p != nullid]
1151 1151 wlock = self.wlock()
1152 1152 for f in list:
1153 1153 if self.dirstate[f] != 'r':
1154 1154 self.ui.warn("%s not removed!\n" % f)
1155 1155 else:
1156 1156 m = f in manifests[0] and manifests[0] or manifests[1]
1157 1157 t = self.file(f).read(m[f])
1158 1158 self.wwrite(f, t, m.flags(f))
1159 1159 self.dirstate.normal(f)
1160 1160 finally:
1161 1161 del wlock
1162 1162
    def copy(self, source, dest):
        """Record in the dirstate that dest is a copy of source.

        dest must already exist in the working directory as a regular
        file or symlink; it is added to the dirstate if not yet tracked.
        """
        wlock = None
        try:
            p = self.wjoin(dest)
            if not (os.path.exists(p) or os.path.islink(p)):
                self.ui.warn(_("%s does not exist!\n") % dest)
            elif not (os.path.isfile(p) or os.path.islink(p)):
                self.ui.warn(_("copy failed: %s is not a file or a "
                               "symbolic link\n") % dest)
            else:
                wlock = self.wlock()
                if dest not in self.dirstate:
                    self.dirstate.add(dest)
                self.dirstate.copy(source, dest)
        finally:
            del wlock
1179 1179
1180 1180 def heads(self, start=None):
1181 1181 heads = self.changelog.heads(start)
1182 1182 # sort the output in rev descending order
1183 1183 heads = [(-self.changelog.rev(h), h) for h in heads]
1184 1184 heads.sort()
1185 1185 return [n for (r, n) in heads]
1186 1186
    def branchheads(self, branch, start=None):
        """Return the head nodes of the named branch, optionally limited
        to descendants of start."""
        branches = self.branchtags()
        if branch not in branches:
            return []
        # The basic algorithm is this:
        #
        # Start from the branch tip since there are no later revisions that can
        # possibly be in this branch, and the tip is a guaranteed head.
        #
        # Remember the tip's parents as the first ancestors, since these by
        # definition are not heads.
        #
        # Step backwards from the branch tip through all the revisions. We are
        # guaranteed by the rules of Mercurial that we will now be visiting the
        # nodes in reverse topological order (children before parents).
        #
        # If a revision is one of the ancestors of a head then we can toss it
        # out of the ancestors set (we've already found it and won't be
        # visiting it again) and put its parents in the ancestors set.
        #
        # Otherwise, if a revision is in the branch it's another head, since it
        # wasn't in the ancestor list of an existing head.  So add it to the
        # head list, and add its parents to the ancestor list.
        #
        # If it is not in the branch ignore it.
        #
        # Once we have a list of heads, use nodesbetween to filter out all the
        # heads that cannot be reached from startrev.  There may be a more
        # efficient way to do this as part of the previous algorithm.

        set = util.set
        heads = [self.changelog.rev(branches[branch])]
        # Don't care if ancestors contains nullrev or not.
        ancestors = set(self.changelog.parentrevs(heads[0]))
        for rev in xrange(heads[0] - 1, nullrev, -1):
            if rev in ancestors:
                ancestors.update(self.changelog.parentrevs(rev))
                ancestors.remove(rev)
            elif self.changectx(rev).branch() == branch:
                heads.append(rev)
                ancestors.update(self.changelog.parentrevs(rev))
        heads = [self.changelog.node(rev) for rev in heads]
        if start is not None:
            heads = self.changelog.nodesbetween([start], heads)[2]
        return heads
1232 1232
1233 1233 def branches(self, nodes):
1234 1234 if not nodes:
1235 1235 nodes = [self.changelog.tip()]
1236 1236 b = []
1237 1237 for n in nodes:
1238 1238 t = n
1239 1239 while 1:
1240 1240 p = self.changelog.parents(n)
1241 1241 if p[1] != nullid or p[0] == nullid:
1242 1242 b.append((t, n, p[0], p[1]))
1243 1243 break
1244 1244 n = p[0]
1245 1245 return b
1246 1246
1247 1247 def between(self, pairs):
1248 1248 r = []
1249 1249
1250 1250 for top, bottom in pairs:
1251 1251 n, l, i = top, [], 0
1252 1252 f = 1
1253 1253
1254 1254 while n != bottom:
1255 1255 p = self.changelog.parents(n)[0]
1256 1256 if i == f:
1257 1257 l.append(n)
1258 1258 f = f * 2
1259 1259 n = p
1260 1260 i += 1
1261 1261
1262 1262 r.append(l)
1263 1263
1264 1264 return r
1265 1265
1266 1266 def findincoming(self, remote, base=None, heads=None, force=False):
1267 1267 """Return list of roots of the subsets of missing nodes from remote
1268 1268
1269 1269 If base dict is specified, assume that these nodes and their parents
1270 1270 exist on the remote side and that no child of a node of base exists
1271 1271 in both remote and self.
1272 1272 Furthermore base will be updated to include the nodes that exists
1273 1273 in self and remote but no children exists in self and remote.
1274 1274 If a list of heads is specified, return only nodes which are heads
1275 1275 or ancestors of these heads.
1276 1276
1277 1277 All the ancestors of base are in self and in remote.
1278 1278 All the descendants of the list returned are missing in self.
1279 1279 (and so we know that the rest of the nodes are missing in remote, see
1280 1280 outgoing)
1281 1281 """
1282 1282 m = self.changelog.nodemap
1283 1283 search = []
1284 1284 fetch = {}
1285 1285 seen = {}
1286 1286 seenbranch = {}
1287 1287 if base == None:
1288 1288 base = {}
1289 1289
1290 1290 if not heads:
1291 1291 heads = remote.heads()
1292 1292
1293 1293 if self.changelog.tip() == nullid:
1294 1294 base[nullid] = 1
1295 1295 if heads != [nullid]:
1296 1296 return [nullid]
1297 1297 return []
1298 1298
1299 1299 # assume we're closer to the tip than the root
1300 1300 # and start by examining the heads
1301 1301 self.ui.status(_("searching for changes\n"))
1302 1302
1303 1303 unknown = []
1304 1304 for h in heads:
1305 1305 if h not in m:
1306 1306 unknown.append(h)
1307 1307 else:
1308 1308 base[h] = 1
1309 1309
1310 1310 if not unknown:
1311 1311 return []
1312 1312
1313 1313 req = dict.fromkeys(unknown)
1314 1314 reqcnt = 0
1315 1315
1316 1316 # search through remote branches
1317 1317 # a 'branch' here is a linear segment of history, with four parts:
1318 1318 # head, root, first parent, second parent
1319 1319 # (a branch always has two parents (or none) by definition)
1320 1320 unknown = remote.branches(unknown)
1321 1321 while unknown:
1322 1322 r = []
1323 1323 while unknown:
1324 1324 n = unknown.pop(0)
1325 1325 if n[0] in seen:
1326 1326 continue
1327 1327
1328 1328 self.ui.debug(_("examining %s:%s\n")
1329 1329 % (short(n[0]), short(n[1])))
1330 1330 if n[0] == nullid: # found the end of the branch
1331 1331 pass
1332 1332 elif n in seenbranch:
1333 1333 self.ui.debug(_("branch already found\n"))
1334 1334 continue
1335 1335 elif n[1] and n[1] in m: # do we know the base?
1336 1336 self.ui.debug(_("found incomplete branch %s:%s\n")
1337 1337 % (short(n[0]), short(n[1])))
1338 1338 search.append(n) # schedule branch range for scanning
1339 1339 seenbranch[n] = 1
1340 1340 else:
1341 1341 if n[1] not in seen and n[1] not in fetch:
1342 1342 if n[2] in m and n[3] in m:
1343 1343 self.ui.debug(_("found new changeset %s\n") %
1344 1344 short(n[1]))
1345 1345 fetch[n[1]] = 1 # earliest unknown
1346 1346 for p in n[2:4]:
1347 1347 if p in m:
1348 1348 base[p] = 1 # latest known
1349 1349
1350 1350 for p in n[2:4]:
1351 1351 if p not in req and p not in m:
1352 1352 r.append(p)
1353 1353 req[p] = 1
1354 1354 seen[n[0]] = 1
1355 1355
1356 1356 if r:
1357 1357 reqcnt += 1
1358 1358 self.ui.debug(_("request %d: %s\n") %
1359 1359 (reqcnt, " ".join(map(short, r))))
1360 1360 for p in xrange(0, len(r), 10):
1361 1361 for b in remote.branches(r[p:p+10]):
1362 1362 self.ui.debug(_("received %s:%s\n") %
1363 1363 (short(b[0]), short(b[1])))
1364 1364 unknown.append(b)
1365 1365
1366 1366 # do binary search on the branches we found
1367 1367 while search:
1368 1368 n = search.pop(0)
1369 1369 reqcnt += 1
1370 1370 l = remote.between([(n[0], n[1])])[0]
1371 1371 l.append(n[1])
1372 1372 p = n[0]
1373 1373 f = 1
1374 1374 for i in l:
1375 1375 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1376 1376 if i in m:
1377 1377 if f <= 2:
1378 1378 self.ui.debug(_("found new branch changeset %s\n") %
1379 1379 short(p))
1380 1380 fetch[p] = 1
1381 1381 base[i] = 1
1382 1382 else:
1383 1383 self.ui.debug(_("narrowed branch search to %s:%s\n")
1384 1384 % (short(p), short(i)))
1385 1385 search.append((p, i))
1386 1386 break
1387 1387 p, f = i, f * 2
1388 1388
1389 1389 # sanity check our fetch list
1390 1390 for f in fetch.keys():
1391 1391 if f in m:
1392 1392 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1393 1393
1394 1394 if base.keys() == [nullid]:
1395 1395 if force:
1396 1396 self.ui.warn(_("warning: repository is unrelated\n"))
1397 1397 else:
1398 1398 raise util.Abort(_("repository is unrelated"))
1399 1399
1400 1400 self.ui.debug(_("found new changesets starting at ") +
1401 1401 " ".join([short(f) for f in fetch]) + "\n")
1402 1402
1403 1403 self.ui.debug(_("%d total queries\n") % reqcnt)
1404 1404
1405 1405 return fetch.keys()
1406 1406
1407 1407 def findoutgoing(self, remote, base=None, heads=None, force=False):
1408 1408 """Return list of nodes that are roots of subsets not in remote
1409 1409
1410 1410 If base dict is specified, assume that these nodes and their parents
1411 1411 exist on the remote side.
1412 1412 If a list of heads is specified, return only nodes which are heads
1413 1413 or ancestors of these heads, and return a second element which
1414 1414 contains all remote heads which get new children.
1415 1415 """
1416 1416 if base == None:
1417 1417 base = {}
1418 1418 self.findincoming(remote, base, heads, force=force)
1419 1419
1420 1420 self.ui.debug(_("common changesets up to ")
1421 1421 + " ".join(map(short, base.keys())) + "\n")
1422 1422
1423 1423 remain = dict.fromkeys(self.changelog.nodemap)
1424 1424
1425 1425 # prune everything remote has from the tree
1426 1426 del remain[nullid]
1427 1427 remove = base.keys()
1428 1428 while remove:
1429 1429 n = remove.pop(0)
1430 1430 if n in remain:
1431 1431 del remain[n]
1432 1432 for p in self.changelog.parents(n):
1433 1433 remove.append(p)
1434 1434
1435 1435 # find every node whose parents have been pruned
1436 1436 subset = []
1437 1437 # find every remote head that will get new children
1438 1438 updated_heads = {}
1439 1439 for n in remain:
1440 1440 p1, p2 = self.changelog.parents(n)
1441 1441 if p1 not in remain and p2 not in remain:
1442 1442 subset.append(n)
1443 1443 if heads:
1444 1444 if p1 in heads:
1445 1445 updated_heads[p1] = True
1446 1446 if p2 in heads:
1447 1447 updated_heads[p2] = True
1448 1448
1449 1449 # this is the set of all roots we have to push
1450 1450 if heads:
1451 1451 return subset, updated_heads.keys()
1452 1452 else:
1453 1453 return subset
1454 1454
1455 1455 def pull(self, remote, heads=None, force=False):
1456 1456 lock = self.lock()
1457 1457 try:
1458 1458 fetch = self.findincoming(remote, heads=heads, force=force)
1459 1459 if fetch == [nullid]:
1460 1460 self.ui.status(_("requesting all changes\n"))
1461 1461
1462 1462 if not fetch:
1463 1463 self.ui.status(_("no changes found\n"))
1464 1464 return 0
1465 1465
1466 1466 if heads is None:
1467 1467 cg = remote.changegroup(fetch, 'pull')
1468 1468 else:
1469 1469 if 'changegroupsubset' not in remote.capabilities:
1470 1470 raise util.Abort(_("Partial pull cannot be done because other repository doesn't support changegroupsubset."))
1471 1471 cg = remote.changegroupsubset(fetch, heads, 'pull')
1472 1472 return self.addchangegroup(cg, 'pull', remote.url())
1473 1473 finally:
1474 1474 del lock
1475 1475
1476 1476 def push(self, remote, force=False, revs=None):
1477 1477 # there are two ways to push to remote repo:
1478 1478 #
1479 1479 # addchangegroup assumes local user can lock remote
1480 1480 # repo (local filesystem, old ssh servers).
1481 1481 #
1482 1482 # unbundle assumes local user cannot lock remote repo (new ssh
1483 1483 # servers, http servers).
1484 1484
1485 1485 if remote.capable('unbundle'):
1486 1486 return self.push_unbundle(remote, force, revs)
1487 1487 return self.push_addchangegroup(remote, force, revs)
1488 1488
    def prepush(self, remote, force, revs):
        """Compute the changegroup to push to remote.

        Returns a pair.  On success: (changegroup, remote_heads).
        When nothing should be sent: (None, status) where status 1
        means "no changes found" and status 0 means the push was
        refused because it would create new remote heads.
        """
        base = {}
        remote_heads = remote.heads()
        inc = self.findincoming(remote, base, remote_heads, force=force)

        update, updated_heads = self.findoutgoing(remote, base, remote_heads)
        if revs is not None:
            msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
        else:
            bases, heads = update, self.changelog.heads()

        if not bases:
            self.ui.status(_("no changes found\n"))
            return None, 1
        elif not force:
            # check if we're creating new remote heads
            # to be a remote head after push, node must be either
            # - unknown locally
            # - a local outgoing head descended from update
            # - a remote head that's known locally and not
            #   ancestral to an outgoing head
            warn = 0

            if remote_heads == [nullid]:
                # remote repo is empty: cannot create extra heads
                warn = 0
            elif not revs and len(heads) > len(remote_heads):
                warn = 1
            else:
                newheads = list(heads)
                for r in remote_heads:
                    if r in self.changelog.nodemap:
                        # remote head known locally: it stays a head only
                        # if no outgoing head descends from it
                        desc = self.changelog.heads(r, heads)
                        l = [h for h in heads if h in desc]
                        if not l:
                            newheads.append(r)
                    else:
                        # remote head unknown locally: it will remain a head
                        newheads.append(r)
                if len(newheads) > len(remote_heads):
                    warn = 1

            if warn:
                self.ui.warn(_("abort: push creates new remote heads!\n"))
                self.ui.status(_("(did you forget to merge?"
                                 " use push -f to force)\n"))
                return None, 0
        elif inc:
            self.ui.warn(_("note: unsynced remote changes!\n"))


        if revs is None:
            cg = self.changegroup(update, 'push')
        else:
            cg = self.changegroupsubset(update, revs, 'push')
        return cg, remote_heads
1544 1544
1545 1545 def push_addchangegroup(self, remote, force, revs):
1546 1546 lock = remote.lock()
1547 1547 try:
1548 1548 ret = self.prepush(remote, force, revs)
1549 1549 if ret[0] is not None:
1550 1550 cg, remote_heads = ret
1551 1551 return remote.addchangegroup(cg, 'push', self.url())
1552 1552 return ret[1]
1553 1553 finally:
1554 1554 del lock
1555 1555
1556 1556 def push_unbundle(self, remote, force, revs):
1557 1557 # local repo finds heads on server, finds out what revs it
1558 1558 # must push. once revs transferred, if server finds it has
1559 1559 # different heads (someone else won commit/push race), server
1560 1560 # aborts.
1561 1561
1562 1562 ret = self.prepush(remote, force, revs)
1563 1563 if ret[0] is not None:
1564 1564 cg, remote_heads = ret
1565 1565 if force: remote_heads = ['force']
1566 1566 return remote.unbundle(cg, remote_heads, 'push')
1567 1567 return ret[1]
1568 1568
1569 1569 def changegroupinfo(self, nodes, source):
1570 1570 if self.ui.verbose or source == 'bundle':
1571 1571 self.ui.status(_("%d changesets found\n") % len(nodes))
1572 1572 if self.ui.debugflag:
1573 1573 self.ui.debug(_("List of changesets:\n"))
1574 1574 for node in nodes:
1575 1575 self.ui.debug("%s\n" % hex(node))
1576 1576
    def changegroupsubset(self, bases, heads, source, extranodes=None):
        """This function generates a changegroup consisting of all the nodes
        that are descendents of any of the bases, and ancestors of any of
        the heads.

        It is fairly complex as determining which filenodes and which
        manifest nodes need to be included for the changeset to be complete
        is non-trivial.

        Another wrinkle is doing the reverse, figuring out which changeset in
        the changegroup a particular filenode or manifestnode belongs to.

        The caller can specify some nodes that must be included in the
        changegroup using the extranodes argument.  It should be a dict
        where the keys are the filenames (or 1 for the manifest), and the
        values are lists of (node, linknode) tuples, where node is a wanted
        node and linknode is the changelog node that should be transmitted as
        the linkrev.
        """

        self.hook('preoutgoing', throw=True, source=source)

        # Set up some initial variables
        # Make it easy to refer to self.changelog
        cl = self.changelog
        # msng is short for missing - compute the list of changesets in this
        # changegroup.
        msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
        self.changegroupinfo(msng_cl_lst, source)
        # Some bases may turn out to be superfluous, and some heads may be
        # too.  nodesbetween will return the minimal set of bases and heads
        # necessary to re-create the changegroup.

        # Known heads are the list of heads that it is assumed the recipient
        # of this changegroup will know about.
        knownheads = {}
        # We assume that all parents of bases are known heads.
        for n in bases:
            for p in cl.parents(n):
                if p != nullid:
                    knownheads[p] = 1
        knownheads = knownheads.keys()
        if knownheads:
            # Now that we know what heads are known, we can compute which
            # changesets are known.  The recipient must know about all
            # changesets required to reach the known heads from the null
            # changeset.
            has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
            junk = None
            # Transform the list into an ersatz set.
            has_cl_set = dict.fromkeys(has_cl_set)
        else:
            # If there were no known heads, the recipient cannot be assumed to
            # know about any changesets.
            has_cl_set = {}

        # Make it easy to refer to self.manifest
        mnfst = self.manifest
        # We don't know which manifests are missing yet
        msng_mnfst_set = {}
        # Nor do we know which filenodes are missing.
        msng_filenode_set = {}

        # Touch the last index entry to force the lazyindex to load fully.
        junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
        junk = None

        # A changeset always belongs to itself, so the changenode lookup
        # function for a changenode is identity.
        def identity(x):
            return x

        # A function generating function.  Sets up an environment for the
        # inner function.
        def cmp_by_rev_func(revlog):
            # Compare two nodes by their revision number in the environment's
            # revision history.  Since the revision number both represents the
            # most efficient order to read the nodes in, and represents a
            # topological sorting of the nodes, this function is often useful.
            def cmp_by_rev(a, b):
                return cmp(revlog.rev(a), revlog.rev(b))
            return cmp_by_rev

        # If we determine that a particular file or manifest node must be a
        # node that the recipient of the changegroup will already have, we can
        # also assume the recipient will have all the parents.  This function
        # prunes them from the set of missing nodes.
        def prune_parents(revlog, hasset, msngset):
            haslst = hasset.keys()
            haslst.sort(cmp_by_rev_func(revlog))
            for node in haslst:
                parentlst = [p for p in revlog.parents(node) if p != nullid]
                while parentlst:
                    n = parentlst.pop()
                    if n not in hasset:
                        hasset[n] = 1
                        p = [p for p in revlog.parents(n) if p != nullid]
                        parentlst.extend(p)
            for n in hasset:
                msngset.pop(n, None)

        # This is a function generating function used to set up an environment
        # for the inner function to execute in.
        def manifest_and_file_collector(changedfileset):
            # This is an information gathering function that gathers
            # information from each changeset node that goes out as part of
            # the changegroup.  The information gathered is a list of which
            # manifest nodes are potentially required (the recipient may
            # already have them) and total list of all files which were
            # changed in any changeset in the changegroup.
            #
            # We also remember the first changenode we saw any manifest
            # referenced by so we can later determine which changenode 'owns'
            # the manifest.
            def collect_manifests_and_files(clnode):
                c = cl.read(clnode)
                for f in c[3]:
                    # This is to make sure we only have one instance of each
                    # filename string for each filename.
                    changedfileset.setdefault(f, f)
                msng_mnfst_set.setdefault(c[0], clnode)
            return collect_manifests_and_files

        # Figure out which manifest nodes (of the ones we think might be part
        # of the changegroup) the recipient must know about and remove them
        # from the changegroup.
        def prune_manifests():
            has_mnfst_set = {}
            for n in msng_mnfst_set:
                # If a 'missing' manifest thinks it belongs to a changenode
                # the recipient is assumed to have, obviously the recipient
                # must have that manifest.
                linknode = cl.node(mnfst.linkrev(n))
                if linknode in has_cl_set:
                    has_mnfst_set[n] = 1
            prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)

        # Use the information collected in collect_manifests_and_files to say
        # which changenode any manifestnode belongs to.
        def lookup_manifest_link(mnfstnode):
            return msng_mnfst_set[mnfstnode]

        # A function generating function that sets up the initial environment
        # the inner function.
        def filenode_collector(changedfiles):
            next_rev = [0]
            # This gathers information from each manifestnode included in the
            # changegroup about which filenodes the manifest node references
            # so we can include those in the changegroup too.
            #
            # It also remembers which changenode each filenode belongs to.  It
            # does this by assuming the a filenode belongs to the changenode
            # the first manifest that references it belongs to.
            def collect_msng_filenodes(mnfstnode):
                r = mnfst.rev(mnfstnode)
                if r == next_rev[0]:
                    # If the last rev we looked at was the one just previous,
                    # we only need to see a diff.
                    deltamf = mnfst.readdelta(mnfstnode)
                    # For each line in the delta
                    for f, fnode in deltamf.items():
                        f = changedfiles.get(f, None)
                        # And if the file is in the list of files we care
                        # about.
                        if f is not None:
                            # Get the changenode this manifest belongs to
                            clnode = msng_mnfst_set[mnfstnode]
                            # Create the set of filenodes for the file if
                            # there isn't one already.
                            ndset = msng_filenode_set.setdefault(f, {})
                            # And set the filenode's changelog node to the
                            # manifest's if it hasn't been set already.
                            ndset.setdefault(fnode, clnode)
                else:
                    # Otherwise we need a full manifest.
                    m = mnfst.read(mnfstnode)
                    # For every file in we care about.
                    for f in changedfiles:
                        fnode = m.get(f, None)
                        # If it's in the manifest
                        if fnode is not None:
                            # See comments above.
                            clnode = msng_mnfst_set[mnfstnode]
                            ndset = msng_filenode_set.setdefault(f, {})
                            ndset.setdefault(fnode, clnode)
                # Remember the revision we hope to see next.
                next_rev[0] = r + 1
            return collect_msng_filenodes

        # We have a list of filenodes we think we need for a file, lets remove
        # all those we now the recipient must have.
        def prune_filenodes(f, filerevlog):
            msngset = msng_filenode_set[f]
            hasset = {}
            # If a 'missing' filenode thinks it belongs to a changenode we
            # assume the recipient must have, then the recipient must have
            # that filenode.
            for n in msngset:
                clnode = cl.node(filerevlog.linkrev(n))
                if clnode in has_cl_set:
                    hasset[n] = 1
            prune_parents(filerevlog, hasset, msngset)

        # A function generator function that sets up the a context for the
        # inner function.
        def lookup_filenode_link_func(fname):
            msngset = msng_filenode_set[fname]
            # Lookup the changenode the filenode belongs to.
            def lookup_filenode_link(fnode):
                return msngset[fnode]
            return lookup_filenode_link

        # Add the nodes that were explicitly requested.
        def add_extra_nodes(name, nodes):
            if not extranodes or name not in extranodes:
                return

            for node, linknode in extranodes[name]:
                if node not in nodes:
                    nodes[node] = linknode

        # Now that we have all theses utility functions to help out and
        # logically divide up the task, generate the group.
        def gengroup():
            # The set of changed files starts empty.
            changedfiles = {}
            # Create a changenode group generator that will call our functions
            # back to lookup the owning changenode and collect information.
            group = cl.group(msng_cl_lst, identity,
                             manifest_and_file_collector(changedfiles))
            for chnk in group:
                yield chnk

            # The list of manifests has been collected by the generator
            # calling our functions back.
            prune_manifests()
            add_extra_nodes(1, msng_mnfst_set)
            msng_mnfst_lst = msng_mnfst_set.keys()
            # Sort the manifestnodes by revision number.
            msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
            # Create a generator for the manifestnodes that calls our lookup
            # and data collection functions back.
            group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
                                filenode_collector(changedfiles))
            for chnk in group:
                yield chnk

            # These are no longer needed, dereference and toss the memory for
            # them.
            msng_mnfst_lst = None
            msng_mnfst_set.clear()

            if extranodes:
                for fname in extranodes:
                    # key 1 is the manifest, already handled above
                    if isinstance(fname, int):
                        continue
                    add_extra_nodes(fname,
                                    msng_filenode_set.setdefault(fname, {}))
                    changedfiles[fname] = 1
            changedfiles = changedfiles.keys()
            changedfiles.sort()
            # Go through all our files in order sorted by name.
            for fname in changedfiles:
                filerevlog = self.file(fname)
                if filerevlog.count() == 0:
                    raise util.Abort(_("empty or missing revlog for %s") % fname)
                # Toss out the filenodes that the recipient isn't really
                # missing.
                if fname in msng_filenode_set:
                    prune_filenodes(fname, filerevlog)
                    msng_filenode_lst = msng_filenode_set[fname].keys()
                else:
                    msng_filenode_lst = []
                # If any filenodes are left, generate the group for them,
                # otherwise don't bother.
                if len(msng_filenode_lst) > 0:
                    yield changegroup.chunkheader(len(fname))
                    yield fname
                    # Sort the filenodes by their revision #
                    msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
                    # Create a group generator and only pass in a changenode
                    # lookup function as we need to collect no information
                    # from filenodes.
                    group = filerevlog.group(msng_filenode_lst,
                                             lookup_filenode_link_func(fname))
                    for chnk in group:
                        yield chnk
                if fname in msng_filenode_set:
                    # Don't need this anymore, toss it to free memory.
                    del msng_filenode_set[fname]
            # Signal that no more groups are left.
            yield changegroup.closechunk()

            if msng_cl_lst:
                self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)

        return util.chunkbuffer(gengroup())
1873 1873
    def changegroup(self, basenodes, source):
        """Generate a changegroup of all nodes that we have that a recipient
        doesn't.

        This is much easier than the previous function as we can assume that
        the recipient has any changenode we aren't sending them."""

        self.hook('preoutgoing', throw=True, source=source)

        cl = self.changelog
        nodes = cl.nodesbetween(basenodes, None)[0]
        # ersatz set of the revision numbers being sent
        revset = dict.fromkeys([cl.rev(n) for n in nodes])
        self.changegroupinfo(nodes, source)

        # changesets are their own linkrev lookup
        def identity(x):
            return x

        # yield the nodes of revlog whose linkrev is in the outgoing set,
        # in revision order
        def gennodelst(revlog):
            for r in xrange(0, revlog.count()):
                n = revlog.node(r)
                if revlog.linkrev(n) in revset:
                    yield n

        # collect the names of all files touched by outgoing changesets
        def changed_file_collector(changedfileset):
            def collect_changed_files(clnode):
                c = cl.read(clnode)
                for fname in c[3]:
                    changedfileset[fname] = 1
            return collect_changed_files

        # map a node of revlog to the changelog node it belongs to
        def lookuprevlink_func(revlog):
            def lookuprevlink(n):
                return cl.node(revlog.linkrev(n))
            return lookuprevlink

        def gengroup():
            # construct a list of all changed files
            changedfiles = {}

            for chnk in cl.group(nodes, identity,
                                 changed_file_collector(changedfiles)):
                yield chnk
            changedfiles = changedfiles.keys()
            changedfiles.sort()

            mnfst = self.manifest
            nodeiter = gennodelst(mnfst)
            for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
                yield chnk

            for fname in changedfiles:
                filerevlog = self.file(fname)
                if filerevlog.count() == 0:
                    raise util.Abort(_("empty or missing revlog for %s") % fname)
                nodeiter = gennodelst(filerevlog)
                nodeiter = list(nodeiter)
                if nodeiter:
                    yield changegroup.chunkheader(len(fname))
                    yield fname
                    lookup = lookuprevlink_func(filerevlog)
                    for chnk in filerevlog.group(nodeiter, lookup):
                        yield chnk

            yield changegroup.closechunk()

        if nodes:
            self.hook('outgoing', node=hex(nodes[0]), source=source)

        return util.chunkbuffer(gengroup())
1943 1943
    def addchangegroup(self, source, srctype, url, emptyok=False):
        """add changegroup to repo.

        return values:
        - nothing changed or no source: 0
        - more heads than before: 1+added heads (2..n)
        - less heads than before: -1-removed heads (-2..-n)
        - number of heads stays the same: 1
        """
        # linkrev mapper for changelog chunks: each new changeset links
        # to the next changelog revision
        def csmap(x):
            self.ui.debug(_("add changeset %s\n") % short(x))
            return cl.count()

        # linkrev mapper for manifest/file chunks: link to the changeset
        # the node came with
        def revmap(x):
            return cl.rev(x)

        if not source:
            return 0

        self.hook('prechangegroup', throw=True, source=srctype, url=url)

        changesets = files = revisions = 0

        # write changelog data to temp files so concurrent readers will not see
        # inconsistent view
        cl = self.changelog
        cl.delayupdate()
        oldheads = len(cl.heads())

        tr = self.transaction()
        try:
            # weakref proxy avoids a reference cycle with the transaction
            trp = weakref.proxy(tr)
            # pull off the changeset group
            self.ui.status(_("adding changesets\n"))
            cor = cl.count() - 1
            chunkiter = changegroup.chunkiter(source)
            if cl.addgroup(chunkiter, csmap, trp, 1) is None and not emptyok:
                raise util.Abort(_("received changelog group is empty"))
            cnr = cl.count() - 1
            changesets = cnr - cor

            # pull off the manifest group
            self.ui.status(_("adding manifests\n"))
            chunkiter = changegroup.chunkiter(source)
            # no need to check for empty manifest group here:
            # if the result of the merge of 1 and 2 is the same in 3 and 4,
            # no new manifest will be created and the manifest group will
            # be empty during the pull
            self.manifest.addgroup(chunkiter, revmap, trp)

            # process the files
            self.ui.status(_("adding file changes\n"))
            while 1:
                f = changegroup.getchunk(source)
                if not f:
                    # empty chunk marks the end of the file list
                    break
                self.ui.debug(_("adding %s revisions\n") % f)
                fl = self.file(f)
                o = fl.count()
                chunkiter = changegroup.chunkiter(source)
                if fl.addgroup(chunkiter, revmap, trp) is None:
                    raise util.Abort(_("received file revlog group is empty"))
                revisions += fl.count() - o
                files += 1

            # make changelog see real files again
            cl.finalize(trp)

            newheads = len(self.changelog.heads())
            heads = ""
            if oldheads and newheads != oldheads:
                heads = _(" (%+d heads)") % (newheads - oldheads)

            self.ui.status(_("added %d changesets"
                             " with %d changes to %d files%s\n")
                             % (changesets, revisions, files, heads))

            if changesets > 0:
                self.hook('pretxnchangegroup', throw=True,
                          node=hex(self.changelog.node(cor+1)), source=srctype,
                          url=url)

            tr.close()
        finally:
            del tr

        if changesets > 0:
            # forcefully update the on-disk branch cache
            self.ui.debug(_("updating the branch cache\n"))
            self.branchtags()
            self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
                      source=srctype, url=url)

            for i in xrange(cor + 1, cnr + 1):
                self.hook("incoming", node=hex(self.changelog.node(i)),
                          source=srctype, url=url)

        # never return 0 here:
        if newheads < oldheads:
            return newheads - oldheads - 1
        else:
            return newheads - oldheads + 1
2046 2046
2047 2047
2048 2048 def stream_in(self, remote):
2049 2049 fp = remote.stream_out()
2050 2050 l = fp.readline()
2051 2051 try:
2052 2052 resp = int(l)
2053 2053 except ValueError:
2054 2054 raise util.UnexpectedOutput(
2055 2055 _('Unexpected response from remote server:'), l)
2056 2056 if resp == 1:
2057 2057 raise util.Abort(_('operation forbidden by server'))
2058 2058 elif resp == 2:
2059 2059 raise util.Abort(_('locking the remote repository failed'))
2060 2060 elif resp != 0:
2061 2061 raise util.Abort(_('the server sent an unknown error code'))
2062 2062 self.ui.status(_('streaming all changes\n'))
2063 2063 l = fp.readline()
2064 2064 try:
2065 2065 total_files, total_bytes = map(int, l.split(' ', 1))
2066 2066 except ValueError, TypeError:
2067 2067 raise util.UnexpectedOutput(
2068 2068 _('Unexpected response from remote server:'), l)
2069 2069 self.ui.status(_('%d files to transfer, %s of data\n') %
2070 2070 (total_files, util.bytecount(total_bytes)))
2071 2071 start = time.time()
2072 2072 for i in xrange(total_files):
2073 2073 # XXX doesn't support '\n' or '\r' in filenames
2074 2074 l = fp.readline()
2075 2075 try:
2076 2076 name, size = l.split('\0', 1)
2077 2077 size = int(size)
2078 2078 except ValueError, TypeError:
2079 2079 raise util.UnexpectedOutput(
2080 2080 _('Unexpected response from remote server:'), l)
2081 2081 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
2082 2082 ofp = self.sopener(name, 'w')
2083 2083 for chunk in util.filechunkiter(fp, limit=size):
2084 2084 ofp.write(chunk)
2085 2085 ofp.close()
2086 2086 elapsed = time.time() - start
2087 2087 if elapsed <= 0:
2088 2088 elapsed = 0.001
2089 2089 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
2090 2090 (util.bytecount(total_bytes), elapsed,
2091 2091 util.bytecount(total_bytes / elapsed)))
2092 2092 self.invalidate()
2093 2093 return len(self.heads()) + 1
2094 2094
2095 2095 def clone(self, remote, heads=[], stream=False):
2096 2096 '''clone remote repository.
2097 2097
2098 2098 keyword arguments:
2099 2099 heads: list of revs to clone (forces use of pull)
2100 2100 stream: use streaming clone if possible'''
2101 2101
2102 2102 # now, all clients that can request uncompressed clones can
2103 2103 # read repo formats supported by all servers that can serve
2104 2104 # them.
2105 2105
2106 2106 # if revlog format changes, client will have to check version
2107 2107 # and format flags on "stream" capability, and use
2108 2108 # uncompressed only if compatible.
2109 2109
2110 2110 if stream and not heads and remote.capable('stream'):
2111 2111 return self.stream_in(remote)
2112 2112 return self.pull(remote, heads)
2113 2113
2114 2114 # used to avoid circular references so destructors work
2115 2115 def aftertrans(files):
2116 2116 renamefiles = [tuple(t) for t in files]
2117 2117 def a():
2118 2118 for src, dest in renamefiles:
2119 2119 util.rename(src, dest)
2120 2120 return a
2121 2121
def instance(ui, path, create):
    """Open (or create) the local repository at path, dropping any
    leading 'file' scheme from the path first."""
    local_path = util.drop_scheme('file', path)
    return localrepository(ui, local_path, create)
2124 2124
def islocal(path):
    """Repositories handled by this module are always local."""
    return True
@@ -1,116 +1,116 b''
1 1 #! /usr/bin/env python
2 2
3 3 import sys
4 from _lsprof import Profiler, profiler_entry, profiler_subentry
4 from _lsprof import Profiler, profiler_entry
5 5
6 6 __all__ = ['profile', 'Stats']
7 7
def profile(f, *args, **kwds):
    """Profile a single call of f(*args, **kwds) and return a Stats
    object wrapping the raw profiler entries."""
    prof = Profiler()
    prof.enable(subcalls=True, builtins=True)
    try:
        f(*args, **kwds)
    finally:
        # always stop profiling, even if f raised
        prof.disable()
    return Stats(prof.getstats())
17 17
18 18
class Stats(object):
    """Wrapper around the entry list returned by Profiler.getstats(),
    adding sorting, pretty-printing, and pickling support."""

    def __init__(self, data):
        # data: list of profiler_entry objects (or, after freeze(),
        # tuples whose code field is a string description)
        self.data = data

    def sort(self, crit="inlinetime"):
        """Sort entries (and each entry's sub-calls) by the profiler_entry
        field named crit, largest first."""
        if crit not in profiler_entry.__dict__:
            # fix: use the call form instead of the deprecated
            # "raise ValueError, ..." statement syntax (removed in Py3)
            raise ValueError("Can't sort by %s" % crit)
        self.data.sort(lambda b, a: cmp(getattr(a, crit),
                                        getattr(b, crit)))
        for e in self.data:
            if e.calls:
                e.calls.sort(lambda b, a: cmp(getattr(a, crit),
                                              getattr(b, crit)))

    def pprint(self, top=None, file=None, limit=None, climit=None):
        """Print a table of entries to file (default: stdout).

        top: show only the first top entries
        limit: stop after this many printed rows in total
        climit: show at most this many sub-calls per entry
        """
        if file is None:
            file = sys.stdout
        d = self.data
        if top is not None:
            d = d[:top]
        cols = "% 12s %12s %11.4f %11.4f %s\n"
        hcols = "% 12s %12s %12s %12s %s\n"
        # (an unused "cols2" format string was removed here)
        file.write(hcols % ("CallCount", "Recursive", "Total(ms)",
                            "Inline(ms)", "module:lineno(function)"))
        count = 0
        for e in d:
            file.write(cols % (e.callcount, e.reccallcount, e.totaltime,
                               e.inlinetime, label(e.code)))
            count += 1
            if limit is not None and count == limit:
                return
            ccount = 0
            if e.calls:
                for se in e.calls:
                    # sub-call rows are prefixed with '+'
                    file.write(cols % ("+%s" % se.callcount, se.reccallcount,
                                       se.totaltime, se.inlinetime,
                                       "+%s" % label(se.code)))
                    count += 1
                    ccount += 1
                    if limit is not None and count == limit:
                        return
                    if climit is not None and ccount == climit:
                        break

    def freeze(self):
        """Replace all references to code objects with string
        descriptions; this makes it possible to pickle the instance."""

        # this code is probably rather ickier than it needs to be!
        for i in range(len(self.data)):
            e = self.data[i]
            if not isinstance(e.code, str):
                self.data[i] = type(e)((label(e.code),) + e[1:])
            if e.calls:
                for j in range(len(e.calls)):
                    se = e.calls[j]
                    if not isinstance(se.code, str):
                        e.calls[j] = type(se)((label(se.code),) + se[1:])
82 82
83 83 _fn2mod = {}
84 84
def label(code):
    """Return a 'module:lineno(function)' description for a code object.

    Strings (already-frozen descriptions) are returned unchanged.
    Module lookups are cached in the module-level _fn2mod dict.
    """
    if isinstance(code, str):
        return code
    try:
        mname = _fn2mod[code.co_filename]
    except KeyError:
        # find which loaded module this filename belongs to; fall back
        # to a bracketed filename when no module matches
        mname = '<%s>' % code.co_filename
        for modname, mod in sys.modules.items():
            if mod is None:
                continue
            if not hasattr(mod, '__file__'):
                continue
            if not isinstance(mod.__file__, str):
                continue
            if mod.__file__.startswith(code.co_filename):
                mname = modname
                break
        _fn2mod[code.co_filename] = mname

    return '%s:%d(%s)' % (mname, code.co_firstlineno, code.co_name)
105 105
106 106
if __name__ == '__main__':
    # Command-line mode: profile an arbitrary Python script.
    import os
    # drop our own name so the target script sees its own argv
    sys.argv = sys.argv[1:]
    if not sys.argv:
        print >> sys.stderr, "usage: lsprof.py <script> <arguments...>"
        sys.exit(2)
    # let the target script import modules from its own directory
    sys.path.insert(0, os.path.abspath(os.path.dirname(sys.argv[0])))
    stats = profile(execfile, sys.argv[0], globals(), locals())
    stats.sort()
    stats.pprint()
@@ -1,214 +1,214 b''
1 1 # manifest.py - manifest revision class for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import bin, hex, nullid
9 9 from revlog import revlog, RevlogError
10 10 from i18n import _
11 import array, bisect, struct, mdiff
11 import array, struct, mdiff
12 12
class manifestdict(dict):
    """A manifest: maps file name -> node id, with per-file flag strings."""

    def __init__(self, mapping=None, flags=None):
        # Note: the flags dict passed in is stored by reference, not copied.
        dict.__init__(self, mapping if mapping is not None else {})
        self._flags = flags if flags is not None else {}

    def flags(self, f):
        """Return the flag string for f ("" when no flag is recorded)."""
        return self._flags.get(f, "")

    def execf(self, f):
        "test for executable in manifest flags"
        return "x" in self.flags(f)

    def linkf(self, f):
        "test for symlink in manifest flags"
        return "l" in self.flags(f)

    def set(self, f, execf=False, linkf=False):
        """Record f's flag; symlink takes precedence over executable."""
        if linkf:
            flag = "l"
        elif execf:
            flag = "x"
        else:
            flag = ""
        self._flags[f] = flag

    def copy(self):
        """Return an independent shallow copy of entries and flags."""
        return manifestdict(dict.copy(self), dict.copy(self._flags))
33 33
class manifest(revlog):
    """Revlog storing manifest revisions (the 00manifest.i file).

    Each manifest revision is a text of lines "<file>\\0<40 hex chars><flags>\\n",
    sorted by file name.  Two single-entry caches speed up repeated access:
    mapcache holds (node, parsed manifestdict) for the last revision handled,
    and listcache holds that revision's raw text as an array('c') so add()
    can patch it in place instead of rebuilding it.
    """
    def __init__(self, opener):
        # (node, manifestdict) for the most recently read/added revision.
        self.mapcache = None
        # raw text of that same revision, as array('c'), for in-place edits.
        self.listcache = None
        revlog.__init__(self, opener, "00manifest.i")

    def parse(self, lines):
        """Parse manifest text into a manifestdict.

        Each line is "<file>\\0<40 hex chars>[<flags>]"; anything after the
        40 hex characters is stored as the file's flag string.
        """
        mfdict = manifestdict()
        fdict = mfdict._flags  # write flags directly into the shared dict
        for l in lines.splitlines():
            f, n = l.split('\0')
            if len(n) > 40:
                fdict[f] = n[40:]
                mfdict[f] = bin(n[:40])
            else:
                mfdict[f] = bin(n)
        return mfdict

    def readdelta(self, node):
        # Parse only the text produced by this revision's stored delta.
        return self.parse(mdiff.patchtext(self.delta(node)))

    def read(self, node):
        """Return the manifestdict for the given node, using mapcache."""
        if node == nullid: return manifestdict() # don't upset local cache
        if self.mapcache and self.mapcache[0] == node:
            return self.mapcache[1]
        text = self.revision(node)
        # keep the raw text around so add() can patch it incrementally
        self.listcache = array.array('c', text)
        mapping = self.parse(text)
        self.mapcache = (node, mapping)
        return mapping

    def _search(self, m, s, lo=0, hi=None):
        '''return a tuple (start, end) that says where to find s within m.

        If the string is found m[start:end] are the line containing
        that string.  If start == end the string was not found and
        they indicate the proper sorted insertion point.  This was
        taken from bisect_left, and modified to find line start/end as
        it goes along.

        m should be a buffer or a string
        s is a string'''
        def advance(i, c):
            # move i forward to the next occurrence of character c (or EOF)
            while i < lenm and m[i] != c:
                i += 1
            return i
        lenm = len(m)
        if not hi:
            hi = lenm
        while lo < hi:
            mid = (lo + hi) // 2
            # back up to the start of the line containing mid
            start = mid
            while start > 0 and m[start-1] != '\n':
                start -= 1
            end = advance(start, '\0')
            if m[start:end] < s:
                # we know that after the null there are 40 bytes of sha1
                # this translates to the bisect lo = mid + 1
                lo = advance(end + 40, '\n') + 1
            else:
                # this translates to the bisect hi = mid
                hi = start
        end = advance(lo, '\0')
        found = m[lo:end]
        if cmp(s, found) == 0:
            # we know that after the null there are 40 bytes of sha1
            end = advance(end + 40, '\n')
            return (lo, end+1)
        else:
            return (lo, lo)

    def find(self, node, f):
        '''look up entry for a single file efficiently.
        return (node, flags) pair if found, (None, None) if not.'''
        if self.mapcache and node == self.mapcache[0]:
            return self.mapcache[1].get(f), self.mapcache[1].flags(f)
        text = self.revision(node)
        start, end = self._search(text, f)
        if start == end:
            return None, None
        l = text[start:end]
        f, n = l.split('\0')
        # strip the trailing newline from the flag portion
        return bin(n[:40]), n[40:-1]

    def add(self, map, transaction, link, p1=None, p2=None,
            changed=None):
        """Store a new manifest revision and return its node.

        map is a manifestdict.  changed, when given, is a pair of filename
        lists (added/modified, removed) that lets us patch the cached text
        of p1 in place instead of rebuilding the whole text from map.
        """
        # apply the changes collected during the bisect loop to our addlist
        # return a delta suitable for addrevision
        def addlistdelta(addlist, x):
            # start from the bottom up
            # so changes to the offsets don't mess things up.
            i = len(x)
            while i > 0:
                i -= 1
                start = x[i][0]
                end = x[i][1]
                if x[i][2]:
                    addlist[start:end] = array.array('c', x[i][2])
                else:
                    del addlist[start:end]
            # serialize each hunk as (start, end, length) + replacement text
            return "".join([struct.pack(">lll", d[0], d[1], len(d[2])) + d[2]
                            for d in x ])

        def checkforbidden(f):
            # newlines/CRs would corrupt the line-oriented manifest format
            if '\n' in f or '\r' in f:
                raise RevlogError(_("'\\n' and '\\r' disallowed in filenames"))

        # if we're using the listcache, make sure it is valid and
        # parented by the same node we're diffing against
        # NOTE(review): this assumes mapcache is set whenever listcache and
        # p1 are truthy (mapcache[0] is read without a None check) -- confirm.
        if not (changed and self.listcache and p1 and self.mapcache[0] == p1):
            # slow path: rebuild the full manifest text from map
            files = map.keys()
            files.sort()

            for f in files:
                checkforbidden(f)

            # if this is changed to support newlines in filenames,
            # be sure to check the templates/ dir again (especially *-raw.tmpl)
            text = ["%s\000%s%s\n" % (f, hex(map[f]), map.flags(f))
                    for f in files]
            self.listcache = array.array('c', "".join(text))
            cachedelta = None
        else:
            # fast path: patch the cached p1 text line by line
            addlist = self.listcache

            for f in changed[0]:
                checkforbidden(f)
            # combine the changed lists into one list for sorting
            # tag 0 = add/replace a line, tag 1 = delete a line
            work = [[x, 0] for x in changed[0]]
            work[len(work):] = [[x, 1] for x in changed[1]]
            work.sort()

            delta = []
            dstart = None
            dend = None
            dline = [""]
            start = 0
            # zero copy representation of addlist as a buffer
            addbuf = buffer(addlist)

            # start with a readonly loop that finds the offset of
            # each line and creates the deltas
            for w in work:
                f = w[0]
                # bs will either be the index of the item or the insert point
                start, end = self._search(addbuf, f, start)
                if w[1] == 0:
                    l = "%s\000%s%s\n" % (f, hex(map[f]), map.flags(f))
                else:
                    l = ""
                if start == end and w[1] == 1:
                    # item we want to delete was not found, error out
                    raise AssertionError(
                        _("failed to remove %s from manifest") % f)
                if dstart != None and dstart <= start and dend >= start:
                    # overlaps/abuts the current hunk: extend it
                    if dend < end:
                        dend = end
                    if l:
                        dline.append(l)
                else:
                    # flush the current hunk and start a new one
                    if dstart != None:
                        delta.append([dstart, dend, "".join(dline)])
                    dstart = start
                    dend = end
                    dline = [l]

            if dstart != None:
                delta.append([dstart, dend, "".join(dline)])
            # apply the delta to the addlist, and get a delta for addrevision
            cachedelta = addlistdelta(addlist, delta)

            # the delta is only valid if we've been processing the tip revision
            if self.mapcache[0] != self.tip():
                cachedelta = None
            self.listcache = addlist

        n = self.addrevision(buffer(self.listcache), transaction, link,
                             p1, p2, cachedelta)
        self.mapcache = (n, map)

        return n
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
General Comments 0
You need to be logged in to leave comments. Login now