Merge with crew
Matt Mackall
r4965:4106dde1 merge default
@@ -1,178 +1,204 @@
1 # churn.py - create a graph showing who changed the most lines
1 # churn.py - create a graph showing who changed the most lines
2 #
2 #
3 # Copyright 2006 Josef "Jeff" Sipek <jeffpc@josefsipek.net>
3 # Copyright 2006 Josef "Jeff" Sipek <jeffpc@josefsipek.net>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7 #
7 #
8 #
8 #
9 # The aliases map file format is simple: one alias per line, in the following
9 # The aliases map file format is simple: one alias per line, in the following
10 # format:
10 # format:
11 #
11 #
12 # <alias email> <actual email>
12 # <alias email> <actual email>
13
13
14 import sys
15 from mercurial.i18n import gettext as _
14 from mercurial.i18n import gettext as _
16 from mercurial import hg, mdiff, cmdutil, ui, util, templater, node
15 from mercurial import hg, mdiff, cmdutil, ui, util, templater, node
16 import os, sys
17
18 def get_tty_width():
19 if 'COLUMNS' in os.environ:
20 try:
21 return int(os.environ['COLUMNS'])
22 except ValueError:
23 pass
24 try:
25 import termios, fcntl, struct
26 buf = 'abcd'
27 for dev in (sys.stdout, sys.stdin):
28 try:
29 if buf != 'abcd':
30 break
31 fd = dev.fileno()
32 if not os.isatty(fd):
33 continue
34 buf = fcntl.ioctl(fd, termios.TIOCGWINSZ, buf)
35 except ValueError:
36 pass
37 if buf != 'abcd':
38 return struct.unpack('hh', buf)[1]
39 except ImportError:
40 pass
41 return 80
17
42
18 def __gather(ui, repo, node1, node2):
43 def __gather(ui, repo, node1, node2):
19 def dirtywork(f, mmap1, mmap2):
44 def dirtywork(f, mmap1, mmap2):
20 lines = 0
45 lines = 0
21
46
22 to = mmap1 and repo.file(f).read(mmap1[f]) or None
47 to = mmap1 and repo.file(f).read(mmap1[f]) or None
23 tn = mmap2 and repo.file(f).read(mmap2[f]) or None
48 tn = mmap2 and repo.file(f).read(mmap2[f]) or None
24
49
25 diff = mdiff.unidiff(to, "", tn, "", f).split("\n")
50 diff = mdiff.unidiff(to, "", tn, "", f).split("\n")
26
51
27 for line in diff:
52 for line in diff:
28 if not line:
53 if not line:
29 continue # skip EOF
54 continue # skip EOF
30 if line.startswith(" "):
55 if line.startswith(" "):
31 continue # context line
56 continue # context line
32 if line.startswith("--- ") or line.startswith("+++ "):
57 if line.startswith("--- ") or line.startswith("+++ "):
33 continue # beginning of diff
58 continue # beginning of diff
34 if line.startswith("@@ "):
59 if line.startswith("@@ "):
35 continue # info line
60 continue # info line
36
61
37 # changed lines
62 # changed lines
38 lines += 1
63 lines += 1
39
64
40 return lines
65 return lines
41
66
42 ##
67 ##
43
68
44 lines = 0
69 lines = 0
45
70
46 changes = repo.status(node1, node2, None, util.always)[:5]
71 changes = repo.status(node1, node2, None, util.always)[:5]
47
72
48 modified, added, removed, deleted, unknown = changes
73 modified, added, removed, deleted, unknown = changes
49
74
50 who = repo.changelog.read(node2)[1]
75 who = repo.changelog.read(node2)[1]
51 who = templater.email(who) # get the email of the person
76 who = templater.email(who) # get the email of the person
52
77
53 mmap1 = repo.manifest.read(repo.changelog.read(node1)[0])
78 mmap1 = repo.manifest.read(repo.changelog.read(node1)[0])
54 mmap2 = repo.manifest.read(repo.changelog.read(node2)[0])
79 mmap2 = repo.manifest.read(repo.changelog.read(node2)[0])
55 for f in modified:
80 for f in modified:
56 lines += dirtywork(f, mmap1, mmap2)
81 lines += dirtywork(f, mmap1, mmap2)
57
82
58 for f in added:
83 for f in added:
59 lines += dirtywork(f, None, mmap2)
84 lines += dirtywork(f, None, mmap2)
60
85
61 for f in removed:
86 for f in removed:
62 lines += dirtywork(f, mmap1, None)
87 lines += dirtywork(f, mmap1, None)
63
88
64 for f in deleted:
89 for f in deleted:
65 lines += dirtywork(f, mmap1, mmap2)
90 lines += dirtywork(f, mmap1, mmap2)
66
91
67 for f in unknown:
92 for f in unknown:
68 lines += dirtywork(f, mmap1, mmap2)
93 lines += dirtywork(f, mmap1, mmap2)
69
94
70 return (who, lines)
95 return (who, lines)
71
96
72 def gather_stats(ui, repo, amap, revs=None, progress=False):
97 def gather_stats(ui, repo, amap, revs=None, progress=False):
73 stats = {}
98 stats = {}
74
99
75 cl = repo.changelog
100 cl = repo.changelog
76
101
77 if not revs:
102 if not revs:
78 revs = range(0, cl.count())
103 revs = range(0, cl.count())
79
104
80 nr_revs = len(revs)
105 nr_revs = len(revs)
81 cur_rev = 0
106 cur_rev = 0
82
107
83 for rev in revs:
108 for rev in revs:
84 cur_rev += 1 # next revision
109 cur_rev += 1 # next revision
85
110
86 node2 = cl.node(rev)
111 node2 = cl.node(rev)
87 node1 = cl.parents(node2)[0]
112 node1 = cl.parents(node2)[0]
88
113
89 if cl.parents(node2)[1] != node.nullid:
114 if cl.parents(node2)[1] != node.nullid:
90 ui.note(_('Revision %d is a merge, ignoring...\n') % (rev,))
115 ui.note(_('Revision %d is a merge, ignoring...\n') % (rev,))
91 continue
116 continue
92
117
93 who, lines = __gather(ui, repo, node1, node2)
118 who, lines = __gather(ui, repo, node1, node2)
94
119
95 # remap the owner if possible
120 # remap the owner if possible
96 if amap.has_key(who):
121 if amap.has_key(who):
97 ui.note("using '%s' alias for '%s'\n" % (amap[who], who))
122 ui.note("using '%s' alias for '%s'\n" % (amap[who], who))
98 who = amap[who]
123 who = amap[who]
99
124
100 if not stats.has_key(who):
125 if not stats.has_key(who):
101 stats[who] = 0
126 stats[who] = 0
102 stats[who] += lines
127 stats[who] += lines
103
128
104 ui.note("rev %d: %d lines by %s\n" % (rev, lines, who))
129 ui.note("rev %d: %d lines by %s\n" % (rev, lines, who))
105
130
106 if progress:
131 if progress:
107 if int(100.0*(cur_rev - 1)/nr_revs) < int(100.0*cur_rev/nr_revs):
132 if int(100.0*(cur_rev - 1)/nr_revs) < int(100.0*cur_rev/nr_revs):
108 ui.write("%d%%.." % (int(100.0*cur_rev/nr_revs),))
133 ui.write("%d%%.." % (int(100.0*cur_rev/nr_revs),))
109 sys.stdout.flush()
134 sys.stdout.flush()
110
135
111 if progress:
136 if progress:
112 ui.write("done\n")
137 ui.write("done\n")
113 sys.stdout.flush()
138 sys.stdout.flush()
114
139
115 return stats
140 return stats
116
141
117 def churn(ui, repo, **opts):
142 def churn(ui, repo, **opts):
118 "Graphs the number of lines changed"
143 "Graphs the number of lines changed"
119
144
120 def pad(s, l):
145 def pad(s, l):
121 if len(s) < l:
146 if len(s) < l:
122 return s + " " * (l-len(s))
147 return s + " " * (l-len(s))
123 return s[0:l]
148 return s[0:l]
124
149
125 def graph(n, maximum, width, char):
150 def graph(n, maximum, width, char):
126 n = int(n * width / float(maximum))
151 n = int(n * width / float(maximum))
127
152
128 return char * (n)
153 return char * (n)
129
154
130 def get_aliases(f):
155 def get_aliases(f):
131 aliases = {}
156 aliases = {}
132
157
133 for l in f.readlines():
158 for l in f.readlines():
134 l = l.strip()
159 l = l.strip()
135 alias, actual = l.split(" ")
160 alias, actual = l.split(" ")
136 aliases[alias] = actual
161 aliases[alias] = actual
137
162
138 return aliases
163 return aliases
139
164
140 amap = {}
165 amap = {}
141 aliases = opts.get('aliases')
166 aliases = opts.get('aliases')
142 if aliases:
167 if aliases:
143 try:
168 try:
144 f = open(aliases,"r")
169 f = open(aliases,"r")
145 except OSError, e:
170 except OSError, e:
146 print "Error: " + e
171 print "Error: " + e
147 return
172 return
148
173
149 amap = get_aliases(f)
174 amap = get_aliases(f)
150 f.close()
175 f.close()
151
176
152 revs = [int(r) for r in cmdutil.revrange(repo, opts['rev'])]
177 revs = [int(r) for r in cmdutil.revrange(repo, opts['rev'])]
153 revs.sort()
178 revs.sort()
154 stats = gather_stats(ui, repo, amap, revs, opts.get('progress'))
179 stats = gather_stats(ui, repo, amap, revs, opts.get('progress'))
155
180
156 # make a list of tuples (name, lines) and sort it in descending order
181 # make a list of tuples (name, lines) and sort it in descending order
157 ordered = stats.items()
182 ordered = stats.items()
158 ordered.sort(lambda x, y: cmp(y[1], x[1]))
183 ordered.sort(lambda x, y: cmp(y[1], x[1]))
159
184
160 maximum = ordered[0][1]
185 maximum = ordered[0][1]
161
186
162 ui.note("Assuming 80 character terminal\n")
187 width = get_tty_width()
163 width = 80 - 1
188 ui.note(_("assuming %i character terminal\n") % width)
189 width -= 1
164
190
165 for i in ordered:
191 for i in ordered:
166 person = i[0]
192 person = i[0]
167 lines = i[1]
193 lines = i[1]
168 print "%s %6d %s" % (pad(person, 20), lines,
194 print "%s %6d %s" % (pad(person, 20), lines,
169 graph(lines, maximum, width - 20 - 1 - 6 - 2 - 2, '*'))
195 graph(lines, maximum, width - 20 - 1 - 6 - 2 - 2, '*'))
170
196
171 cmdtable = {
197 cmdtable = {
172 "churn":
198 "churn":
173 (churn,
199 (churn,
174 [('r', 'rev', [], _('limit statistics to the specified revisions')),
200 [('r', 'rev', [], _('limit statistics to the specified revisions')),
175 ('', 'aliases', '', _('file with email aliases')),
201 ('', 'aliases', '', _('file with email aliases')),
176 ('', 'progress', None, _('show progress'))],
202 ('', 'progress', None, _('show progress'))],
177 'hg churn [-r revision range] [-a file] [--progress]'),
203 'hg churn [-r revision range] [-a file] [--progress]'),
178 }
204 }
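
Below is a small, self-contained sketch (not part of this changeset; the addresses and line counts are made up) of the two ideas the churn extension combines: remapping authors through an '<alias email> <actual email>' map and scaling each author's line count into a bar of '*' characters.

    # Hypothetical data; this only mirrors the remapping in gather_stats()
    # and the scaling in graph() above.
    aliases = {"jeff@laptop.example": "jeffpc@josefsipek.net"}
    counted = [("jeff@laptop.example", 120), ("mpm@selenic.com", 300)]

    stats = {}
    for who, lines in counted:
        who = aliases.get(who, who)          # remap the owner if possible
        stats[who] = stats.get(who, 0) + lines

    maximum = max(stats.values())
    width = 40                               # pretend the bars get 40 columns
    ordered = stats.items()
    ordered.sort(lambda x, y: cmp(y[1], x[1]))
    for who, lines in ordered:
        bar = "*" * int(lines * width / float(maximum))
        print "%-25s %6d %s" % (who, lines, bar)

With a real repository, 'hg churn --aliases FILE --progress' produces output of the same shape, with the width coming from get_tty_width() rather than a constant.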
@@ -1,77 +1,76 @@
1 # Copyright (C) 2007 Brendan Cully <brendan@kublai.com>
1 # Copyright (C) 2007 Brendan Cully <brendan@kublai.com>
2 # This file is published under the GNU GPL.
2 # This file is published under the GNU GPL.
3
3
4 '''allow user-defined command aliases
4 '''allow user-defined command aliases
5
5
6 To use, create entries in your hgrc of the form
6 To use, create entries in your hgrc of the form
7
7
8 [alias]
8 [alias]
9 mycmd = cmd --args
9 mycmd = cmd --args
10 '''
10 '''
11
11
12 from mercurial.cmdutil import findcmd, UnknownCommand, AmbiguousCommand
12 from mercurial.cmdutil import findcmd, UnknownCommand, AmbiguousCommand
13 from mercurial import commands
13 from mercurial import commands
14
14
15 cmdtable = {}
15 cmdtable = {}
16
16
17 class RecursiveCommand(Exception): pass
17 class RecursiveCommand(Exception): pass
18
18
19 class lazycommand(object):
19 class lazycommand(object):
20 '''defer command lookup until needed, so that extensions loaded
20 '''defer command lookup until needed, so that extensions loaded
21 after alias can be aliased'''
21 after alias can be aliased'''
22 def __init__(self, ui, name, target):
22 def __init__(self, ui, name, target):
23 self._ui = ui
23 self._ui = ui
24 self._name = name
24 self._name = name
25 self._target = target
25 self._target = target
26 self._cmd = None
26 self._cmd = None
27
27
28 def __len__(self):
28 def __len__(self):
29 self._resolve()
29 self._resolve()
30 return len(self._cmd)
30 return len(self._cmd)
31
31
32 def __getitem__(self, key):
32 def __getitem__(self, key):
33 self._resolve()
33 self._resolve()
34 return self._cmd[key]
34 return self._cmd[key]
35
35
36 def __iter__(self):
36 def __iter__(self):
37 self._resolve()
37 self._resolve()
38 return self._cmd.__iter__()
38 return self._cmd.__iter__()
39
39
40 def _resolve(self):
40 def _resolve(self):
41 if self._cmd is not None:
41 if self._cmd is not None:
42 return
42 return
43
43
44 try:
44 try:
45 self._cmd = findcmd(self._ui, self._target)[1]
45 self._cmd = findcmd(self._ui, self._target)[1]
46 if self._cmd == self:
46 if self._cmd == self:
47 raise RecursiveCommand()
47 raise RecursiveCommand()
48 if self._target in commands.norepo.split(' '):
48 if self._target in commands.norepo.split(' '):
49 commands.norepo += ' %s' % self._name
49 commands.norepo += ' %s' % self._name
50 return
50 return
51 except UnknownCommand:
51 except UnknownCommand:
52 msg = '*** [alias] %s: command %s is unknown' % \
52 msg = '*** [alias] %s: command %s is unknown' % \
53 (self._name, self._target)
53 (self._name, self._target)
54 except AmbiguousCommand:
54 except AmbiguousCommand:
55 msg = '*** [alias] %s: command %s is ambiguous' % \
55 msg = '*** [alias] %s: command %s is ambiguous' % \
56 (self._name, self._target)
56 (self._name, self._target)
57 except RecursiveCommand:
57 except RecursiveCommand:
58 msg = '*** [alias] %s: circular dependency on %s' % \
58 msg = '*** [alias] %s: circular dependency on %s' % \
59 (self._name, self._target)
59 (self._name, self._target)
60 def nocmd(*args, **opts):
60 def nocmd(*args, **opts):
61 self._ui.warn(msg + '\n')
61 self._ui.warn(msg + '\n')
62 return 1
62 return 1
63 nocmd.__doc__ = msg
63 nocmd.__doc__ = msg
64 self._cmd = (nocmd, [], '')
64 self._cmd = (nocmd, [], '')
65 commands.norepo += ' %s' % self._name
65 commands.norepo += ' %s' % self._name
66
66
67 def uisetup(ui):
67 def uisetup(ui):
68 for cmd, target in ui.configitems('alias'):
68 for cmd, target in ui.configitems('alias'):
69 if not target:
69 if not target:
70 ui.warn('*** [alias] %s: no definition\n' % cmd)
70 ui.warn('*** [alias] %s: no definition\n' % cmd)
71 continue
71 continue
72 args = target.split(' ')
72 args = target.split(' ')
73 tcmd = args.pop(0)
73 tcmd = args.pop(0)
74 if args:
74 if args:
75 pui = ui.parentui or ui
75 ui.setconfig('defaults', cmd, ' '.join(args))
76 pui.setconfig('defaults', cmd, ' '.join(args))
77 cmdtable[cmd] = lazycommand(ui, cmd, tcmd)
76 cmdtable[cmd] = lazycommand(ui, cmd, tcmd)
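
As a side note, the deferred lookup that lazycommand implements is a general pattern. Here is a minimal, hypothetical sketch of it (plain Python, no Mercurial APIs) showing why resolving on first use lets targets registered later still be found:

    # Hedged illustration only: 'registry' and 'lazyentry' are invented
    # names standing in for Mercurial's command table and lazycommand.
    registry = {}

    class lazyentry(object):
        def __init__(self, name, target):
            self._name = name
            self._target = target
            self._value = None

        def _resolve(self):
            if self._value is None:
                # the lookup happens here, on first use,
                # not when the alias is defined
                self._value = registry[self._target]
            return self._value

        def __call__(self, *args, **kwargs):
            return self._resolve()(*args, **kwargs)

    st = lazyentry('st', 'status')             # alias defined first
    registry['status'] = lambda: 'M file.txt'  # target registered afterwards
    print st()                                 # resolution only happens now

The real lazycommand proxies the (function, options, synopsis) tuple through __len__, __getitem__ and __iter__ rather than being callable, but the resolve-on-first-use idea is the same.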
@@ -1,353 +1,353 @@
1 # convert.py Foreign SCM converter
1 # convert.py Foreign SCM converter
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from common import NoRepo, converter_source, converter_sink
8 from common import NoRepo, converter_source, converter_sink
9 from cvs import convert_cvs
9 from cvs import convert_cvs
10 from git import convert_git
10 from git import convert_git
11 from hg import convert_mercurial
11 from hg import convert_mercurial
12 from subversion import convert_svn
12 from subversion import convert_svn
13
13
14 import os, shutil
14 import os, shutil
15 from mercurial import hg, ui, util, commands
15 from mercurial import hg, ui, util, commands
16
16
17 commands.norepo += " convert"
17 commands.norepo += " convert"
18
18
19 converters = [convert_cvs, convert_git, convert_svn, convert_mercurial]
19 converters = [convert_cvs, convert_git, convert_svn, convert_mercurial]
20
20
21 def convertsource(ui, path, **opts):
21 def convertsource(ui, path, **opts):
22 for c in converters:
22 for c in converters:
23 if not hasattr(c, 'getcommit'):
23 if not hasattr(c, 'getcommit'):
24 continue
24 continue
25 try:
25 try:
26 return c(ui, path, **opts)
26 return c(ui, path, **opts)
27 except NoRepo:
27 except NoRepo:
28 pass
28 pass
29 raise util.Abort('%s: unknown repository type' % path)
29 raise util.Abort('%s: unknown repository type' % path)
30
30
31 def convertsink(ui, path):
31 def convertsink(ui, path):
32 if not os.path.isdir(path):
32 if not os.path.isdir(path):
33 raise util.Abort("%s: not a directory" % path)
33 raise util.Abort("%s: not a directory" % path)
34 for c in converters:
34 for c in converters:
35 if not hasattr(c, 'putcommit'):
35 if not hasattr(c, 'putcommit'):
36 continue
36 continue
37 try:
37 try:
38 return c(ui, path)
38 return c(ui, path)
39 except NoRepo:
39 except NoRepo:
40 pass
40 pass
41 raise util.Abort('%s: unknown repository type' % path)
41 raise util.Abort('%s: unknown repository type' % path)
42
42
43 class convert(object):
43 class convert(object):
44 def __init__(self, ui, source, dest, mapfile, opts):
44 def __init__(self, ui, source, dest, mapfile, opts):
45
45
46 self.source = source
46 self.source = source
47 self.dest = dest
47 self.dest = dest
48 self.ui = ui
48 self.ui = ui
49 self.opts = opts
49 self.opts = opts
50 self.commitcache = {}
50 self.commitcache = {}
51 self.mapfile = mapfile
51 self.mapfile = mapfile
52 self.mapfilefd = None
52 self.mapfilefd = None
53 self.authors = {}
53 self.authors = {}
54 self.authorfile = None
54 self.authorfile = None
55
55
56 self.map = {}
56 self.map = {}
57 try:
57 try:
58 origmapfile = open(self.mapfile, 'r')
58 origmapfile = open(self.mapfile, 'r')
59 for l in origmapfile:
59 for l in origmapfile:
60 sv, dv = l[:-1].split()
60 sv, dv = l[:-1].split()
61 self.map[sv] = dv
61 self.map[sv] = dv
62 origmapfile.close()
62 origmapfile.close()
63 except IOError:
63 except IOError:
64 pass
64 pass
65
65
66 # Read first the dst author map if any
66 # Read first the dst author map if any
67 authorfile = self.dest.authorfile()
67 authorfile = self.dest.authorfile()
68 if authorfile and os.path.exists(authorfile):
68 if authorfile and os.path.exists(authorfile):
69 self.readauthormap(authorfile)
69 self.readauthormap(authorfile)
70 # Extend/Override with new author map if necessary
70 # Extend/Override with new author map if necessary
71 if opts.get('authors'):
71 if opts.get('authors'):
72 self.readauthormap(opts.get('authors'))
72 self.readauthormap(opts.get('authors'))
73 self.authorfile = self.dest.authorfile()
73 self.authorfile = self.dest.authorfile()
74
74
75 def walktree(self, heads):
75 def walktree(self, heads):
76 '''Return a mapping that identifies the uncommitted parents of every
76 '''Return a mapping that identifies the uncommitted parents of every
77 uncommitted changeset.'''
77 uncommitted changeset.'''
78 visit = heads
78 visit = heads
79 known = {}
79 known = {}
80 parents = {}
80 parents = {}
81 while visit:
81 while visit:
82 n = visit.pop(0)
82 n = visit.pop(0)
83 if n in known or n in self.map: continue
83 if n in known or n in self.map: continue
84 known[n] = 1
84 known[n] = 1
85 self.commitcache[n] = self.source.getcommit(n)
85 self.commitcache[n] = self.source.getcommit(n)
86 cp = self.commitcache[n].parents
86 cp = self.commitcache[n].parents
87 parents[n] = []
87 parents[n] = []
88 for p in cp:
88 for p in cp:
89 parents[n].append(p)
89 parents[n].append(p)
90 visit.append(p)
90 visit.append(p)
91
91
92 return parents
92 return parents
93
93
94 def toposort(self, parents):
94 def toposort(self, parents):
95 '''Return an ordering such that every uncommitted changeset is
95 '''Return an ordering such that every uncommitted changeset is
96 preceded by all its uncommitted ancestors.'''
96 preceded by all its uncommitted ancestors.'''
97 visit = parents.keys()
97 visit = parents.keys()
98 seen = {}
98 seen = {}
99 children = {}
99 children = {}
100
100
101 while visit:
101 while visit:
102 n = visit.pop(0)
102 n = visit.pop(0)
103 if n in seen: continue
103 if n in seen: continue
104 seen[n] = 1
104 seen[n] = 1
105 # Ensure that nodes without parents are present in the 'children'
105 # Ensure that nodes without parents are present in the 'children'
106 # mapping.
106 # mapping.
107 children.setdefault(n, [])
107 children.setdefault(n, [])
108 for p in parents[n]:
108 for p in parents[n]:
109 if not p in self.map:
109 if not p in self.map:
110 visit.append(p)
110 visit.append(p)
111 children.setdefault(p, []).append(n)
111 children.setdefault(p, []).append(n)
112
112
113 s = []
113 s = []
114 removed = {}
114 removed = {}
115 visit = children.keys()
115 visit = children.keys()
116 while visit:
116 while visit:
117 n = visit.pop(0)
117 n = visit.pop(0)
118 if n in removed: continue
118 if n in removed: continue
119 dep = 0
119 dep = 0
120 if n in parents:
120 if n in parents:
121 for p in parents[n]:
121 for p in parents[n]:
122 if p in self.map: continue
122 if p in self.map: continue
123 if p not in removed:
123 if p not in removed:
124 # we're still dependent
124 # we're still dependent
125 visit.append(n)
125 visit.append(n)
126 dep = 1
126 dep = 1
127 break
127 break
128
128
129 if not dep:
129 if not dep:
130 # all n's parents are in the list
130 # all n's parents are in the list
131 removed[n] = 1
131 removed[n] = 1
132 if n not in self.map:
132 if n not in self.map:
133 s.append(n)
133 s.append(n)
134 if n in children:
134 if n in children:
135 for c in children[n]:
135 for c in children[n]:
136 visit.insert(0, c)
136 visit.insert(0, c)
137
137
138 if self.opts.get('datesort'):
138 if self.opts.get('datesort'):
139 depth = {}
139 depth = {}
140 for n in s:
140 for n in s:
141 depth[n] = 0
141 depth[n] = 0
142 pl = [p for p in self.commitcache[n].parents
142 pl = [p for p in self.commitcache[n].parents
143 if p not in self.map]
143 if p not in self.map]
144 if pl:
144 if pl:
145 depth[n] = max([depth[p] for p in pl]) + 1
145 depth[n] = max([depth[p] for p in pl]) + 1
146
146
147 s = [(depth[n], self.commitcache[n].date, n) for n in s]
147 s = [(depth[n], self.commitcache[n].date, n) for n in s]
148 s.sort()
148 s.sort()
149 s = [e[2] for e in s]
149 s = [e[2] for e in s]
150
150
151 return s
151 return s
152
152
153 def mapentry(self, src, dst):
153 def mapentry(self, src, dst):
154 if self.mapfilefd is None:
154 if self.mapfilefd is None:
155 try:
155 try:
156 self.mapfilefd = open(self.mapfile, "a")
156 self.mapfilefd = open(self.mapfile, "a")
157 except IOError, (errno, strerror):
157 except IOError, (errno, strerror):
158 raise util.Abort("Could not open map file %s: %s, %s\n" % (self.mapfile, errno, strerror))
158 raise util.Abort("Could not open map file %s: %s, %s\n" % (self.mapfile, errno, strerror))
159 self.map[src] = dst
159 self.map[src] = dst
160 self.mapfilefd.write("%s %s\n" % (src, dst))
160 self.mapfilefd.write("%s %s\n" % (src, dst))
161 self.mapfilefd.flush()
161 self.mapfilefd.flush()
162
162
163 def writeauthormap(self):
163 def writeauthormap(self):
164 authorfile = self.authorfile
164 authorfile = self.authorfile
165 if authorfile:
165 if authorfile:
166 self.ui.status('Writing author map file %s\n' % authorfile)
166 self.ui.status('Writing author map file %s\n' % authorfile)
167 ofile = open(authorfile, 'w+')
167 ofile = open(authorfile, 'w+')
168 for author in self.authors:
168 for author in self.authors:
169 ofile.write("%s=%s\n" % (author, self.authors[author]))
169 ofile.write("%s=%s\n" % (author, self.authors[author]))
170 ofile.close()
170 ofile.close()
171
171
172 def readauthormap(self, authorfile):
172 def readauthormap(self, authorfile):
173 afile = open(authorfile, 'r')
173 afile = open(authorfile, 'r')
174 for line in afile:
174 for line in afile:
175 try:
175 try:
176 srcauthor = line.split('=')[0].strip()
176 srcauthor = line.split('=')[0].strip()
177 dstauthor = line.split('=')[1].strip()
177 dstauthor = line.split('=')[1].strip()
178 if srcauthor in self.authors and dstauthor != self.authors[srcauthor]:
178 if srcauthor in self.authors and dstauthor != self.authors[srcauthor]:
179 self.ui.status(
179 self.ui.status(
180 'Overriding mapping for author %s, was %s, will be %s\n'
180 'Overriding mapping for author %s, was %s, will be %s\n'
181 % (srcauthor, self.authors[srcauthor], dstauthor))
181 % (srcauthor, self.authors[srcauthor], dstauthor))
182 else:
182 else:
183 self.ui.debug('Mapping author %s to %s\n'
183 self.ui.debug('Mapping author %s to %s\n'
184 % (srcauthor, dstauthor))
184 % (srcauthor, dstauthor))
185 self.authors[srcauthor] = dstauthor
185 self.authors[srcauthor] = dstauthor
186 except IndexError:
186 except IndexError:
187 self.ui.warn(
187 self.ui.warn(
188 'Ignoring bad line in author file map %s: %s\n'
188 'Ignoring bad line in author file map %s: %s\n'
189 % (authorfile, line))
189 % (authorfile, line))
190 afile.close()
190 afile.close()
191
191
192 def copy(self, rev):
192 def copy(self, rev):
193 c = self.commitcache[rev]
193 c = self.commitcache[rev]
194 files = self.source.getchanges(rev)
194 files = self.source.getchanges(rev)
195
195
196 do_copies = (hasattr(c, 'copies') and hasattr(self.dest, 'copyfile'))
196 do_copies = (hasattr(c, 'copies') and hasattr(self.dest, 'copyfile'))
197
197
198 for f, v in files:
198 for f, v in files:
199 try:
199 try:
200 data = self.source.getfile(f, v)
200 data = self.source.getfile(f, v)
201 except IOError, inst:
201 except IOError, inst:
202 self.dest.delfile(f)
202 self.dest.delfile(f)
203 else:
203 else:
204 e = self.source.getmode(f, v)
204 e = self.source.getmode(f, v)
205 self.dest.putfile(f, e, data)
205 self.dest.putfile(f, e, data)
206 if do_copies:
206 if do_copies:
207 if f in c.copies:
207 if f in c.copies:
208 # Merely marks that a copy happened.
208 # Merely marks that a copy happened.
209 self.dest.copyfile(c.copies[f], f)
209 self.dest.copyfile(c.copies[f], f)
210
210
211
211
212 r = [self.map[v] for v in c.parents]
212 r = [self.map[v] for v in c.parents]
213 f = [f for f, v in files]
213 f = [f for f, v in files]
214 newnode = self.dest.putcommit(f, r, c)
214 newnode = self.dest.putcommit(f, r, c)
215 self.mapentry(rev, newnode)
215 self.mapentry(rev, newnode)
216
216
217 def convert(self):
217 def convert(self):
218 try:
218 try:
219 self.source.setrevmap(self.map)
219 self.source.setrevmap(self.map)
220 self.ui.status("scanning source...\n")
220 self.ui.status("scanning source...\n")
221 heads = self.source.getheads()
221 heads = self.source.getheads()
222 parents = self.walktree(heads)
222 parents = self.walktree(heads)
223 self.ui.status("sorting...\n")
223 self.ui.status("sorting...\n")
224 t = self.toposort(parents)
224 t = self.toposort(parents)
225 num = len(t)
225 num = len(t)
226 c = None
226 c = None
227
227
228 self.ui.status("converting...\n")
228 self.ui.status("converting...\n")
229 for c in t:
229 for c in t:
230 num -= 1
230 num -= 1
231 desc = self.commitcache[c].desc
231 desc = self.commitcache[c].desc
232 if "\n" in desc:
232 if "\n" in desc:
233 desc = desc.splitlines()[0]
233 desc = desc.splitlines()[0]
234 author = self.commitcache[c].author
234 author = self.commitcache[c].author
235 author = self.authors.get(author, author)
235 author = self.authors.get(author, author)
236 self.commitcache[c].author = author
236 self.commitcache[c].author = author
237 self.ui.status("%d %s\n" % (num, desc))
237 self.ui.status("%d %s\n" % (num, desc))
238 self.copy(c)
238 self.copy(c)
239
239
240 tags = self.source.gettags()
240 tags = self.source.gettags()
241 ctags = {}
241 ctags = {}
242 for k in tags:
242 for k in tags:
243 v = tags[k]
243 v = tags[k]
244 if v in self.map:
244 if v in self.map:
245 ctags[k] = self.map[v]
245 ctags[k] = self.map[v]
246
246
247 if c and ctags:
247 if c and ctags:
248 nrev = self.dest.puttags(ctags)
248 nrev = self.dest.puttags(ctags)
249 # write another hash correspondence to override the previous
249 # write another hash correspondence to override the previous
250 # one so we don't end up with extra tag heads
250 # one so we don't end up with extra tag heads
251 if nrev:
251 if nrev:
252 self.mapentry(c, nrev)
252 self.mapentry(c, nrev)
253
253
254 self.writeauthormap()
254 self.writeauthormap()
255 finally:
255 finally:
256 self.cleanup()
256 self.cleanup()
257
257
258 def cleanup(self):
258 def cleanup(self):
259 if self.mapfilefd:
259 if self.mapfilefd:
260 self.mapfilefd.close()
260 self.mapfilefd.close()
261
261
262 def _convert(ui, src, dest=None, mapfile=None, **opts):
262 def _convert(ui, src, dest=None, mapfile=None, **opts):
263 '''Convert a foreign SCM repository to a Mercurial one.
263 """Convert a foreign SCM repository to a Mercurial one.
264
264
265 Accepted source formats:
265 Accepted source formats:
266 - GIT
266 - GIT
267 - CVS
267 - CVS
268 - SVN
268 - SVN
269
269
270 Accepted destination formats:
270 Accepted destination formats:
271 - Mercurial
271 - Mercurial
272
272
273 If no revision is given, all revisions will be converted. Otherwise,
273 If no revision is given, all revisions will be converted. Otherwise,
274 convert will only import up to the named revision (given in a format
274 convert will only import up to the named revision (given in a format
275 understood by the source).
275 understood by the source).
276
276
277 If no destination directory name is specified, it defaults to the
277 If no destination directory name is specified, it defaults to the
278 basename of the source with '-hg' appended. If the destination
278 basename of the source with '-hg' appended. If the destination
279 repository doesn't exist, it will be created.
279 repository doesn't exist, it will be created.
280
280
281 If <mapfile> isn't given, it will be put in a default location
281 If <mapfile> isn't given, it will be put in a default location
282 (<dest>/.hg/shamap by default). The <mapfile> is a simple text
282 (<dest>/.hg/shamap by default). The <mapfile> is a simple text
283 file that maps each source commit ID to the destination ID for
283 file that maps each source commit ID to the destination ID for
284 that revision, like so:
284 that revision, like so:
285 <source ID> <destination ID>
285 <source ID> <destination ID>
286
286
287 If the file doesn't exist, it's automatically created. It's updated
287 If the file doesn't exist, it's automatically created. It's updated
288 on each commit copied, so convert-repo can be interrupted and can
288 on each commit copied, so convert-repo can be interrupted and can
289 be run repeatedly to copy new commits.
289 be run repeatedly to copy new commits.
290
290
291 The [username mapping] file is a simple text file that maps each source
291 The [username mapping] file is a simple text file that maps each source
292 commit author to a destination commit author. It is handy for source SCMs
292 commit author to a destination commit author. It is handy for source SCMs
293 that use unix logins to identify authors (eg: CVS). One line per author
293 that use unix logins to identify authors (eg: CVS). One line per author
294 mapping and the line format is:
294 mapping and the line format is:
295 srcauthor=whatever string you want
295 srcauthor=whatever string you want
296 '''
296 """
297
297
298 util._encoding = 'UTF-8'
298 util._encoding = 'UTF-8'
299
299
300 if not dest:
300 if not dest:
301 dest = hg.defaultdest(src) + "-hg"
301 dest = hg.defaultdest(src) + "-hg"
302 ui.status("assuming destination %s\n" % dest)
302 ui.status("assuming destination %s\n" % dest)
303
303
304 # Try to be smart and initialize things when required
304 # Try to be smart and initialize things when required
305 created = False
305 created = False
306 if os.path.isdir(dest):
306 if os.path.isdir(dest):
307 if len(os.listdir(dest)) > 0:
307 if len(os.listdir(dest)) > 0:
308 try:
308 try:
309 hg.repository(ui, dest)
309 hg.repository(ui, dest)
310 ui.status("destination %s is a Mercurial repository\n" % dest)
310 ui.status("destination %s is a Mercurial repository\n" % dest)
311 except hg.RepoError:
311 except hg.RepoError:
312 raise util.Abort(
312 raise util.Abort(
313 "destination directory %s is not empty.\n"
313 "destination directory %s is not empty.\n"
314 "Please specify an empty directory to be initialized\n"
314 "Please specify an empty directory to be initialized\n"
315 "or an already initialized mercurial repository"
315 "or an already initialized mercurial repository"
316 % dest)
316 % dest)
317 else:
317 else:
318 ui.status("initializing destination %s repository\n" % dest)
318 ui.status("initializing destination %s repository\n" % dest)
319 hg.repository(ui, dest, create=True)
319 hg.repository(ui, dest, create=True)
320 created = True
320 created = True
321 elif os.path.exists(dest):
321 elif os.path.exists(dest):
322 raise util.Abort("destination %s exists and is not a directory" % dest)
322 raise util.Abort("destination %s exists and is not a directory" % dest)
323 else:
323 else:
324 ui.status("initializing destination %s repository\n" % dest)
324 ui.status("initializing destination %s repository\n" % dest)
325 hg.repository(ui, dest, create=True)
325 hg.repository(ui, dest, create=True)
326 created = True
326 created = True
327
327
328 destc = convertsink(ui, dest)
328 destc = convertsink(ui, dest)
329
329
330 try:
330 try:
331 srcc = convertsource(ui, src, rev=opts.get('rev'))
331 srcc = convertsource(ui, src, rev=opts.get('rev'))
332 except Exception:
332 except Exception:
333 if created:
333 if created:
334 shutil.rmtree(dest, True)
334 shutil.rmtree(dest, True)
335 raise
335 raise
336
336
337 if not mapfile:
337 if not mapfile:
338 try:
338 try:
339 mapfile = destc.mapfile()
339 mapfile = destc.mapfile()
340 except:
340 except:
341 mapfile = os.path.join(destc, "map")
341 mapfile = os.path.join(destc, "map")
342
342
343 c = convert(ui, srcc, destc, mapfile, opts)
343 c = convert(ui, srcc, destc, mapfile, opts)
344 c.convert()
344 c.convert()
345
345
346 cmdtable = {
346 cmdtable = {
347 "convert":
347 "convert":
348 (_convert,
348 (_convert,
349 [('A', 'authors', '', 'username mapping filename'),
349 [('A', 'authors', '', 'username mapping filename'),
350 ('r', 'rev', '', 'import up to target revision REV'),
350 ('r', 'rev', '', 'import up to target revision REV'),
351 ('', 'datesort', None, 'try to sort changesets by date')],
351 ('', 'datesort', None, 'try to sort changesets by date')],
352 'hg convert [OPTION]... SOURCE [DEST [MAPFILE]]'),
352 'hg convert [OPTION]... SOURCE [DEST [MAPFILE]]'),
353 }
353 }
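
toposort() above promises that every changeset comes after all of its uncommitted ancestors. The following is a compact sketch of that ordering guarantee on a toy parent map (illustrative only; the real method additionally skips nodes already recorded in the map file and supports --datesort):

    # Hedged sketch: Kahn-style parents-before-children ordering over a
    # made-up graph, the same property convert relies on when replaying
    # commits into the destination repository.
    parents = {'a': [], 'b': ['a'], 'c': ['a'], 'd': ['b', 'c']}

    indegree = dict([(n, len(ps)) for n, ps in parents.items()])
    children = {}
    for n, ps in parents.items():
        for p in ps:
            children.setdefault(p, []).append(n)

    order = []
    ready = [n for n in indegree if indegree[n] == 0]
    while ready:
        n = ready.pop(0)
        order.append(n)
        for c in children.get(n, []):
            indegree[c] -= 1
            if indegree[c] == 0:
                ready.append(c)

    print order   # e.g. ['a', 'b', 'c', 'd']: ancestors always precede descendants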
@@ -1,589 +1,667 @@
1 # Subversion 1.4/1.5 Python API backend
1 # Subversion 1.4/1.5 Python API backend
2 #
2 #
3 # Copyright(C) 2007 Daniel Holth et al
3 # Copyright(C) 2007 Daniel Holth et al
4 #
5 # Configuration options:
6 #
7 # convert.svn.trunk
8 # Relative path to the trunk (default: "trunk")
9 # convert.svn.branches
10 # Relative path to tree of branches (default: "branches")
11 #
12 # Set these in a hgrc, or on the command line as follows:
13 #
14 # hg convert --config convert.svn.trunk=wackoname [...]
4
15
5 import pprint
6 import locale
16 import locale
7
17 import os
18 import cPickle as pickle
8 from mercurial import util
19 from mercurial import util
9
20
10 # Subversion stuff. Works best with very recent Python SVN bindings
21 # Subversion stuff. Works best with very recent Python SVN bindings
11 # e.g. SVN 1.5 or backports. Thanks to the bzr folks for enhancing
22 # e.g. SVN 1.5 or backports. Thanks to the bzr folks for enhancing
12 # these bindings.
23 # these bindings.
13
24
14 from cStringIO import StringIO
25 from cStringIO import StringIO
15
26
16 from common import NoRepo, commit, converter_source
27 from common import NoRepo, commit, converter_source
17
28
18 try:
29 try:
19 from svn.core import SubversionException, Pool
30 from svn.core import SubversionException, Pool
20 import svn.core
31 import svn.core
21 import svn.ra
32 import svn.ra
22 import svn.delta
33 import svn.delta
23 import svn
34 import svn
24 import transport
35 import transport
25 except ImportError:
36 except ImportError:
26 pass
37 pass
27
38
28 class CompatibilityException(Exception): pass
39 class CompatibilityException(Exception): pass
29
40
41 class changedpath(object):
42 def __init__(self, p):
43 self.copyfrom_path = p.copyfrom_path
44 self.copyfrom_rev = p.copyfrom_rev
45 self.action = p.action
46
30 # SVN conversion code stolen from bzr-svn and tailor
47 # SVN conversion code stolen from bzr-svn and tailor
31 class convert_svn(converter_source):
48 class convert_svn(converter_source):
32 def __init__(self, ui, url, rev=None):
49 def __init__(self, ui, url, rev=None):
33 super(convert_svn, self).__init__(ui, url, rev=rev)
50 super(convert_svn, self).__init__(ui, url, rev=rev)
34
51
35 try:
52 try:
36 SubversionException
53 SubversionException
37 except NameError:
54 except NameError:
38 msg = 'subversion python bindings could not be loaded\n'
55 msg = 'subversion python bindings could not be loaded\n'
39 ui.warn(msg)
56 ui.warn(msg)
40 raise NoRepo(msg)
57 raise NoRepo(msg)
41
58
42 self.encoding = locale.getpreferredencoding()
59 self.encoding = locale.getpreferredencoding()
43 self.lastrevs = {}
60 self.lastrevs = {}
44
61
45 latest = None
62 latest = None
46 if rev:
63 if rev:
47 try:
64 try:
48 latest = int(rev)
65 latest = int(rev)
49 except ValueError:
66 except ValueError:
50 raise util.Abort('svn: revision %s is not an integer' % rev)
67 raise util.Abort('svn: revision %s is not an integer' % rev)
51 try:
68 try:
52 # Support file://path@rev syntax. Useful e.g. to convert
69 # Support file://path@rev syntax. Useful e.g. to convert
53 # deleted branches.
70 # deleted branches.
54 url, latest = url.rsplit("@", 1)
71 at = url.rfind('@')
55 latest = int(latest)
72 if at >= 0:
73 latest = int(url[at+1:])
74 url = url[:at]
56 except ValueError, e:
75 except ValueError, e:
57 pass
76 pass
58 self.url = url
77 self.url = url
59 self.encoding = 'UTF-8' # Subversion is always nominal UTF-8
78 self.encoding = 'UTF-8' # Subversion is always nominal UTF-8
60 try:
79 try:
61 self.transport = transport.SvnRaTransport(url = url)
80 self.transport = transport.SvnRaTransport(url=url)
62 self.ra = self.transport.ra
81 self.ra = self.transport.ra
63 self.ctx = svn.client.create_context()
82 self.ctx = self.transport.client
64 self.base = svn.ra.get_repos_root(self.ra)
83 self.base = svn.ra.get_repos_root(self.ra)
65 self.module = self.url[len(self.base):]
84 self.module = self.url[len(self.base):]
66 self.modulemap = {} # revision, module
85 self.modulemap = {} # revision, module
67 self.commits = {}
86 self.commits = {}
68 self.files = {}
87 self.files = {}
69 self.uuid = svn.ra.get_uuid(self.ra).decode(self.encoding)
88 self.uuid = svn.ra.get_uuid(self.ra).decode(self.encoding)
70 except SubversionException, e:
89 except SubversionException, e:
71 raise NoRepo("couldn't open SVN repo %s" % url)
90 raise NoRepo("couldn't open SVN repo %s" % url)
72
91
73 try:
92 try:
74 self.get_blacklist()
93 self.get_blacklist()
75 except IOError, e:
94 except IOError, e:
76 pass
95 pass
77
96
78 self.last_changed = self.latest(self.module, latest)
97 self.last_changed = self.latest(self.module, latest)
79
98
80 self.head = self.revid(self.last_changed)
99 self.head = self.revid(self.last_changed)
81
100
82 def setrevmap(self, revmap):
101 def setrevmap(self, revmap):
83 lastrevs = {}
102 lastrevs = {}
84 for revid in revmap.keys():
103 for revid in revmap.keys():
85 uuid, module, revnum = self.revsplit(revid)
104 uuid, module, revnum = self.revsplit(revid)
86 lastrevnum = lastrevs.setdefault(module, revnum)
105 lastrevnum = lastrevs.setdefault(module, revnum)
87 if revnum > lastrevnum:
106 if revnum > lastrevnum:
88 lastrevs[module] = revnum
107 lastrevs[module] = revnum
89 self.lastrevs = lastrevs
108 self.lastrevs = lastrevs
90
109
110 def exists(self, path, optrev):
111 try:
112 return svn.client.ls(self.url.rstrip('/') + '/' + path,
113 optrev, False, self.ctx)
114 except SubversionException, err:
115 return []
116
91 def getheads(self):
117 def getheads(self):
92 # detect standard /branches, /tags, /trunk layout
118 # detect standard /branches, /tags, /trunk layout
93 optrev = svn.core.svn_opt_revision_t()
119 optrev = svn.core.svn_opt_revision_t()
94 optrev.kind = svn.core.svn_opt_revision_number
120 optrev.kind = svn.core.svn_opt_revision_number
95 optrev.value.number = self.last_changed
121 optrev.value.number = self.last_changed
96 rpath = self.url.strip('/')
122 rpath = self.url.strip('/')
97 paths = svn.client.ls(rpath, optrev, False, self.ctx)
123 cfgtrunk = self.ui.config('convert', 'svn.trunk')
98 if 'branches' in paths and 'trunk' in paths:
124 cfgbranches = self.ui.config('convert', 'svn.branches')
99 self.module += '/trunk'
125 trunk = (cfgtrunk or 'trunk').strip('/')
126 branches = (cfgbranches or 'branches').strip('/')
127 if self.exists(trunk, optrev) and self.exists(branches, optrev):
128 self.ui.note('found trunk at %r and branches at %r\n' %
129 (trunk, branches))
130 oldmodule = self.module
131 self.module += '/' + trunk
100 lt = self.latest(self.module, self.last_changed)
132 lt = self.latest(self.module, self.last_changed)
101 self.head = self.revid(lt)
133 self.head = self.revid(lt)
102 self.heads = [self.head]
134 self.heads = [self.head]
103 branches = svn.client.ls(rpath + '/branches', optrev, False, self.ctx)
135 branchnames = svn.client.ls(rpath + '/' + branches, optrev, False,
104 for branch in branches.keys():
136 self.ctx)
105 module = '/branches/' + branch
137 for branch in branchnames.keys():
138 if oldmodule:
139 module = '/' + oldmodule + '/' + branches + '/' + branch
140 else:
141 module = '/' + branches + '/' + branch
106 brevnum = self.latest(module, self.last_changed)
142 brevnum = self.latest(module, self.last_changed)
107 brev = self.revid(brevnum, module)
143 brev = self.revid(brevnum, module)
108 self.ui.note('found branch %s at %d\n' % (branch, brevnum))
144 self.ui.note('found branch %s at %d\n' % (branch, brevnum))
109 self.heads.append(brev)
145 self.heads.append(brev)
146 elif cfgtrunk or cfgbranches:
147 raise util.Abort(_('trunk/branch layout expected, '
148 'but not found'))
110 else:
149 else:
150 self.ui.note('working with one branch\n')
111 self.heads = [self.head]
151 self.heads = [self.head]
112 return self.heads
152 return self.heads
113
153
114 def getfile(self, file, rev):
154 def getfile(self, file, rev):
115 data, mode = self._getfile(file, rev)
155 data, mode = self._getfile(file, rev)
116 self.modecache[(file, rev)] = mode
156 self.modecache[(file, rev)] = mode
117 return data
157 return data
118
158
119 def getmode(self, file, rev):
159 def getmode(self, file, rev):
120 return self.modecache[(file, rev)]
160 return self.modecache[(file, rev)]
121
161
122 def getchanges(self, rev):
162 def getchanges(self, rev):
123 self.modecache = {}
163 self.modecache = {}
124 files = self.files[rev]
164 files = self.files[rev]
125 cl = files
165 cl = files
126 cl.sort()
166 cl.sort()
127 # caller caches the result, so free it here to release memory
167 # caller caches the result, so free it here to release memory
128 del self.files[rev]
168 del self.files[rev]
129 return cl
169 return cl
130
170
131 def getcommit(self, rev):
171 def getcommit(self, rev):
132 if rev not in self.commits:
172 if rev not in self.commits:
133 uuid, module, revnum = self.revsplit(rev)
173 uuid, module, revnum = self.revsplit(rev)
134 self.module = module
174 self.module = module
135 self.reparent(module)
175 self.reparent(module)
136 stop = self.lastrevs.get(module, 0)
176 stop = self.lastrevs.get(module, 0)
137 self._fetch_revisions(from_revnum=revnum, to_revnum=stop)
177 self._fetch_revisions(from_revnum=revnum, to_revnum=stop)
138 commit = self.commits[rev]
178 commit = self.commits[rev]
139 # caller caches the result, so free it here to release memory
179 # caller caches the result, so free it here to release memory
140 del self.commits[rev]
180 del self.commits[rev]
141 return commit
181 return commit
142
182
183 def get_log(self, paths, start, end, limit=0, discover_changed_paths=True,
184 strict_node_history=False):
185 '''wrapper for svn.ra.get_log.
186 on a large repository, svn.ra.get_log pins huge amounts of
187 memory that cannot be recovered. work around it by forking
188 and writing results over a pipe.'''
189
190 def child(fp):
191 protocol = -1
192 def receiver(orig_paths, revnum, author, date, message, pool):
193 if orig_paths is not None:
194 for k, v in orig_paths.iteritems():
195 orig_paths[k] = changedpath(v)
196 pickle.dump((orig_paths, revnum, author, date, message),
197 fp, protocol)
198
199 try:
200 # Use an ra of our own so that our parent can consume
201 # our results without confusing the server.
202 t = transport.SvnRaTransport(url=self.url)
203 svn.ra.get_log(t.ra, paths, start, end, limit,
204 discover_changed_paths,
205 strict_node_history,
206 receiver)
207 except SubversionException, (_, num):
208 self.ui.print_exc()
209 pickle.dump(num, fp, protocol)
210 else:
211 pickle.dump(None, fp, protocol)
212 fp.close()
213
214 def parent(fp):
215 while True:
216 entry = pickle.load(fp)
217 try:
218 orig_paths, revnum, author, date, message = entry
219 except:
220 if entry is None:
221 break
222 raise SubversionException("child raised exception", entry)
223 yield entry
224
225 rfd, wfd = os.pipe()
226 pid = os.fork()
227 if pid:
228 os.close(wfd)
229 for p in parent(os.fdopen(rfd, 'rb')):
230 yield p
231 ret = os.waitpid(pid, 0)[1]
232 if ret:
233 raise util.Abort(_('get_log %s') % util.explain_exit(ret))
234 else:
235 os.close(rfd)
236 child(os.fdopen(wfd, 'wb'))
237 os._exit(0)
238
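
The get_log() wrapper just above works around svn.ra.get_log pinning memory by doing the call in a forked child and streaming pickled entries back through a pipe. Stripped of everything Subversion-specific, the pattern looks roughly like this (hypothetical payload, POSIX-only):

    # Hedged, generic sketch of the fork-and-pipe streaming used above;
    # the produced items are made up and a None sentinel marks end of data.
    import os
    import cPickle as pickle

    def stream():
        rfd, wfd = os.pipe()
        pid = os.fork()
        if pid:                               # parent: consume until sentinel
            os.close(wfd)
            fp = os.fdopen(rfd, 'rb')
            while True:
                entry = pickle.load(fp)
                if entry is None:
                    break
                yield entry
            os.waitpid(pid, 0)
        else:                                 # child: produce, then exit hard
            os.close(rfd)
            fp = os.fdopen(wfd, 'wb')
            for i in range(3):
                pickle.dump(('entry', i), fp, -1)
            pickle.dump(None, fp, -1)
            fp.close()
            os._exit(0)

    for entry in stream():
        print entry                           # ('entry', 0) ... ('entry', 2)

In the wrapper above, the child also pickles an error number when svn.ra.get_log fails, and the parent re-raises it when an entry is neither a log tuple nor the sentinel.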
143 def gettags(self):
239 def gettags(self):
144 tags = {}
240 tags = {}
145 def parselogentry(*arg, **args):
241 start = self.revnum(self.head)
146 orig_paths, revnum, author, date, message, pool = arg
242 try:
243 for entry in self.get_log(['/tags'], 0, start):
244 orig_paths, revnum, author, date, message = entry
147 for path in orig_paths:
245 for path in orig_paths:
148 if not path.startswith('/tags/'):
246 if not path.startswith('/tags/'):
149 continue
247 continue
150 ent = orig_paths[path]
248 ent = orig_paths[path]
151 source = ent.copyfrom_path
249 source = ent.copyfrom_path
152 rev = ent.copyfrom_rev
250 rev = ent.copyfrom_rev
153 tag = path.split('/', 2)[2]
251 tag = path.split('/', 2)[2]
154 tags[tag] = self.revid(rev, module=source)
252 tags[tag] = self.revid(rev, module=source)
155
253 except SubversionException, (_, num):
156 start = self.revnum(self.head)
254 self.ui.note('no tags found at revision %d\n' % start)
157 try:
158 svn.ra.get_log(self.ra, ['/tags'], 0, start, 0, True, False,
159 parselogentry)
160 return tags
255 return tags
161 except SubversionException:
162 self.ui.note('no tags found at revision %d\n' % start)
163 return {}
164
256
165 # -- helper functions --
257 # -- helper functions --
166
258
167 def revid(self, revnum, module=None):
259 def revid(self, revnum, module=None):
168 if not module:
260 if not module:
169 module = self.module
261 module = self.module
170 return (u"svn:%s%s@%s" % (self.uuid, module, revnum)).decode(self.encoding)
262 return (u"svn:%s%s@%s" % (self.uuid, module, revnum)).decode(self.encoding)
171
263
172 def revnum(self, rev):
264 def revnum(self, rev):
173 return int(rev.split('@')[-1])
265 return int(rev.split('@')[-1])
174
266
175 def revsplit(self, rev):
267 def revsplit(self, rev):
176 url, revnum = rev.encode(self.encoding).split('@', 1)
268 url, revnum = rev.encode(self.encoding).split('@', 1)
177 revnum = int(revnum)
269 revnum = int(revnum)
178 parts = url.split('/', 1)
270 parts = url.split('/', 1)
179 uuid = parts.pop(0)[4:]
271 uuid = parts.pop(0)[4:]
180 mod = ''
272 mod = ''
181 if parts:
273 if parts:
182 mod = '/' + parts[0]
274 mod = '/' + parts[0]
183 return uuid, mod, revnum
275 return uuid, mod, revnum
184
276
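
For orientation, revid() and revsplit() round-trip identifiers of the form 'svn:<uuid><module>@<revnum>'. A tiny hedged example with a made-up UUID and module:

    # Illustrative only: the uuid and module below are invented.
    uuid = 'facc0ab6-0d03-4adc-8207-44c0e8db4e1f'
    rev = "svn:%s%s@%s" % (uuid, '/trunk', 42)   # what revid() builds
    url, revnum = rev.split('@', 1)              # what revsplit() undoes
    parts = url.split('/', 1)
    print parts.pop(0)[4:]                       # the uuid
    print '/' + parts[0]                         # '/trunk'
    print int(revnum)                            # 42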
185 def latest(self, path, stop=0):
277 def latest(self, path, stop=0):
186 'find the latest revision affecting path, up to stop'
278 'find the latest revision affecting path, up to stop'
187 if not stop:
279 if not stop:
188 stop = svn.ra.get_latest_revnum(self.ra)
280 stop = svn.ra.get_latest_revnum(self.ra)
189 try:
281 try:
190 self.reparent('')
282 self.reparent('')
191 dirent = svn.ra.stat(self.ra, path.strip('/'), stop)
283 dirent = svn.ra.stat(self.ra, path.strip('/'), stop)
192 self.reparent(self.module)
284 self.reparent(self.module)
193 except SubversionException:
285 except SubversionException:
194 dirent = None
286 dirent = None
195 if not dirent:
287 if not dirent:
196 raise util.Abort('%s not found up to revision %d' \
288 print self.base, path
197 % (path, stop))
289 raise util.Abort('%s not found up to revision %d' % (path, stop))
198
290
199 return dirent.created_rev
291 return dirent.created_rev
200
292
201 def get_blacklist(self):
293 def get_blacklist(self):
202 """Avoid certain revision numbers.
294 """Avoid certain revision numbers.
203 It is not uncommon for two nearby revisions to cancel each other
295 It is not uncommon for two nearby revisions to cancel each other
204 out, e.g. 'I copied trunk into a subdirectory of itself instead
296 out, e.g. 'I copied trunk into a subdirectory of itself instead
205 of making a branch'. The converted repository is significantly
297 of making a branch'. The converted repository is significantly
206 smaller if we ignore such revisions."""
298 smaller if we ignore such revisions."""
207 self.blacklist = set()
299 self.blacklist = set()
208 blacklist = self.blacklist
300 blacklist = self.blacklist
209 for line in file("blacklist.txt", "r"):
301 for line in file("blacklist.txt", "r"):
210 if not line.startswith("#"):
302 if not line.startswith("#"):
211 try:
303 try:
212 svn_rev = int(line.strip())
304 svn_rev = int(line.strip())
213 blacklist.add(svn_rev)
305 blacklist.add(svn_rev)
214 except ValueError, e:
306 except ValueError, e:
215 pass # not an integer or a comment
307 pass # not an integer or a comment
216
308
217 def is_blacklisted(self, svn_rev):
309 def is_blacklisted(self, svn_rev):
218 return svn_rev in self.blacklist
310 return svn_rev in self.blacklist
219
311
220 def reparent(self, module):
312 def reparent(self, module):
221 svn_url = self.base + module
313 svn_url = self.base + module
222 self.ui.debug("reparent to %s\n" % svn_url.encode(self.encoding))
314 self.ui.debug("reparent to %s\n" % svn_url.encode(self.encoding))
223 svn.ra.reparent(self.ra, svn_url.encode(self.encoding))
315 svn.ra.reparent(self.ra, svn_url.encode(self.encoding))
224
316
225 def _fetch_revisions(self, from_revnum = 0, to_revnum = 347):
317 def _fetch_revisions(self, from_revnum = 0, to_revnum = 347):
226 def get_entry_from_path(path, module=self.module):
318 def get_entry_from_path(path, module=self.module):
227 # Given the repository url of this wc, say
319 # Given the repository url of this wc, say
228 # "http://server/plone/CMFPlone/branches/Plone-2_0-branch"
320 # "http://server/plone/CMFPlone/branches/Plone-2_0-branch"
229 # extract the "entry" portion (a relative path) from what
321 # extract the "entry" portion (a relative path) from what
230 # svn log --xml says, ie
322 # svn log --xml says, ie
231 # "/CMFPlone/branches/Plone-2_0-branch/tests/PloneTestCase.py"
323 # "/CMFPlone/branches/Plone-2_0-branch/tests/PloneTestCase.py"
232 # that is to say "tests/PloneTestCase.py"
324 # that is to say "tests/PloneTestCase.py"
233
325
234 if path.startswith(module):
326 if path.startswith(module):
235 relative = path[len(module):]
327 relative = path[len(module):]
236 if relative.startswith('/'):
328 if relative.startswith('/'):
237 return relative[1:]
329 return relative[1:]
238 else:
330 else:
239 return relative
331 return relative
240
332
241 # The path is outside our tracked tree...
333 # The path is outside our tracked tree...
242 self.ui.debug('Ignoring %r since it is not under %r\n' % (path, module))
334 self.ui.debug('Ignoring %r since it is not under %r\n' % (path, module))
243 return None
335 return None
244
336
245 received = []
246 # svn.ra.get_log requires no other calls to the ra until it completes,
247 # so we just collect the log entries and parse them afterwards
248 def receivelog(*arg, **args):
249 received.append(arg)
250
251 self.child_cset = None
337 self.child_cset = None
252 def parselogentry(*arg, **args):
338 def parselogentry(orig_paths, revnum, author, date, message):
253 orig_paths, revnum, author, date, message, pool = arg
339 self.ui.debug("parsing revision %d (%d changes)\n" %
254
340 (revnum, len(orig_paths)))
255 if self.is_blacklisted(revnum):
256 self.ui.note('skipping blacklisted revision %d\n' % revnum)
257 return
258
259 self.ui.debug("parsing revision %d\n" % revnum)
260
261 if orig_paths is None:
262 self.ui.debug('revision %d has no entries\n' % revnum)
263 return
264
341
265 if revnum in self.modulemap:
342 if revnum in self.modulemap:
266 new_module = self.modulemap[revnum]
343 new_module = self.modulemap[revnum]
267 if new_module != self.module:
344 if new_module != self.module:
268 self.module = new_module
345 self.module = new_module
269 self.reparent(self.module)
346 self.reparent(self.module)
270
347
271 copyfrom = {} # Map of entrypath, revision for finding source of deleted revisions.
348 copyfrom = {} # Map of entrypath, revision for finding source of deleted revisions.
272 copies = {}
349 copies = {}
273 entries = []
350 entries = []
274 rev = self.revid(revnum)
351 rev = self.revid(revnum)
275 parents = []
352 parents = []
276
353
277 # branch log might return entries for a parent we already have
354 # branch log might return entries for a parent we already have
278 if (rev in self.commits or
355 if (rev in self.commits or
279 (revnum < self.lastrevs.get(self.module, 0))):
356 (revnum < self.lastrevs.get(self.module, 0))):
280 return
357 return
281
358
282 try:
359 try:
283 branch = self.module.split("/")[-1]
360 branch = self.module.split("/")[-1]
284 if branch == 'trunk':
361 if branch == 'trunk':
285 branch = ''
362 branch = ''
286 except IndexError:
363 except IndexError:
287 branch = None
364 branch = None
288
365
289 paths = orig_paths.keys()
366 orig_paths = orig_paths.items()
290 paths.sort()
367 orig_paths.sort()
291 for path in paths:
368 for path, ent in orig_paths:
292 # self.ui.write("path %s\n" % path)
369 # self.ui.write("path %s\n" % path)
293 if path == self.module: # Follow branching back in history
370 if path == self.module: # Follow branching back in history
294 ent = orig_paths[path]
295 if ent:
371 if ent:
296 if ent.copyfrom_path:
372 if ent.copyfrom_path:
297 # ent.copyfrom_rev may not be the actual last revision
373 # ent.copyfrom_rev may not be the actual last revision
298 prev = self.latest(ent.copyfrom_path, ent.copyfrom_rev)
374 prev = self.latest(ent.copyfrom_path, ent.copyfrom_rev)
299 self.modulemap[prev] = ent.copyfrom_path
375 self.modulemap[prev] = ent.copyfrom_path
300 parents = [self.revid(prev, ent.copyfrom_path)]
376 parents = [self.revid(prev, ent.copyfrom_path)]
301 self.ui.note('found parent of branch %s at %d: %s\n' % \
377 self.ui.note('found parent of branch %s at %d: %s\n' % \
302 (self.module, prev, ent.copyfrom_path))
378 (self.module, prev, ent.copyfrom_path))
303 else:
379 else:
304 self.ui.debug("No copyfrom path, don't know what to do.\n")
380 self.ui.debug("No copyfrom path, don't know what to do.\n")
305 # Maybe it was added and there is no more history.
381 # Maybe it was added and there is no more history.
306 entrypath = get_entry_from_path(path, module=self.module)
382 entrypath = get_entry_from_path(path, module=self.module)
307 # self.ui.write("entrypath %s\n" % entrypath)
383 # self.ui.write("entrypath %s\n" % entrypath)
308 if entrypath is None:
384 if entrypath is None:
309 # Outside our area of interest
385 # Outside our area of interest
310 self.ui.debug("boring@%s: %s\n" % (revnum, path))
386 self.ui.debug("boring@%s: %s\n" % (revnum, path))
311 continue
387 continue
312 entry = entrypath.decode(self.encoding)
388 entry = entrypath.decode(self.encoding)
313 ent = orig_paths[path]
314
389
315 kind = svn.ra.check_path(self.ra, entrypath, revnum)
390 kind = svn.ra.check_path(self.ra, entrypath, revnum)
316 if kind == svn.core.svn_node_file:
391 if kind == svn.core.svn_node_file:
317 if ent.copyfrom_path:
392 if ent.copyfrom_path:
318 copyfrom_path = get_entry_from_path(ent.copyfrom_path)
393 copyfrom_path = get_entry_from_path(ent.copyfrom_path)
319 if copyfrom_path:
394 if copyfrom_path:
320 self.ui.debug("Copied to %s from %s@%s\n" % (entry, copyfrom_path, ent.copyfrom_rev))
395 self.ui.debug("Copied to %s from %s@%s\n" % (entry, copyfrom_path, ent.copyfrom_rev))
321 # It's probably important for hg that the source
396 # It's probably important for hg that the source
322 # exists in the revision's parent, not just the
397 # exists in the revision's parent, not just the
323 # ent.copyfrom_rev
398 # ent.copyfrom_rev
324 fromkind = svn.ra.check_path(self.ra, copyfrom_path, ent.copyfrom_rev)
399 fromkind = svn.ra.check_path(self.ra, copyfrom_path, ent.copyfrom_rev)
325 if fromkind != 0:
400 if fromkind != 0:
326 copies[self.recode(entry)] = self.recode(copyfrom_path)
401 copies[self.recode(entry)] = self.recode(copyfrom_path)
327 entries.append(self.recode(entry))
402 entries.append(self.recode(entry))
328 elif kind == 0: # gone, but had better be a deleted *file*
403 elif kind == 0: # gone, but had better be a deleted *file*
329 self.ui.debug("gone from %s\n" % ent.copyfrom_rev)
404 self.ui.debug("gone from %s\n" % ent.copyfrom_rev)
330
405
331 # if a branch is created but entries are removed in the same
406 # if a branch is created but entries are removed in the same
332 # changeset, get the right fromrev
407 # changeset, get the right fromrev
333 if parents:
408 if parents:
334 uuid, old_module, fromrev = self.revsplit(parents[0])
409 uuid, old_module, fromrev = self.revsplit(parents[0])
335 else:
410 else:
336 fromrev = revnum - 1
411 fromrev = revnum - 1
337 # might always need to be revnum - 1 in these 3 lines?
412 # might always need to be revnum - 1 in these 3 lines?
338 old_module = self.modulemap.get(fromrev, self.module)
413 old_module = self.modulemap.get(fromrev, self.module)
339
414
340 basepath = old_module + "/" + get_entry_from_path(path, module=self.module)
415 basepath = old_module + "/" + get_entry_from_path(path, module=self.module)
341 entrypath = old_module + "/" + get_entry_from_path(path, module=self.module)
416 entrypath = old_module + "/" + get_entry_from_path(path, module=self.module)
342
417
343 def lookup_parts(p):
418 def lookup_parts(p):
344 rc = None
419 rc = None
345 parts = p.split("/")
420 parts = p.split("/")
346 for i in range(len(parts)):
421 for i in range(len(parts)):
347 part = "/".join(parts[:i])
422 part = "/".join(parts[:i])
348 info = part, copyfrom.get(part, None)
423 info = part, copyfrom.get(part, None)
349 if info[1] is not None:
424 if info[1] is not None:
350 self.ui.debug("Found parent directory %s\n" % info[1])
425 self.ui.debug("Found parent directory %s\n" % info[1])
351 rc = info
426 rc = info
352 return rc
427 return rc
353
428
354 self.ui.debug("base, entry %s %s\n" % (basepath, entrypath))
429 self.ui.debug("base, entry %s %s\n" % (basepath, entrypath))
355
430
356 frompath, froment = lookup_parts(entrypath) or (None, revnum - 1)
431 frompath, froment = lookup_parts(entrypath) or (None, revnum - 1)
357
432
358 # need to remove fragment from lookup_parts and replace with copyfrom_path
433 # need to remove fragment from lookup_parts and replace with copyfrom_path
359 if frompath is not None:
434 if frompath is not None:
360 self.ui.debug("munge-o-matic\n")
435 self.ui.debug("munge-o-matic\n")
361 self.ui.debug(entrypath + '\n')
436 self.ui.debug(entrypath + '\n')
362 self.ui.debug(entrypath[len(frompath):] + '\n')
437 self.ui.debug(entrypath[len(frompath):] + '\n')
363 entrypath = froment.copyfrom_path + entrypath[len(frompath):]
438 entrypath = froment.copyfrom_path + entrypath[len(frompath):]
364 fromrev = froment.copyfrom_rev
439 fromrev = froment.copyfrom_rev
365 self.ui.debug("Info: %s %s %s %s\n" % (frompath, froment, ent, entrypath))
440 self.ui.debug("Info: %s %s %s %s\n" % (frompath, froment, ent, entrypath))
366
441
367 fromkind = svn.ra.check_path(self.ra, entrypath, fromrev)
442 fromkind = svn.ra.check_path(self.ra, entrypath, fromrev)
368 if fromkind == svn.core.svn_node_file: # a deleted file
443 if fromkind == svn.core.svn_node_file: # a deleted file
369 entries.append(self.recode(entry))
444 entries.append(self.recode(entry))
370 elif fromkind == svn.core.svn_node_dir:
445 elif fromkind == svn.core.svn_node_dir:
371 # print "Deleted/moved non-file:", revnum, path, ent
446 # print "Deleted/moved non-file:", revnum, path, ent
372 # children = self._find_children(path, revnum - 1)
447 # children = self._find_children(path, revnum - 1)
373 # print "find children %s@%d from %d action %s" % (path, revnum, ent.copyfrom_rev, ent.action)
448 # print "find children %s@%d from %d action %s" % (path, revnum, ent.copyfrom_rev, ent.action)
374 # Sometimes this is tricky. For example: in
449 # Sometimes this is tricky. For example: in
375 # The Subversion Repository revision 6940 a dir
450 # The Subversion Repository revision 6940 a dir
376 # was copied and one of its files was deleted
451 # was copied and one of its files was deleted
377 # from the new location in the same commit. This
452 # from the new location in the same commit. This
378 # code can't deal with that yet.
453 # code can't deal with that yet.
379 if ent.action == 'C':
454 if ent.action == 'C':
380 children = self._find_children(path, fromrev)
455 children = self._find_children(path, fromrev)
381 else:
456 else:
382 oroot = entrypath.strip('/')
457 oroot = entrypath.strip('/')
383 nroot = path.strip('/')
458 nroot = path.strip('/')
384 children = self._find_children(oroot, fromrev)
459 children = self._find_children(oroot, fromrev)
385 children = [s.replace(oroot,nroot) for s in children]
460 children = [s.replace(oroot,nroot) for s in children]
386 # Mark all [files, not directories] as deleted.
461 # Mark all [files, not directories] as deleted.
387 for child in children:
462 for child in children:
388 # Can we move a child directory and its
463 # Can we move a child directory and its
389 # parent in the same commit? (probably can). Could
464 # parent in the same commit? (probably can). Could
390 # cause problems if instead of revnum -1,
465 # cause problems if instead of revnum -1,
391 # we have to look in (copyfrom_path, revnum - 1)
466 # we have to look in (copyfrom_path, revnum - 1)
392 entrypath = get_entry_from_path("/" + child, module=old_module)
467 entrypath = get_entry_from_path("/" + child, module=old_module)
393 if entrypath:
468 if entrypath:
394 entry = self.recode(entrypath.decode(self.encoding))
469 entry = self.recode(entrypath.decode(self.encoding))
395 if entry in copies:
470 if entry in copies:
396 # deleted file within a copy
471 # deleted file within a copy
397 del copies[entry]
472 del copies[entry]
398 else:
473 else:
399 entries.append(entry)
474 entries.append(entry)
400 else:
475 else:
401 self.ui.debug('unknown path in revision %d: %s\n' % \
476 self.ui.debug('unknown path in revision %d: %s\n' % \
402 (revnum, path))
477 (revnum, path))
403 elif kind == svn.core.svn_node_dir:
478 elif kind == svn.core.svn_node_dir:
404 # Should probably synthesize normal file entries
479 # Should probably synthesize normal file entries
405 # and handle as above to clean up copy/rename handling.
480 # and handle as above to clean up copy/rename handling.
406
481
407 # If the directory just had a prop change,
482 # If the directory just had a prop change,
408 # then we shouldn't need to look for its children.
483 # then we shouldn't need to look for its children.
409 # Also this could create duplicate entries. Not sure
484 # Also this could create duplicate entries. Not sure
410 # whether this will matter. Maybe should make entries a set.
485 # whether this will matter. Maybe should make entries a set.
411 # print "Changed directory", revnum, path, ent.action, ent.copyfrom_path, ent.copyfrom_rev
486 # print "Changed directory", revnum, path, ent.action, ent.copyfrom_path, ent.copyfrom_rev
412 # This will fail if a directory was copied
487 # This will fail if a directory was copied
413 # from another branch and then some of its files
488 # from another branch and then some of its files
414 # were deleted in the same transaction.
489 # were deleted in the same transaction.
415 children = self._find_children(path, revnum)
490 children = self._find_children(path, revnum)
416 children.sort()
491 children.sort()
417 for child in children:
492 for child in children:
418 # Can we move a child directory and its
493 # Can we move a child directory and its
419 # parent in the same commit? (probably can). Could
494 # parent in the same commit? (probably can). Could
420 # cause problems if instead of revnum -1,
495 # cause problems if instead of revnum -1,
421 # we have to look in (copyfrom_path, revnum - 1)
496 # we have to look in (copyfrom_path, revnum - 1)
422 entrypath = get_entry_from_path("/" + child, module=self.module)
497 entrypath = get_entry_from_path("/" + child, module=self.module)
423 # print child, self.module, entrypath
498 # print child, self.module, entrypath
424 if entrypath:
499 if entrypath:
425 # Need to filter out directories here...
500 # Need to filter out directories here...
426 kind = svn.ra.check_path(self.ra, entrypath, revnum)
501 kind = svn.ra.check_path(self.ra, entrypath, revnum)
427 if kind != svn.core.svn_node_dir:
502 if kind != svn.core.svn_node_dir:
428 entries.append(self.recode(entrypath))
503 entries.append(self.recode(entrypath))
429
504
430 # Copies here (must copy all from source)
505 # Copies here (must copy all from source)
431 # Probably not a real problem for us if
506 # Probably not a real problem for us if
432 # source does not exist
507 # source does not exist
433
508
434 # Can do this with the copy command "hg copy"
509 # Can do this with the copy command "hg copy"
435 # if ent.copyfrom_path:
510 # if ent.copyfrom_path:
436 # copyfrom_entry = get_entry_from_path(ent.copyfrom_path.decode(self.encoding),
511 # copyfrom_entry = get_entry_from_path(ent.copyfrom_path.decode(self.encoding),
437 # module=self.module)
512 # module=self.module)
438 # copyto_entry = entrypath
513 # copyto_entry = entrypath
439 #
514 #
440 # print "copy directory", copyfrom_entry, 'to', copyto_entry
515 # print "copy directory", copyfrom_entry, 'to', copyto_entry
441 #
516 #
442 # copies.append((copyfrom_entry, copyto_entry))
517 # copies.append((copyfrom_entry, copyto_entry))
443
518
444 if ent.copyfrom_path:
519 if ent.copyfrom_path:
445 copyfrom_path = ent.copyfrom_path.decode(self.encoding)
520 copyfrom_path = ent.copyfrom_path.decode(self.encoding)
446 copyfrom_entry = get_entry_from_path(copyfrom_path, module=self.module)
521 copyfrom_entry = get_entry_from_path(copyfrom_path, module=self.module)
447 if copyfrom_entry:
522 if copyfrom_entry:
448 copyfrom[path] = ent
523 copyfrom[path] = ent
449 self.ui.debug("mark %s came from %s\n" % (path, copyfrom[path]))
524 self.ui.debug("mark %s came from %s\n" % (path, copyfrom[path]))
450
525
451 # Good, /probably/ a regular copy. Really should check
526 # Good, /probably/ a regular copy. Really should check
452 # to see whether the parent revision actually contains
527 # to see whether the parent revision actually contains
453 # the directory in question.
528 # the directory in question.
454 children = self._find_children(self.recode(copyfrom_path), ent.copyfrom_rev)
529 children = self._find_children(self.recode(copyfrom_path), ent.copyfrom_rev)
455 children.sort()
530 children.sort()
456 for child in children:
531 for child in children:
457 entrypath = get_entry_from_path("/" + child, module=self.module)
532 entrypath = get_entry_from_path("/" + child, module=self.module)
458 if entrypath:
533 if entrypath:
459 entry = entrypath.decode(self.encoding)
534 entry = entrypath.decode(self.encoding)
460 # print "COPY COPY From", copyfrom_entry, entry
535 # print "COPY COPY From", copyfrom_entry, entry
461 copyto_path = path + entry[len(copyfrom_entry):]
536 copyto_path = path + entry[len(copyfrom_entry):]
462 copyto_entry = get_entry_from_path(copyto_path, module=self.module)
537 copyto_entry = get_entry_from_path(copyto_path, module=self.module)
463 # print "COPY", entry, "COPY To", copyto_entry
538 # print "COPY", entry, "COPY To", copyto_entry
464 copies[self.recode(copyto_entry)] = self.recode(entry)
539 copies[self.recode(copyto_entry)] = self.recode(entry)
465 # copy from quux splort/quuxfile
540 # copy from quux splort/quuxfile
466
541
467 self.modulemap[revnum] = self.module # track backwards in time
542 self.modulemap[revnum] = self.module # track backwards in time
468 # a list of (filename, id) where id lets us retrieve the file.
543 # a list of (filename, id) where id lets us retrieve the file.
469 # eg in git, id is the object hash. for svn it'll be the
544 # eg in git, id is the object hash. for svn it'll be the
470 self.files[rev] = zip(entries, [rev] * len(entries))
545 self.files[rev] = zip(entries, [rev] * len(entries))
471 if not entries:
546 if not entries:
472 return
547 return
473
548
474 # Example SVN datetime. Includes microseconds.
549 # Example SVN datetime. Includes microseconds.
475 # ISO-8601 conformant
550 # ISO-8601 conformant
476 # '2007-01-04T17:35:00.902377Z'
551 # '2007-01-04T17:35:00.902377Z'
477 date = util.parsedate(date[:18] + " UTC", ["%Y-%m-%dT%H:%M:%S"])
552 date = util.parsedate(date[:18] + " UTC", ["%Y-%m-%dT%H:%M:%S"])
478
553
479 log = message and self.recode(message)
554 log = message and self.recode(message)
480 author = author and self.recode(author) or ''
555 author = author and self.recode(author) or ''
481
556
482 cset = commit(author=author,
557 cset = commit(author=author,
483 date=util.datestr(date),
558 date=util.datestr(date),
484 desc=log,
559 desc=log,
485 parents=parents,
560 parents=parents,
486 copies=copies,
561 copies=copies,
487 branch=branch,
562 branch=branch,
488 rev=rev.encode('utf-8'))
563 rev=rev.encode('utf-8'))
489
564
490 self.commits[rev] = cset
565 self.commits[rev] = cset
491 if self.child_cset and not self.child_cset.parents:
566 if self.child_cset and not self.child_cset.parents:
492 self.child_cset.parents = [rev]
567 self.child_cset.parents = [rev]
493 self.child_cset = cset
568 self.child_cset = cset
494
569
495 self.ui.note('fetching revision log for "%s" from %d to %d\n' % \
570 self.ui.note('fetching revision log for "%s" from %d to %d\n' %
496 (self.module, from_revnum, to_revnum))
571 (self.module, from_revnum, to_revnum))
497
572
498 try:
573 try:
499 discover_changed_paths = True
574 discover_changed_paths = True
500 strict_node_history = False
575 strict_node_history = False
501 svn.ra.get_log(self.ra, [self.module], from_revnum, to_revnum, 0,
576 for entry in self.get_log([self.module], from_revnum, to_revnum):
502 discover_changed_paths, strict_node_history,
577 orig_paths, revnum, author, date, message = entry
503 receivelog)
578 if self.is_blacklisted(revnum):
504 for entry in received:
579 self.ui.note('skipping blacklisted revision %d\n' % revnum)
505 parselogentry(*entry)
580 continue
581 if orig_paths is None:
582 self.ui.debug('revision %d has no entries\n' % revnum)
583 continue
584 parselogentry(orig_paths, revnum, author, date, message)
506 except SubversionException, (_, num):
585 except SubversionException, (_, num):
507 if num == svn.core.SVN_ERR_FS_NO_SUCH_REVISION:
586 if num == svn.core.SVN_ERR_FS_NO_SUCH_REVISION:
508 raise NoSuchRevision(branch=self,
587 raise NoSuchRevision(branch=self,
509 revision="Revision number %d" % to_revnum)
588 revision="Revision number %d" % to_revnum)
510 raise
589 raise
511
590
512 def _getfile(self, file, rev):
591 def _getfile(self, file, rev):
513 io = StringIO()
592 io = StringIO()
514 # TODO: ra.get_file transmits the whole file instead of diffs.
593 # TODO: ra.get_file transmits the whole file instead of diffs.
515 mode = ''
594 mode = ''
516 try:
595 try:
517 revnum = self.revnum(rev)
596 revnum = self.revnum(rev)
518 if self.module != self.modulemap[revnum]:
597 if self.module != self.modulemap[revnum]:
519 self.module = self.modulemap[revnum]
598 self.module = self.modulemap[revnum]
520 self.reparent(self.module)
599 self.reparent(self.module)
521 info = svn.ra.get_file(self.ra, file, revnum, io)
600 info = svn.ra.get_file(self.ra, file, revnum, io)
522 if isinstance(info, list):
601 if isinstance(info, list):
523 info = info[-1]
602 info = info[-1]
524 mode = ("svn:executable" in info) and 'x' or ''
603 mode = ("svn:executable" in info) and 'x' or ''
525 mode = ("svn:special" in info) and 'l' or mode
604 mode = ("svn:special" in info) and 'l' or mode
526 except SubversionException, e:
605 except SubversionException, e:
527 notfound = (svn.core.SVN_ERR_FS_NOT_FOUND,
606 notfound = (svn.core.SVN_ERR_FS_NOT_FOUND,
528 svn.core.SVN_ERR_RA_DAV_PATH_NOT_FOUND)
607 svn.core.SVN_ERR_RA_DAV_PATH_NOT_FOUND)
529 if e.apr_err in notfound: # File not found
608 if e.apr_err in notfound: # File not found
530 raise IOError()
609 raise IOError()
531 raise
610 raise
532 data = io.getvalue()
611 data = io.getvalue()
533 if mode == 'l':
612 if mode == 'l':
534 link_prefix = "link "
613 link_prefix = "link "
535 if data.startswith(link_prefix):
614 if data.startswith(link_prefix):
536 data = data[len(link_prefix):]
615 data = data[len(link_prefix):]
537 return data, mode
616 return data, mode
538
617
539 def _find_children(self, path, revnum):
618 def _find_children(self, path, revnum):
540 path = path.strip("/")
619 path = path.strip("/")
541
620
542 def _find_children_fallback(path, revnum):
621 def _find_children_fallback(path, revnum):
543 # SWIG python bindings for getdir are broken up to at least 1.4.3
622 # SWIG python bindings for getdir are broken up to at least 1.4.3
544 pool = Pool()
623 pool = Pool()
545 optrev = svn.core.svn_opt_revision_t()
624 optrev = svn.core.svn_opt_revision_t()
546 optrev.kind = svn.core.svn_opt_revision_number
625 optrev.kind = svn.core.svn_opt_revision_number
547 optrev.value.number = revnum
626 optrev.value.number = revnum
548 rpath = '/'.join([self.base, path]).strip('/')
627 rpath = '/'.join([self.base, path]).strip('/')
549 return ['%s/%s' % (path, x) for x in svn.client.ls(rpath, optrev, True, self.ctx, pool).keys()]
628 return ['%s/%s' % (path, x) for x in svn.client.ls(rpath, optrev, True, self.ctx, pool).keys()]
550
629
551 if hasattr(self, '_find_children_fallback'):
630 if hasattr(self, '_find_children_fallback'):
552 return _find_children_fallback(path, revnum)
631 return _find_children_fallback(path, revnum)
553
632
554 self.reparent("/" + path)
633 self.reparent("/" + path)
555 pool = Pool()
634 pool = Pool()
556
635
557 children = []
636 children = []
558 def find_children_inner(children, path, revnum = revnum):
637 def find_children_inner(children, path, revnum = revnum):
559 if hasattr(svn.ra, 'get_dir2'): # Since SVN 1.4
638 if hasattr(svn.ra, 'get_dir2'): # Since SVN 1.4
560 fields = 0xffffffff # Binding does not provide SVN_DIRENT_ALL
639 fields = 0xffffffff # Binding does not provide SVN_DIRENT_ALL
561 getdir = svn.ra.get_dir2(self.ra, path, revnum, fields, pool)
640 getdir = svn.ra.get_dir2(self.ra, path, revnum, fields, pool)
562 else:
641 else:
563 getdir = svn.ra.get_dir(self.ra, path, revnum, pool)
642 getdir = svn.ra.get_dir(self.ra, path, revnum, pool)
564 if type(getdir) == dict:
643 if type(getdir) == dict:
565 # python binding for getdir is broken up to at least 1.4.3
644 # python binding for getdir is broken up to at least 1.4.3
566 raise CompatibilityException()
645 raise CompatibilityException()
567 dirents = getdir[0]
646 dirents = getdir[0]
568 if type(dirents) == int:
647 if type(dirents) == int:
569 # got here once due to infinite recursion bug
648 # got here once due to infinite recursion bug
570 # pprint.pprint(getdir)
571 return
649 return
572 c = dirents.keys()
650 c = dirents.keys()
573 c.sort()
651 c.sort()
574 for child in c:
652 for child in c:
575 dirent = dirents[child]
653 dirent = dirents[child]
576 if dirent.kind == svn.core.svn_node_dir:
654 if dirent.kind == svn.core.svn_node_dir:
577 find_children_inner(children, (path + "/" + child).strip("/"))
655 find_children_inner(children, (path + "/" + child).strip("/"))
578 else:
656 else:
579 children.append((path + "/" + child).strip("/"))
657 children.append((path + "/" + child).strip("/"))
580
658
581 try:
659 try:
582 find_children_inner(children, "")
660 find_children_inner(children, "")
583 except CompatibilityException:
661 except CompatibilityException:
584 self._find_children_fallback = True
662 self._find_children_fallback = True
585 self.reparent(self.module)
663 self.reparent(self.module)
586 return _find_children_fallback(path, revnum)
664 return _find_children_fallback(path, revnum)
587
665
588 self.reparent(self.module)
666 self.reparent(self.module)
589 return [path + "/" + c for c in children]
667 return [path + "/" + c for c in children]
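The _find_children method above probes for svn.ra.get_dir2 and, once a CompatibilityException shows the bindings are too old, remembers that by setting self._find_children_fallback so later calls go straight to the slower svn.client.ls path. Below is a minimal, self-contained sketch of that probe-once-then-remember-the-fallback idea; the Lister class, IncompatibleBinding exception and the fake listing results are invented for illustration and need no SVN bindings at all.

class IncompatibleBinding(Exception):
    pass

class Lister(object):
    # hypothetical stand-in for the converter's _find_children logic
    def _list_fast(self, path):
        # pretend the newer, faster API is unavailable on this install
        raise IncompatibleBinding(path)

    def _list_fallback(self, path):
        # slow but portable path (the real code falls back to svn.client.ls)
        return ['%s/child-from-fallback' % path.strip('/')]

    def list(self, path):
        # once the fast path has failed, skip the probe on later calls
        if getattr(self, '_use_fallback', False):
            return self._list_fallback(path)
        try:
            return self._list_fast(path)
        except IncompatibleBinding:
            self._use_fallback = True
            return self._list_fallback(path)

if __name__ == '__main__':
    l = Lister()
    print(l.list('trunk/dir'))    # first call trips the fallback
    print(l.list('trunk/other'))  # later calls go straight to it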
@@ -1,134 +1,125 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2007 Daniel Holth <dholth@fastmail.fm>
3 # Copyright (C) 2007 Daniel Holth <dholth@fastmail.fm>
4 # This is a stripped-down version of the original bzr-svn transport.py,
4 # This is a stripped-down version of the original bzr-svn transport.py,
5 # Copyright (C) 2006 Jelmer Vernooij <jelmer@samba.org>
5 # Copyright (C) 2006 Jelmer Vernooij <jelmer@samba.org>
6
6
7 # This program is free software; you can redistribute it and/or modify
7 # This program is free software; you can redistribute it and/or modify
8 # it under the terms of the GNU General Public License as published by
8 # it under the terms of the GNU General Public License as published by
9 # the Free Software Foundation; either version 2 of the License, or
9 # the Free Software Foundation; either version 2 of the License, or
10 # (at your option) any later version.
10 # (at your option) any later version.
11
11
12 # This program is distributed in the hope that it will be useful,
12 # This program is distributed in the hope that it will be useful,
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 # GNU General Public License for more details.
15 # GNU General Public License for more details.
16
16
17 # You should have received a copy of the GNU General Public License
17 # You should have received a copy of the GNU General Public License
18 # along with this program; if not, write to the Free Software
18 # along with this program; if not, write to the Free Software
19 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
19 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
20
20
21 from cStringIO import StringIO
21 from cStringIO import StringIO
22 import os
22 import os
23 from tempfile import mktemp
23 from tempfile import mktemp
24
24
25 from svn.core import SubversionException, Pool
25 from svn.core import SubversionException, Pool
26 import svn.ra
26 import svn.ra
27 import svn.client
27 import svn.core
28 import svn.core
28
29
29 # Some older versions of the Python bindings need to be
30 # Some older versions of the Python bindings need to be
30 # explicitly initialized. But what we want to do probably
31 # explicitly initialized. But what we want to do probably
31 # won't work worth a darn against those libraries anyway!
32 # won't work worth a darn against those libraries anyway!
32 svn.ra.initialize()
33 svn.ra.initialize()
33
34
34 svn_config = svn.core.svn_config_get_config(None)
35 svn_config = svn.core.svn_config_get_config(None)
35
36
36
37
37 def _create_auth_baton(pool):
38 def _create_auth_baton(pool):
38 """Create a Subversion authentication baton. """
39 """Create a Subversion authentication baton. """
39 import svn.client
40 import svn.client
40 # Give the client context baton a suite of authentication
41 # Give the client context baton a suite of authentication
41 # providers.
42 # providers.
42 providers = [
43 providers = [
43 svn.client.get_simple_provider(pool),
44 svn.client.get_simple_provider(pool),
44 svn.client.get_username_provider(pool),
45 svn.client.get_username_provider(pool),
45 svn.client.get_ssl_client_cert_file_provider(pool),
46 svn.client.get_ssl_client_cert_file_provider(pool),
46 svn.client.get_ssl_client_cert_pw_file_provider(pool),
47 svn.client.get_ssl_client_cert_pw_file_provider(pool),
47 svn.client.get_ssl_server_trust_file_provider(pool),
48 svn.client.get_ssl_server_trust_file_provider(pool),
48 ]
49 ]
49 return svn.core.svn_auth_open(providers, pool)
50 return svn.core.svn_auth_open(providers, pool)
50
51
51
52 # # The SVN libraries don't like trailing slashes...
53 # return url.rstrip('/')
54
55
56 class SvnRaCallbacks(svn.ra.callbacks2_t):
57 """Remote access callbacks implementation for bzr-svn."""
58 def __init__(self, pool):
59 svn.ra.callbacks2_t.__init__(self)
60 self.auth_baton = _create_auth_baton(pool)
61 self.pool = pool
62
63 def open_tmp_file(self, pool):
64 return mktemp(prefix='tailor-svn')
65
66 class NotBranchError(SubversionException):
52 class NotBranchError(SubversionException):
67 pass
53 pass
68
54
69 class SvnRaTransport(object):
55 class SvnRaTransport(object):
70 """
56 """
71 Open an ra connection to a Subversion repository.
57 Open an ra connection to a Subversion repository.
72 """
58 """
73 def __init__(self, url="", ra=None):
59 def __init__(self, url="", ra=None):
74 self.pool = Pool()
60 self.pool = Pool()
75 self.svn_url = url
61 self.svn_url = url
62 self.username = ''
63 self.password = ''
76
64
77 # Only Subversion 1.4 has reparent()
65 # Only Subversion 1.4 has reparent()
78 if ra is None or not hasattr(svn.ra, 'reparent'):
66 if ra is None or not hasattr(svn.ra, 'reparent'):
79 self.callbacks = SvnRaCallbacks(self.pool)
67 self.client = svn.client.create_context(self.pool)
68 ab = _create_auth_baton(self.pool)
69 if False:
70 svn.core.svn_auth_set_parameter(
71 ab, svn.core.SVN_AUTH_PARAM_DEFAULT_USERNAME, self.username)
72 svn.core.svn_auth_set_parameter(
73 ab, svn.core.SVN_AUTH_PARAM_DEFAULT_PASSWORD, self.password)
74 self.client.auth_baton = ab
75 self.client.config = svn_config
80 try:
76 try:
81 ver = svn.ra.version()
77 self.ra = svn.client.open_ra_session(
82 try: # Older SVN bindings
78 self.svn_url.encode('utf8'),
83 self.ra = svn.ra.open2(self.svn_url.encode('utf8'), self.callbacks, None, svn_config, None)
79 self.client, self.pool)
84 except TypeError, e:
85 self.ra = svn.ra.open2(self.svn_url.encode('utf8'), self.callbacks, svn_config, None)
86 except SubversionException, (_, num):
80 except SubversionException, (_, num):
87 if num == svn.core.SVN_ERR_RA_ILLEGAL_URL:
81 if num in (svn.core.SVN_ERR_RA_ILLEGAL_URL,
88 raise NotBranchError(url)
82 svn.core.SVN_ERR_RA_LOCAL_REPOS_OPEN_FAILED,
89 if num == svn.core.SVN_ERR_RA_LOCAL_REPOS_OPEN_FAILED:
83 svn.core.SVN_ERR_BAD_URL):
90 raise NotBranchError(url)
91 if num == svn.core.SVN_ERR_BAD_URL:
92 raise NotBranchError(url)
84 raise NotBranchError(url)
93 raise
85 raise
94
95 else:
86 else:
96 self.ra = ra
87 self.ra = ra
97 svn.ra.reparent(self.ra, self.svn_url.encode('utf8'))
88 svn.ra.reparent(self.ra, self.svn_url.encode('utf8'))
98
89
99 class Reporter:
90 class Reporter:
100 def __init__(self, (reporter, report_baton)):
91 def __init__(self, (reporter, report_baton)):
101 self._reporter = reporter
92 self._reporter = reporter
102 self._baton = report_baton
93 self._baton = report_baton
103
94
104 def set_path(self, path, revnum, start_empty, lock_token, pool=None):
95 def set_path(self, path, revnum, start_empty, lock_token, pool=None):
105 svn.ra.reporter2_invoke_set_path(self._reporter, self._baton,
96 svn.ra.reporter2_invoke_set_path(self._reporter, self._baton,
106 path, revnum, start_empty, lock_token, pool)
97 path, revnum, start_empty, lock_token, pool)
107
98
108 def delete_path(self, path, pool=None):
99 def delete_path(self, path, pool=None):
109 svn.ra.reporter2_invoke_delete_path(self._reporter, self._baton,
100 svn.ra.reporter2_invoke_delete_path(self._reporter, self._baton,
110 path, pool)
101 path, pool)
111
102
112 def link_path(self, path, url, revision, start_empty, lock_token,
103 def link_path(self, path, url, revision, start_empty, lock_token,
113 pool=None):
104 pool=None):
114 svn.ra.reporter2_invoke_link_path(self._reporter, self._baton,
105 svn.ra.reporter2_invoke_link_path(self._reporter, self._baton,
115 path, url, revision, start_empty, lock_token,
106 path, url, revision, start_empty, lock_token,
116 pool)
107 pool)
117
108
118 def finish_report(self, pool=None):
109 def finish_report(self, pool=None):
119 svn.ra.reporter2_invoke_finish_report(self._reporter,
110 svn.ra.reporter2_invoke_finish_report(self._reporter,
120 self._baton, pool)
111 self._baton, pool)
121
112
122 def abort_report(self, pool=None):
113 def abort_report(self, pool=None):
123 svn.ra.reporter2_invoke_abort_report(self._reporter,
114 svn.ra.reporter2_invoke_abort_report(self._reporter,
124 self._baton, pool)
115 self._baton, pool)
125
116
126 def do_update(self, revnum, path, *args, **kwargs):
117 def do_update(self, revnum, path, *args, **kwargs):
127 return self.Reporter(svn.ra.do_update(self.ra, revnum, path, *args, **kwargs))
118 return self.Reporter(svn.ra.do_update(self.ra, revnum, path, *args, **kwargs))
128
119
129 def clone(self, offset=None):
120 def clone(self, offset=None):
130 """See Transport.clone()."""
121 """See Transport.clone()."""
131 if offset is None:
122 if offset is None:
132 return self.__class__(self.base)
123 return self.__class__(self.base)
133
124
134 return SvnRaTransport(urlutils.join(self.base, offset), ra=self.ra)
125 return SvnRaTransport(urlutils.join(self.base, offset), ra=self.ra)
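SvnRaTransport.__init__ above folds what used to be three separate error-code checks into a single membership test before raising NotBranchError. The following is a small sketch of that idiom only, with made-up error codes and an OpenError stand-in instead of the real SubversionException and svn.core.SVN_ERR_* constants.

# made-up error codes standing in for svn.core.SVN_ERR_* values
ERR_ILLEGAL_URL = 170000
ERR_LOCAL_REPOS_OPEN_FAILED = 180001
ERR_BAD_URL = 125002

_NOT_A_BRANCH = (ERR_ILLEGAL_URL, ERR_LOCAL_REPOS_OPEN_FAILED, ERR_BAD_URL)

class OpenError(Exception):
    # stand-in for an exception carrying a numeric error code
    def __init__(self, num):
        Exception.__init__(self, 'error %d' % num)
        self.num = num

class NotBranchError(Exception):
    pass

def open_ra(url, opener):
    try:
        return opener(url)
    except OpenError as e:
        # anything in the "not a branch" family becomes one domain error,
        # everything else propagates unchanged
        if e.num in _NOT_A_BRANCH:
            raise NotBranchError(url)
        raise

if __name__ == '__main__':
    def broken(url):
        raise OpenError(ERR_BAD_URL)
    try:
        open_ra('file:///nowhere', broken)
    except NotBranchError as e:
        print('not a branch: %s' % e)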
@@ -1,219 +1,220 b''
1 # archival.py - revision archival for mercurial
1 # archival.py - revision archival for mercurial
2 #
2 #
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of
5 # This software may be used and distributed according to the terms of
6 # the GNU General Public License, incorporated herein by reference.
6 # the GNU General Public License, incorporated herein by reference.
7
7
8 from i18n import _
8 from i18n import _
9 from node import *
9 from node import *
10 import cStringIO, os, stat, tarfile, time, util, zipfile
10 import cStringIO, os, stat, tarfile, time, util, zipfile
11 import zlib, gzip
11 import zlib, gzip
12
12
13 def tidyprefix(dest, prefix, suffixes):
13 def tidyprefix(dest, prefix, suffixes):
14 '''choose prefix to use for names in archive. make sure prefix is
14 '''choose prefix to use for names in archive. make sure prefix is
15 safe for consumers.'''
15 safe for consumers.'''
16
16
17 if prefix:
17 if prefix:
18 prefix = prefix.replace('\\', '/')
18 prefix = prefix.replace('\\', '/')
19 else:
19 else:
20 if not isinstance(dest, str):
20 if not isinstance(dest, str):
21 raise ValueError('dest must be string if no prefix')
21 raise ValueError('dest must be string if no prefix')
22 prefix = os.path.basename(dest)
22 prefix = os.path.basename(dest)
23 lower = prefix.lower()
23 lower = prefix.lower()
24 for sfx in suffixes:
24 for sfx in suffixes:
25 if lower.endswith(sfx):
25 if lower.endswith(sfx):
26 prefix = prefix[:-len(sfx)]
26 prefix = prefix[:-len(sfx)]
27 break
27 break
28 lpfx = os.path.normpath(util.localpath(prefix))
28 lpfx = os.path.normpath(util.localpath(prefix))
29 prefix = util.pconvert(lpfx)
29 prefix = util.pconvert(lpfx)
30 if not prefix.endswith('/'):
30 if not prefix.endswith('/'):
31 prefix += '/'
31 prefix += '/'
32 if prefix.startswith('../') or os.path.isabs(lpfx) or '/../' in prefix:
32 if prefix.startswith('../') or os.path.isabs(lpfx) or '/../' in prefix:
33 raise util.Abort(_('archive prefix contains illegal components'))
33 raise util.Abort(_('archive prefix contains illegal components'))
34 return prefix
34 return prefix
35
35
36 class tarit:
36 class tarit:
37 '''write archive to tar file or stream. can write uncompressed,
37 '''write archive to tar file or stream. can write uncompressed,
38 or compress with gzip or bzip2.'''
38 or compress with gzip or bzip2.'''
39
39
40 class GzipFileWithTime(gzip.GzipFile):
40 class GzipFileWithTime(gzip.GzipFile):
41
41
42 def __init__(self, *args, **kw):
42 def __init__(self, *args, **kw):
43 timestamp = None
43 timestamp = None
44 if 'timestamp' in kw:
44 if 'timestamp' in kw:
45 timestamp = kw.pop('timestamp')
45 timestamp = kw.pop('timestamp')
46 if timestamp == None:
46 if timestamp == None:
47 self.timestamp = time.time()
47 self.timestamp = time.time()
48 else:
48 else:
49 self.timestamp = timestamp
49 self.timestamp = timestamp
50 gzip.GzipFile.__init__(self, *args, **kw)
50 gzip.GzipFile.__init__(self, *args, **kw)
51
51
52 def _write_gzip_header(self):
52 def _write_gzip_header(self):
53 self.fileobj.write('\037\213') # magic header
53 self.fileobj.write('\037\213') # magic header
54 self.fileobj.write('\010') # compression method
54 self.fileobj.write('\010') # compression method
55 fname = self.filename[:-3]
55 fname = self.filename[:-3]
56 flags = 0
56 flags = 0
57 if fname:
57 if fname:
58 flags = gzip.FNAME
58 flags = gzip.FNAME
59 self.fileobj.write(chr(flags))
59 self.fileobj.write(chr(flags))
60 gzip.write32u(self.fileobj, long(self.timestamp))
60 gzip.write32u(self.fileobj, long(self.timestamp))
61 self.fileobj.write('\002')
61 self.fileobj.write('\002')
62 self.fileobj.write('\377')
62 self.fileobj.write('\377')
63 if fname:
63 if fname:
64 self.fileobj.write(fname + '\000')
64 self.fileobj.write(fname + '\000')
65
65
66 def __init__(self, dest, prefix, mtime, kind=''):
66 def __init__(self, dest, prefix, mtime, kind=''):
67 self.prefix = tidyprefix(dest, prefix, ['.tar', '.tar.bz2', '.tar.gz',
67 self.prefix = tidyprefix(dest, prefix, ['.tar', '.tar.bz2', '.tar.gz',
68 '.tgz', '.tbz2'])
68 '.tgz', '.tbz2'])
69 self.mtime = mtime
69 self.mtime = mtime
70
70
71 def taropen(name, mode, fileobj=None):
71 def taropen(name, mode, fileobj=None):
72 if kind == 'gz':
72 if kind == 'gz':
73 mode = mode[0]
73 mode = mode[0]
74 if not fileobj:
74 if not fileobj:
75 fileobj = open(name, mode + 'b')
75 fileobj = open(name, mode + 'b')
76 gzfileobj = self.GzipFileWithTime(name, mode + 'b',
76 gzfileobj = self.GzipFileWithTime(name, mode + 'b',
77 zlib.Z_BEST_COMPRESSION,
77 zlib.Z_BEST_COMPRESSION,
78 fileobj, timestamp=mtime)
78 fileobj, timestamp=mtime)
79 return tarfile.TarFile.taropen(name, mode, gzfileobj)
79 return tarfile.TarFile.taropen(name, mode, gzfileobj)
80 else:
80 else:
81 return tarfile.open(name, mode + kind, fileobj)
81 return tarfile.open(name, mode + kind, fileobj)
82
82
83 if isinstance(dest, str):
83 if isinstance(dest, str):
84 self.z = taropen(dest, mode='w:')
84 self.z = taropen(dest, mode='w:')
85 else:
85 else:
86 # Python 2.5-2.5.1 have a regression that requires a name arg
86 # Python 2.5-2.5.1 have a regression that requires a name arg
87 self.z = taropen(name='', mode='w|', fileobj=dest)
87 self.z = taropen(name='', mode='w|', fileobj=dest)
88
88
89 def addfile(self, name, mode, islink, data):
89 def addfile(self, name, mode, islink, data):
90 i = tarfile.TarInfo(self.prefix + name)
90 i = tarfile.TarInfo(self.prefix + name)
91 i.mtime = self.mtime
91 i.mtime = self.mtime
92 i.size = len(data)
92 i.size = len(data)
93 if islink:
93 if islink:
94 i.type = tarfile.SYMTYPE
94 i.type = tarfile.SYMTYPE
95 i.mode = 0777
95 i.mode = 0777
96 i.linkname = data
96 i.linkname = data
97 data = None
97 data = None
98 else:
98 else:
99 i.mode = mode
99 i.mode = mode
100 data = cStringIO.StringIO(data)
100 data = cStringIO.StringIO(data)
101 self.z.addfile(i, data)
101 self.z.addfile(i, data)
102
102
103 def done(self):
103 def done(self):
104 self.z.close()
104 self.z.close()
105
105
106 class tellable:
106 class tellable:
107 '''provide tell method for zipfile.ZipFile when writing to http
107 '''provide tell method for zipfile.ZipFile when writing to http
108 response file object.'''
108 response file object.'''
109
109
110 def __init__(self, fp):
110 def __init__(self, fp):
111 self.fp = fp
111 self.fp = fp
112 self.offset = 0
112 self.offset = 0
113
113
114 def __getattr__(self, key):
114 def __getattr__(self, key):
115 return getattr(self.fp, key)
115 return getattr(self.fp, key)
116
116
117 def write(self, s):
117 def write(self, s):
118 self.fp.write(s)
118 self.fp.write(s)
119 self.offset += len(s)
119 self.offset += len(s)
120
120
121 def tell(self):
121 def tell(self):
122 return self.offset
122 return self.offset
123
123
124 class zipit:
124 class zipit:
125 '''write archive to zip file or stream. can write uncompressed,
125 '''write archive to zip file or stream. can write uncompressed,
126 or compressed with deflate.'''
126 or compressed with deflate.'''
127
127
128 def __init__(self, dest, prefix, mtime, compress=True):
128 def __init__(self, dest, prefix, mtime, compress=True):
129 self.prefix = tidyprefix(dest, prefix, ('.zip',))
129 self.prefix = tidyprefix(dest, prefix, ('.zip',))
130 if not isinstance(dest, str):
130 if not isinstance(dest, str):
131 try:
131 try:
132 dest.tell()
132 dest.tell()
133 except (AttributeError, IOError):
133 except (AttributeError, IOError):
134 dest = tellable(dest)
134 dest = tellable(dest)
135 self.z = zipfile.ZipFile(dest, 'w',
135 self.z = zipfile.ZipFile(dest, 'w',
136 compress and zipfile.ZIP_DEFLATED or
136 compress and zipfile.ZIP_DEFLATED or
137 zipfile.ZIP_STORED)
137 zipfile.ZIP_STORED)
138 self.date_time = time.gmtime(mtime)[:6]
138 self.date_time = time.gmtime(mtime)[:6]
139
139
140 def addfile(self, name, mode, islink, data):
140 def addfile(self, name, mode, islink, data):
141 i = zipfile.ZipInfo(self.prefix + name, self.date_time)
141 i = zipfile.ZipInfo(self.prefix + name, self.date_time)
142 i.compress_type = self.z.compression
142 i.compress_type = self.z.compression
143 # unzip will not honor unix file modes unless file creator is
143 # unzip will not honor unix file modes unless file creator is
144 # set to unix (id 3).
144 # set to unix (id 3).
145 i.create_system = 3
145 i.create_system = 3
146 ftype = stat.S_IFREG
146 ftype = stat.S_IFREG
147 if islink:
147 if islink:
148 mode = 0777
148 mode = 0777
149 ftype = stat.S_IFLNK
149 ftype = stat.S_IFLNK
150 i.external_attr = (mode | ftype) << 16L
150 i.external_attr = (mode | ftype) << 16L
151 self.z.writestr(i, data)
151 self.z.writestr(i, data)
152
152
153 def done(self):
153 def done(self):
154 self.z.close()
154 self.z.close()
155
155
156 class fileit:
156 class fileit:
157 '''write archive as files in directory.'''
157 '''write archive as files in directory.'''
158
158
159 def __init__(self, name, prefix, mtime):
159 def __init__(self, name, prefix, mtime):
160 if prefix:
160 if prefix:
161 raise util.Abort(_('cannot give prefix when archiving to files'))
161 raise util.Abort(_('cannot give prefix when archiving to files'))
162 self.basedir = name
162 self.basedir = name
163 self.opener = util.opener(self.basedir)
163 self.opener = util.opener(self.basedir)
164
164
165 def addfile(self, name, mode, islink, data):
165 def addfile(self, name, mode, islink, data):
166 if islink:
166 if islink:
167 self.opener.symlink(data, name)
167 self.opener.symlink(data, name)
168 return
168 return
169 f = self.opener(name, "w", atomictemp=True)
169 f = self.opener(name, "w", atomictemp=True)
170 f.write(data)
170 f.write(data)
171 f.rename()
171 f.rename()
172 destfile = os.path.join(self.basedir, name)
172 destfile = os.path.join(self.basedir, name)
173 os.chmod(destfile, mode)
173 os.chmod(destfile, mode)
174
174
175 def done(self):
175 def done(self):
176 pass
176 pass
177
177
178 archivers = {
178 archivers = {
179 'files': fileit,
179 'files': fileit,
180 'tar': tarit,
180 'tar': tarit,
181 'tbz2': lambda name, prefix, mtime: tarit(name, prefix, mtime, 'bz2'),
181 'tbz2': lambda name, prefix, mtime: tarit(name, prefix, mtime, 'bz2'),
182 'tgz': lambda name, prefix, mtime: tarit(name, prefix, mtime, 'gz'),
182 'tgz': lambda name, prefix, mtime: tarit(name, prefix, mtime, 'gz'),
183 'uzip': lambda name, prefix, mtime: zipit(name, prefix, mtime, False),
183 'uzip': lambda name, prefix, mtime: zipit(name, prefix, mtime, False),
184 'zip': zipit,
184 'zip': zipit,
185 }
185 }
186
186
187 def archive(repo, dest, node, kind, decode=True, matchfn=None,
187 def archive(repo, dest, node, kind, decode=True, matchfn=None,
188 prefix=None, mtime=None):
188 prefix=None, mtime=None):
189 '''create archive of repo as it was at node.
189 '''create archive of repo as it was at node.
190
190
191 dest can be name of directory, name of archive file, or file
191 dest can be name of directory, name of archive file, or file
192 object to write archive to.
192 object to write archive to.
193
193
194 kind is type of archive to create.
194 kind is type of archive to create.
195
195
196 decode tells whether to put files through decode filters from
196 decode tells whether to put files through decode filters from
197 hgrc.
197 hgrc.
198
198
199 matchfn is function to filter names of files to write to archive.
199 matchfn is function to filter names of files to write to archive.
200
200
201 prefix is name of path to put before every archive member.'''
201 prefix is name of path to put before every archive member.'''
202
202
203 def write(name, mode, islink, data):
203 def write(name, mode, islink, getdata):
204 if matchfn and not matchfn(name): return
204 if matchfn and not matchfn(name): return
205 data = getdata()
205 if decode:
206 if decode:
206 data = repo.wwritedata(name, data)
207 data = repo.wwritedata(name, data)
207 archiver.addfile(name, mode, islink, data)
208 archiver.addfile(name, mode, islink, data)
208
209
209 ctx = repo.changectx(node)
210 ctx = repo.changectx(node)
210 archiver = archivers[kind](dest, prefix, mtime or ctx.date()[0])
211 archiver = archivers[kind](dest, prefix, mtime or ctx.date()[0])
211 m = ctx.manifest()
212 m = ctx.manifest()
212 items = m.items()
213 items = m.items()
213 items.sort()
214 items.sort()
214 write('.hg_archival.txt', 0644, False,
215 write('.hg_archival.txt', 0644, False,
215 'repo: %s\nnode: %s\n' % (hex(repo.changelog.node(0)), hex(node)))
216 lambda: 'repo: %s\nnode: %s\n' % (hex(repo.changelog.node(0)), hex(node)))
216 for filename, filenode in items:
217 for filename, filenode in items:
217 write(filename, m.execf(filename) and 0755 or 0644, m.linkf(filename),
218 write(filename, m.execf(filename) and 0755 or 0644, m.linkf(filename),
218 repo.file(filename).read(filenode))
219 lambda: repo.file(filename).read(filenode))
219 archiver.done()
220 archiver.done()
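The archive() change above switches write() from taking file data to taking a getdata callable, so entries rejected by matchfn are never read from the repository at all. Here is a short, self-contained sketch of that lazy-read pattern; the store dict and the expensive_read/write_archive names are invented for illustration and are not part of Mercurial's API.

def expensive_read(store, name):
    # visible side effect so it is clear when a read actually happens
    print('reading %s' % name)
    return store[name]

def write_archive(store, names, matchfn, emit):
    for name in names:
        # build the thunk first; nothing has been read yet
        getdata = lambda name=name: expensive_read(store, name)
        if matchfn and not matchfn(name):
            continue              # filtered-out entries are never read
        emit(name, getdata())

if __name__ == '__main__':
    store = {'a.txt': 'A', 'b.bin': 'B'}
    def emit(name, data):
        print('%s: %s' % (name, data))
    # only a.txt is read; b.bin is skipped before its thunk is called
    write_archive(store, ['a.txt', 'b.bin'], lambda n: n.endswith('.txt'), emit)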
@@ -1,199 +1,198 b''
1 # changelog.py - changelog class for mercurial
1 # changelog.py - changelog class for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from revlog import *
8 from revlog import *
9 from i18n import _
9 from i18n import _
10 import os, time, util
10 import os, time, util
11
11
12 def _string_escape(text):
12 def _string_escape(text):
13 """
13 """
14 >>> d = {'nl': chr(10), 'bs': chr(92), 'cr': chr(13), 'nul': chr(0)}
14 >>> d = {'nl': chr(10), 'bs': chr(92), 'cr': chr(13), 'nul': chr(0)}
15 >>> s = "ab%(nl)scd%(bs)s%(bs)sn%(nul)sab%(cr)scd%(bs)s%(nl)s" % d
15 >>> s = "ab%(nl)scd%(bs)s%(bs)sn%(nul)sab%(cr)scd%(bs)s%(nl)s" % d
16 >>> s
16 >>> s
17 'ab\\ncd\\\\\\\\n\\x00ab\\rcd\\\\\\n'
17 'ab\\ncd\\\\\\\\n\\x00ab\\rcd\\\\\\n'
18 >>> res = _string_escape(s)
18 >>> res = _string_escape(s)
19 >>> s == _string_unescape(res)
19 >>> s == _string_unescape(res)
20 True
20 True
21 """
21 """
22 # subset of the string_escape codec
22 # subset of the string_escape codec
23 text = text.replace('\\', '\\\\').replace('\n', '\\n').replace('\r', '\\r')
23 text = text.replace('\\', '\\\\').replace('\n', '\\n').replace('\r', '\\r')
24 return text.replace('\0', '\\0')
24 return text.replace('\0', '\\0')
25
25
26 def _string_unescape(text):
26 def _string_unescape(text):
27 return text.decode('string_escape')
27 return text.decode('string_escape')
28
28
29 class appender:
29 class appender:
30 '''the changelog index must be updated last on disk, so we use this class
30 '''the changelog index must be updated last on disk, so we use this class
31 to delay writes to it'''
31 to delay writes to it'''
32 def __init__(self, fp, buf):
32 def __init__(self, fp, buf):
33 self.data = buf
33 self.data = buf
34 self.fp = fp
34 self.fp = fp
35 self.offset = fp.tell()
35 self.offset = fp.tell()
36 self.size = util.fstat(fp).st_size
36 self.size = util.fstat(fp).st_size
37
37
38 def end(self):
38 def end(self):
39 return self.size + len("".join(self.data))
39 return self.size + len("".join(self.data))
40 def tell(self):
40 def tell(self):
41 return self.offset
41 return self.offset
42 def flush(self):
42 def flush(self):
43 pass
43 pass
44 def close(self):
44 def close(self):
45 close(self.fp)
45 self.fp.close()
46
46
47 def seek(self, offset, whence=0):
47 def seek(self, offset, whence=0):
48 '''virtual file offset spans real file and data'''
48 '''virtual file offset spans real file and data'''
49 if whence == 0:
49 if whence == 0:
50 self.offset = offset
50 self.offset = offset
51 elif whence == 1:
51 elif whence == 1:
52 self.offset += offset
52 self.offset += offset
53 elif whence == 2:
53 elif whence == 2:
54 self.offset = self.end() + offset
54 self.offset = self.end() + offset
55 if self.offset < self.size:
55 if self.offset < self.size:
56 self.fp.seek(self.offset)
56 self.fp.seek(self.offset)
57
57
58 def read(self, count=-1):
58 def read(self, count=-1):
59 '''only trick here is reads that span real file and data'''
59 '''only trick here is reads that span real file and data'''
60 ret = ""
60 ret = ""
61 old_offset = self.offset
62 if self.offset < self.size:
61 if self.offset < self.size:
63 s = self.fp.read(count)
62 s = self.fp.read(count)
64 ret = s
63 ret = s
65 self.offset += len(s)
64 self.offset += len(s)
66 if count > 0:
65 if count > 0:
67 count -= len(s)
66 count -= len(s)
68 if count != 0:
67 if count != 0:
69 doff = self.offset - self.size
68 doff = self.offset - self.size
70 self.data.insert(0, "".join(self.data))
69 self.data.insert(0, "".join(self.data))
71 del self.data[1:]
70 del self.data[1:]
72 s = self.data[0][doff:doff+count]
71 s = self.data[0][doff:doff+count]
73 self.offset += len(s)
72 self.offset += len(s)
74 ret += s
73 ret += s
75 return ret
74 return ret
76
75
77 def write(self, s):
76 def write(self, s):
78 self.data.append(s)
77 self.data.append(s)
79 self.offset += len(s)
78 self.offset += len(s)
80
79
81 class changelog(revlog):
80 class changelog(revlog):
82 def __init__(self, opener):
81 def __init__(self, opener):
83 revlog.__init__(self, opener, "00changelog.i")
82 revlog.__init__(self, opener, "00changelog.i")
84
83
85 def delayupdate(self):
84 def delayupdate(self):
86 "delay visibility of index updates to other readers"
85 "delay visibility of index updates to other readers"
87 self._realopener = self.opener
86 self._realopener = self.opener
88 self.opener = self._delayopener
87 self.opener = self._delayopener
89 self._delaycount = self.count()
88 self._delaycount = self.count()
90 self._delaybuf = []
89 self._delaybuf = []
91 self._delayname = None
90 self._delayname = None
92
91
93 def finalize(self, tr):
92 def finalize(self, tr):
94 "finalize index updates"
93 "finalize index updates"
95 self.opener = self._realopener
94 self.opener = self._realopener
96 # move redirected index data back into place
95 # move redirected index data back into place
97 if self._delayname:
96 if self._delayname:
98 util.rename(self._delayname + ".a", self._delayname)
97 util.rename(self._delayname + ".a", self._delayname)
99 elif self._delaybuf:
98 elif self._delaybuf:
100 fp = self.opener(self.indexfile, 'a')
99 fp = self.opener(self.indexfile, 'a')
101 fp.write("".join(self._delaybuf))
100 fp.write("".join(self._delaybuf))
102 fp.close()
101 fp.close()
103 del self._delaybuf
102 del self._delaybuf
104 # split when we're done
103 # split when we're done
105 self.checkinlinesize(tr)
104 self.checkinlinesize(tr)
106
105
107 def _delayopener(self, name, mode='r'):
106 def _delayopener(self, name, mode='r'):
108 fp = self._realopener(name, mode)
107 fp = self._realopener(name, mode)
109 # only divert the index
108 # only divert the index
110 if not name == self.indexfile:
109 if not name == self.indexfile:
111 return fp
110 return fp
112 # if we're doing an initial clone, divert to another file
111 # if we're doing an initial clone, divert to another file
113 if self._delaycount == 0:
112 if self._delaycount == 0:
114 self._delayname = fp.name
113 self._delayname = fp.name
115 return self._realopener(name + ".a", mode)
114 return self._realopener(name + ".a", mode)
116 # otherwise, divert to memory
115 # otherwise, divert to memory
117 return appender(fp, self._delaybuf)
116 return appender(fp, self._delaybuf)
118
117
119 def checkinlinesize(self, tr, fp=None):
118 def checkinlinesize(self, tr, fp=None):
120 if self.opener == self._delayopener:
119 if self.opener == self._delayopener:
121 return
120 return
122 return revlog.checkinlinesize(self, tr, fp)
121 return revlog.checkinlinesize(self, tr, fp)
123
122
124 def decode_extra(self, text):
123 def decode_extra(self, text):
125 extra = {}
124 extra = {}
126 for l in text.split('\0'):
125 for l in text.split('\0'):
127 if not l:
126 if not l:
128 continue
127 continue
129 k, v = _string_unescape(l).split(':', 1)
128 k, v = _string_unescape(l).split(':', 1)
130 extra[k] = v
129 extra[k] = v
131 return extra
130 return extra
132
131
133 def encode_extra(self, d):
132 def encode_extra(self, d):
134 # keys must be sorted to produce a deterministic changelog entry
133 # keys must be sorted to produce a deterministic changelog entry
135 keys = d.keys()
134 keys = d.keys()
136 keys.sort()
135 keys.sort()
137 items = [_string_escape('%s:%s' % (k, d[k])) for k in keys]
136 items = [_string_escape('%s:%s' % (k, d[k])) for k in keys]
138 return "\0".join(items)
137 return "\0".join(items)
139
138
140 def extract(self, text):
139 def extract(self, text):
141 """
140 """
142 format used:
141 format used:
143 nodeid\n : manifest node in ascii
142 nodeid\n : manifest node in ascii
144 user\n : user, no \n or \r allowed
143 user\n : user, no \n or \r allowed
145 time tz extra\n : date (time is int or float, timezone is int)
144 time tz extra\n : date (time is int or float, timezone is int)
146 : extra is metadata, encoded and separated by '\0'
145 : extra is metadata, encoded and separated by '\0'
147 : older versions ignore it
146 : older versions ignore it
148 files\n\n : files modified by the cset, no \n or \r allowed
147 files\n\n : files modified by the cset, no \n or \r allowed
149 (.*) : comment (free text, ideally utf-8)
148 (.*) : comment (free text, ideally utf-8)
150
149
151 changelog v0 doesn't use extra
150 changelog v0 doesn't use extra
152 """
151 """
153 if not text:
152 if not text:
154 return (nullid, "", (0, 0), [], "", {'branch': 'default'})
153 return (nullid, "", (0, 0), [], "", {'branch': 'default'})
155 last = text.index("\n\n")
154 last = text.index("\n\n")
156 desc = util.tolocal(text[last + 2:])
155 desc = util.tolocal(text[last + 2:])
157 l = text[:last].split('\n')
156 l = text[:last].split('\n')
158 manifest = bin(l[0])
157 manifest = bin(l[0])
159 user = util.tolocal(l[1])
158 user = util.tolocal(l[1])
160
159
161 extra_data = l[2].split(' ', 2)
160 extra_data = l[2].split(' ', 2)
162 if len(extra_data) != 3:
161 if len(extra_data) != 3:
163 time = float(extra_data.pop(0))
162 time = float(extra_data.pop(0))
164 try:
163 try:
165 # various tools did silly things with the time zone field.
164 # various tools did silly things with the time zone field.
166 timezone = int(extra_data[0])
165 timezone = int(extra_data[0])
167 except:
166 except:
168 timezone = 0
167 timezone = 0
169 extra = {}
168 extra = {}
170 else:
169 else:
171 time, timezone, extra = extra_data
170 time, timezone, extra = extra_data
172 time, timezone = float(time), int(timezone)
171 time, timezone = float(time), int(timezone)
173 extra = self.decode_extra(extra)
172 extra = self.decode_extra(extra)
174 if not extra.get('branch'):
173 if not extra.get('branch'):
175 extra['branch'] = 'default'
174 extra['branch'] = 'default'
176 files = l[3:]
175 files = l[3:]
177 return (manifest, user, (time, timezone), files, desc, extra)
176 return (manifest, user, (time, timezone), files, desc, extra)
178
177
179 def read(self, node):
178 def read(self, node):
180 return self.extract(self.revision(node))
179 return self.extract(self.revision(node))
181
180
182 def add(self, manifest, list, desc, transaction, p1=None, p2=None,
181 def add(self, manifest, list, desc, transaction, p1=None, p2=None,
183 user=None, date=None, extra={}):
182 user=None, date=None, extra={}):
184
183
185 user, desc = util.fromlocal(user), util.fromlocal(desc)
184 user, desc = util.fromlocal(user), util.fromlocal(desc)
186
185
187 if date:
186 if date:
188 parseddate = "%d %d" % util.parsedate(date)
187 parseddate = "%d %d" % util.parsedate(date)
189 else:
188 else:
190 parseddate = "%d %d" % util.makedate()
189 parseddate = "%d %d" % util.makedate()
191 if extra and extra.get("branch") in ("default", ""):
190 if extra and extra.get("branch") in ("default", ""):
192 del extra["branch"]
191 del extra["branch"]
193 if extra:
192 if extra:
194 extra = self.encode_extra(extra)
193 extra = self.encode_extra(extra)
195 parseddate = "%s %s" % (parseddate, extra)
194 parseddate = "%s %s" % (parseddate, extra)
196 list.sort()
195 list.sort()
197 l = [hex(manifest), user, parseddate] + list + ["", desc]
196 l = [hex(manifest), user, parseddate] + list + ["", desc]
198 text = "\n".join(l)
197 text = "\n".join(l)
199 return self.addrevision(text, transaction, self.count(), p1, p2)
198 return self.addrevision(text, transaction, self.count(), p1, p2)
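delayupdate(), _delayopener() and finalize() above keep index writes out of other readers' sight until the transaction is done, either by diverting them to a ".a" file or by buffering them in memory and appending the whole tail at the end. Below is a stripped-down sketch of that buffer-then-flush idea; the Index class and its file handling are invented for illustration and are much simpler than the real appender/changelog machinery.

import os, tempfile

class Index(object):
    def __init__(self, path):
        self.path = path
        self._buf = None              # None: writes go straight to disk

    def delayupdate(self):
        self._buf = []                # start buffering writes in memory

    def write(self, data):
        if self._buf is not None:
            self._buf.append(data)
        else:
            fp = open(self.path, 'ab')
            fp.write(data)
            fp.close()

    def finalize(self):
        # append the buffered tail in one go, then stop buffering
        if self._buf:
            fp = open(self.path, 'ab')
            fp.write(b''.join(self._buf))
            fp.close()
        self._buf = None

if __name__ == '__main__':
    path = os.path.join(tempfile.mkdtemp(), '00changelog.i')
    open(path, 'wb').close()          # existing (empty) index
    idx = Index(path)
    idx.delayupdate()
    idx.write(b'entry-1\n')           # not visible on disk yet
    idx.write(b'entry-2\n')
    print(open(path, 'rb').read())    # still empty
    idx.finalize()
    print(open(path, 'rb').read())    # both entries appear together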
@@ -1,1277 +1,1277 b''
1 # cmdutil.py - help for command processing in mercurial
1 # cmdutil.py - help for command processing in mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import *
8 from node import *
9 from i18n import _
9 from i18n import _
10 import os, sys, atexit, signal, pdb, traceback, socket, errno, shlex
10 import os, sys, atexit, signal, pdb, traceback, socket, errno, shlex
11 import mdiff, bdiff, util, templater, patch, commands, hg, lock, time
11 import mdiff, bdiff, util, templater, patch, commands, hg, lock, time
12 import fancyopts, revlog, version, extensions, hook
12 import fancyopts, revlog, version, extensions, hook
13
13
14 revrangesep = ':'
14 revrangesep = ':'
15
15
16 class UnknownCommand(Exception):
16 class UnknownCommand(Exception):
17 """Exception raised if command is not in the command table."""
17 """Exception raised if command is not in the command table."""
18 class AmbiguousCommand(Exception):
18 class AmbiguousCommand(Exception):
19 """Exception raised if command shortcut matches more than one command."""
19 """Exception raised if command shortcut matches more than one command."""
20 class ParseError(Exception):
20 class ParseError(Exception):
21 """Exception raised on errors in parsing the command line."""
21 """Exception raised on errors in parsing the command line."""
22
22
def runcatch(ui, args, argv0=None):
    def catchterm(*args):
        raise util.SignalInterrupt

    for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
        num = getattr(signal, name, None)
        if num: signal.signal(num, catchterm)

    try:
        try:
            # enter the debugger before command execution
            if '--debugger' in args:
                pdb.set_trace()
            try:
                return dispatch(ui, args, argv0=argv0)
            finally:
                ui.flush()
        except:
            # enter the debugger when we hit an exception
            if '--debugger' in args:
                pdb.post_mortem(sys.exc_info()[2])
            ui.print_exc()
            raise

    except ParseError, inst:
        if inst.args[0]:
            ui.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
            commands.help_(ui, inst.args[0])
        else:
            ui.warn(_("hg: %s\n") % inst.args[1])
            commands.help_(ui, 'shortlist')
    except AmbiguousCommand, inst:
        ui.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
                (inst.args[0], " ".join(inst.args[1])))
    except UnknownCommand, inst:
        ui.warn(_("hg: unknown command '%s'\n") % inst.args[0])
        commands.help_(ui, 'shortlist')
    except hg.RepoError, inst:
        ui.warn(_("abort: %s!\n") % inst)
    except lock.LockHeld, inst:
        if inst.errno == errno.ETIMEDOUT:
            reason = _('timed out waiting for lock held by %s') % inst.locker
        else:
            reason = _('lock held by %s') % inst.locker
        ui.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
    except lock.LockUnavailable, inst:
        ui.warn(_("abort: could not lock %s: %s\n") %
                (inst.desc or inst.filename, inst.strerror))
    except revlog.RevlogError, inst:
        ui.warn(_("abort: %s!\n") % inst)
    except util.SignalInterrupt:
        ui.warn(_("killed!\n"))
    except KeyboardInterrupt:
        try:
            ui.warn(_("interrupted!\n"))
        except IOError, inst:
            if inst.errno == errno.EPIPE:
                if ui.debugflag:
                    ui.warn(_("\nbroken pipe\n"))
            else:
                raise
    except socket.error, inst:
        ui.warn(_("abort: %s\n") % inst[1])
    except IOError, inst:
        if hasattr(inst, "code"):
            ui.warn(_("abort: %s\n") % inst)
        elif hasattr(inst, "reason"):
            try: # usually it is in the form (errno, strerror)
                reason = inst.reason.args[1]
            except: # it might be anything, for example a string
                reason = inst.reason
            ui.warn(_("abort: error: %s\n") % reason)
        elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
            if ui.debugflag:
                ui.warn(_("broken pipe\n"))
        elif getattr(inst, "strerror", None):
            if getattr(inst, "filename", None):
                ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
            else:
                ui.warn(_("abort: %s\n") % inst.strerror)
        else:
            raise
    except OSError, inst:
        if getattr(inst, "filename", None):
            ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
        else:
            ui.warn(_("abort: %s\n") % inst.strerror)
    except util.UnexpectedOutput, inst:
        ui.warn(_("abort: %s") % inst[0])
        if not isinstance(inst[1], basestring):
            ui.warn(" %r\n" % (inst[1],))
        elif not inst[1]:
            ui.warn(_(" empty string\n"))
        else:
            ui.warn("\n%r\n" % util.ellipsis(inst[1]))
    except ImportError, inst:
        m = str(inst).split()[-1]
        # translate the message first, then interpolate the module name
        ui.warn(_("abort: could not import module %s!\n") % m)
        if m in "mpatch bdiff".split():
            ui.warn(_("(did you forget to compile extensions?)\n"))
        elif m in "zlib".split():
            ui.warn(_("(is your Python install correct?)\n"))

    except util.Abort, inst:
        ui.warn(_("abort: %s\n") % inst)
    except SystemExit, inst:
        # Commands shouldn't sys.exit directly, but give a return code.
        # Just in case, catch this and pass the exit code to the caller.
        return inst.code
    except:
        ui.warn(_("** unknown exception encountered, details follow\n"))
        ui.warn(_("** report bug details to "
                  "http://www.selenic.com/mercurial/bts\n"))
        ui.warn(_("** or mercurial@selenic.com\n"))
        ui.warn(_("** Mercurial Distributed SCM (version %s)\n")
                % version.get_version())
        raise

    return -1

def findpossible(ui, cmd):
    """
    Return cmd -> (aliases, command table entry)
    for each matching command.
    Return debug commands (or their aliases) only if no normal command matches.
    """
    choice = {}
    debugchoice = {}
    for e in commands.table.keys():
        aliases = e.lstrip("^").split("|")
        found = None
        if cmd in aliases:
            found = cmd
        elif not ui.config("ui", "strict"):
            for a in aliases:
                if a.startswith(cmd):
                    found = a
                    break
        if found is not None:
            if aliases[0].startswith("debug") or found.startswith("debug"):
                debugchoice[found] = (aliases, commands.table[e])
            else:
                choice[found] = (aliases, commands.table[e])

    if not choice and debugchoice:
        choice = debugchoice

    return choice

def findcmd(ui, cmd):
    """Return (aliases, command table entry) for command string."""
    choice = findpossible(ui, cmd)

    if choice.has_key(cmd):
        return choice[cmd]

    if len(choice) > 1:
        clist = choice.keys()
        clist.sort()
        raise AmbiguousCommand(cmd, clist)

    if choice:
        return choice.values()[0]

    raise UnknownCommand(cmd)

def findrepo():
    p = os.getcwd()
    while not os.path.isdir(os.path.join(p, ".hg")):
        oldp, p = p, os.path.dirname(p)
        if p == oldp:
            return None

    return p

def parse(ui, args):
    options = {}
    cmdoptions = {}

    try:
        args = fancyopts.fancyopts(args, commands.globalopts, options)
    except fancyopts.getopt.GetoptError, inst:
        raise ParseError(None, inst)

    if args:
        cmd, args = args[0], args[1:]
        aliases, i = findcmd(ui, cmd)
        cmd = aliases[0]
        defaults = ui.config("defaults", cmd)
        if defaults:
            args = shlex.split(defaults) + args
        c = list(i[1])
    else:
        cmd = None
        c = []

    # combine global options into local
    for o in commands.globalopts:
        c.append((o[0], o[1], options[o[1]], o[3]))

    try:
        args = fancyopts.fancyopts(args, c, cmdoptions)
    except fancyopts.getopt.GetoptError, inst:
        raise ParseError(cmd, inst)

    # separate global options back out
    for o in commands.globalopts:
        n = o[1]
        options[n] = cmdoptions[n]
        del cmdoptions[n]

    return (cmd, cmd and i[0] or None, args, options, cmdoptions)

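# Illustrative sketch (not part of the original module): the shape of the
# tuple parse() returns for a typical command line.  The ui object is
# assumed to come from the caller; the values in the comments are only
# examples, not guaranteed output.
def _parse_example(ui):
    cmd, func, args, options, cmdoptions = parse(ui, ['log', '-l', '2', 'src'])
    # cmd == 'log', func is the command function from commands.table,
    # args == ['src'], options holds the global flags and cmdoptions the
    # per-command ones (here roughly {'limit': '2', ...})
    return cmd, func, args, options, cmdoptions
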
def parseconfig(config):
    """parse the --config options from the command line"""
    parsed = []
    for cfg in config:
        try:
            name, value = cfg.split('=', 1)
            section, name = name.split('.', 1)
            if not section or not name:
                raise IndexError
            parsed.append((section, name, value))
        except (IndexError, ValueError):
            raise util.Abort(_('malformed --config option: %s') % cfg)
    return parsed

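# Illustrative sketch (not part of the original module): each --config
# value has the form section.name=value and comes back as a 3-tuple.
# The values below are made up for the example.
def _parseconfig_example():
    overrides = parseconfig(['ui.username=John Doe', 'diff.git=1'])
    # overrides == [('ui', 'username', 'John Doe'), ('diff', 'git', '1')]
    return overrides
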
def earlygetopt(aliases, args):
    """Return list of values for an option (or aliases).

    The values are listed in the order they appear in args.
    The options and values are removed from args.
    """
    try:
        argcount = args.index("--")
    except ValueError:
        argcount = len(args)
    shortopts = [opt for opt in aliases if len(opt) == 2]
    values = []
    pos = 0
    while pos < argcount:
        if args[pos] in aliases:
            if pos + 1 >= argcount:
                # ignore and let getopt report an error if there is no value
                break
            del args[pos]
            values.append(args.pop(pos))
            argcount -= 2
        elif args[pos][:2] in shortopts:
            # short option can have no following space, e.g. hg log -Rfoo
            values.append(args.pop(pos)[2:])
            argcount -= 1
        else:
            pos += 1
    return values

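# Illustrative sketch (not part of the original module): earlygetopt pulls
# selected options out of the raw argument list in place, stopping at "--".
# The argument values are made up for the example.
def _earlygetopt_example():
    args = ['--cwd', '/tmp/repo', '-Rfoo', 'log', '--', '-Rbar']
    repos = earlygetopt(['-R', '--repository', '--repo'], args)
    # repos == ['foo']; args no longer contains '-Rfoo' but keeps
    # everything after '--': ['--cwd', '/tmp/repo', 'log', '--', '-Rbar']
    return repos, args
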
def dispatch(ui, args, argv0=None):
    # remember how to call 'hg' before changing the working dir
    util.set_hgexecutable(argv0)

    # read --config before doing anything else
    # (e.g. to change trust settings for reading .hg/hgrc)
    config = earlygetopt(['--config'], args)
    if config:
        ui.updateopts(config=parseconfig(config))

    # check for cwd
    cwd = earlygetopt(['--cwd'], args)
    if cwd:
        os.chdir(cwd[-1])

    # read the local repository .hgrc into a local ui object
    path = findrepo() or ""
    if not path:
        lui = ui
    if path:
        try:
            lui = commands.ui.ui(parentui=ui)
            lui.readconfig(os.path.join(path, ".hg", "hgrc"))
        except IOError:
            pass

    # now we can expand paths, even ones in .hg/hgrc
    rpath = earlygetopt(["-R", "--repository", "--repo"], args)
    if rpath:
        path = lui.expandpath(rpath[-1])
        lui = commands.ui.ui(parentui=ui)
        lui.readconfig(os.path.join(path, ".hg", "hgrc"))

    extensions.loadall(lui)
    # check for fallback encoding
    fallback = lui.config('ui', 'fallbackencoding')
    if fallback:
        util._fallbackencoding = fallback

    fullargs = args
    # parse with the local ui so per-repository command defaults apply
    cmd, func, args, options, cmdoptions = parse(lui, args)

    if options["config"]:
        raise util.Abort(_("Option --config may not be abbreviated!"))
    if options["cwd"]:
        raise util.Abort(_("Option --cwd may not be abbreviated!"))
    if options["repository"]:
        raise util.Abort(_(
            "Option -R has to be separated from other options (i.e. not -qR) "
            "and --repository may only be abbreviated as --repo!"))

    if options["encoding"]:
        util._encoding = options["encoding"]
    if options["encodingmode"]:
        util._encodingmode = options["encodingmode"]
    if options["time"]:
        def get_times():
            t = os.times()
            if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
                t = (t[0], t[1], t[2], t[3], time.clock())
            return t
        s = get_times()
        def print_time():
            t = get_times()
            ui.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
                    (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
        atexit.register(print_time)

    ui.updateopts(options["verbose"], options["debug"], options["quiet"],
                  not options["noninteractive"], options["traceback"])

    if options['help']:
        return commands.help_(ui, cmd, options['version'])
    elif options['version']:
        return commands.version_(ui)
    elif not cmd:
        return commands.help_(ui, 'shortlist')

    repo = None
    if cmd not in commands.norepo.split():
        try:
            repo = hg.repository(ui, path=path)
            ui = repo.ui
            if not repo.local():
                raise util.Abort(_("repository '%s' is not local") % path)
        except hg.RepoError:
            if cmd not in commands.optionalrepo.split():
                if not path:
                    raise hg.RepoError(_("There is no Mercurial repository here"
                                         " (.hg not found)"))
                raise
        d = lambda: func(ui, repo, *args, **cmdoptions)
    else:
        d = lambda: func(ui, *args, **cmdoptions)

    # run pre-hook, and abort if it fails
    ret = hook.hook(ui, repo, "pre-%s" % cmd, False, args=" ".join(fullargs))
    if ret:
        return ret
    ret = runcommand(ui, options, cmd, d)
    # run post-hook, passing command result
    hook.hook(ui, repo, "post-%s" % cmd, False, args=" ".join(fullargs),
              result=ret)
    return ret

def runcommand(ui, options, cmd, cmdfunc):
    def checkargs():
        try:
            return cmdfunc()
        except TypeError, inst:
            # was this an argument error?
            tb = traceback.extract_tb(sys.exc_info()[2])
            if len(tb) != 2: # no
                raise
            raise ParseError(cmd, _("invalid arguments"))

    if options['profile']:
        import hotshot, hotshot.stats
        prof = hotshot.Profile("hg.prof")
        try:
            try:
                return prof.runcall(checkargs)
            except:
                try:
                    ui.warn(_('exception raised - generating '
                              'profile anyway\n'))
                except:
                    pass
                raise
        finally:
            prof.close()
            stats = hotshot.stats.load("hg.prof")
            stats.strip_dirs()
            stats.sort_stats('time', 'calls')
            stats.print_stats(40)
    elif options['lsprof']:
        try:
            from mercurial import lsprof
        except ImportError:
            raise util.Abort(_(
                'lsprof not available - install from '
                'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
        p = lsprof.Profiler()
        p.enable(subcalls=True)
        try:
            return checkargs()
        finally:
            p.disable()
            stats = lsprof.Stats(p.getstats())
            stats.sort()
            stats.pprint(top=10, file=sys.stderr, climit=5)
    else:
        return checkargs()

def bail_if_changed(repo):
    modified, added, removed, deleted = repo.status()[:4]
    if modified or added or removed or deleted:
        raise util.Abort(_("outstanding uncommitted changes"))

def logmessage(opts):
    """ get the log message according to -m and -l option """
    message = opts['message']
    logfile = opts['logfile']

    if message and logfile:
        raise util.Abort(_('options --message and --logfile are mutually '
                           'exclusive'))
    if not message and logfile:
        try:
            if logfile == '-':
                message = sys.stdin.read()
            else:
                message = open(logfile).read()
        except IOError, inst:
            raise util.Abort(_("can't read commit message '%s': %s") %
                             (logfile, inst.strerror))
    return message

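# Illustrative sketch (not part of the original module): logmessage()
# expects both keys to be present; -m supplies the text directly, -l names
# a file ('-' means stdin), and giving both raises util.Abort.  The dict
# below is made up for the example.
def _logmessage_example():
    msg = logmessage({'message': 'fix build breakage', 'logfile': ''})
    # msg == 'fix build breakage'
    return msg
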
def setremoteconfig(ui, opts):
    "copy remote options to ui tree"
    if opts.get('ssh'):
        ui.setconfig("ui", "ssh", opts['ssh'])
    if opts.get('remotecmd'):
        ui.setconfig("ui", "remotecmd", opts['remotecmd'])

def parseurl(url, revs):
    '''parse url#branch, returning url, branch + revs'''

    if '#' not in url:
        return url, (revs or None)

    url, rev = url.split('#', 1)
    return url, revs + [rev]

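# Illustrative sketch (not part of the original module): a '#branch'
# fragment on a pull/push URL is split off and appended to the revisions.
# The URLs below are made up for the example.
def _parseurl_example():
    url, revs = parseurl('http://example.com/repo#stable', [])
    # url == 'http://example.com/repo', revs == ['stable']
    plain, none = parseurl('http://example.com/repo', [])
    # no fragment and no requested revs gives (url, None)
    return url, revs, plain, none
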
def revpair(repo, revs):
    '''return pair of nodes, given list of revisions. second item can
    be None, meaning use working dir.'''

    def revfix(repo, val, defval):
        if not val and val != 0 and defval is not None:
            val = defval
        return repo.lookup(val)

    if not revs:
        return repo.dirstate.parents()[0], None
    end = None
    if len(revs) == 1:
        if revrangesep in revs[0]:
            start, end = revs[0].split(revrangesep, 1)
            start = revfix(repo, start, 0)
            end = revfix(repo, end, repo.changelog.count() - 1)
        else:
            start = revfix(repo, revs[0], None)
    elif len(revs) == 2:
        if revrangesep in revs[0] or revrangesep in revs[1]:
            raise util.Abort(_('too many revisions specified'))
        start = revfix(repo, revs[0], None)
        end = revfix(repo, revs[1], None)
    else:
        raise util.Abort(_('too many revisions specified'))
    return start, end

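# Illustrative sketch (not part of the original module): a 'start:end'
# spec yields both nodes, while a single rev yields (node, None), meaning
# "compare against the working directory".  Assumes a repo with a few
# revisions; the specs are made up for the example.
def _revpair_example(repo):
    start, end = revpair(repo, ['2:tip'])
    node, wdir = revpair(repo, ['2'])   # wdir is None
    return start, end, node, wdir
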
def revrange(repo, revs):
    """Return a list of revision numbers for a list of revision specs."""

    def revfix(repo, val, defval):
        if not val and val != 0 and defval is not None:
            return defval
        return repo.changelog.rev(repo.lookup(val))

    seen, l = {}, []
    for spec in revs:
        if revrangesep in spec:
            start, end = spec.split(revrangesep, 1)
            start = revfix(repo, start, 0)
            end = revfix(repo, end, repo.changelog.count() - 1)
            step = start > end and -1 or 1
            for rev in xrange(start, end+step, step):
                if rev in seen:
                    continue
                seen[rev] = 1
                l.append(rev)
        else:
            rev = revfix(repo, spec, None)
            if rev in seen:
                continue
            seen[rev] = 1
            l.append(rev)

    return l

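# Illustrative sketch (not part of the original module): ranges expand in
# order (a reversed range walks backwards) and duplicates are dropped.
# Assumes a repo with at least four revisions; the specs are made up.
def _revrange_example(repo):
    revs = revrange(repo, ['0', '3:1', '2'])
    # -> [0, 3, 2, 1]; rev 2 is already covered by '3:1'
    return revs
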
def make_filename(repo, pat, node,
                  total=None, seqno=None, revwidth=None, pathname=None):
    node_expander = {
        'H': lambda: hex(node),
        'R': lambda: str(repo.changelog.rev(node)),
        'h': lambda: short(node),
        }
    expander = {
        '%': lambda: '%',
        'b': lambda: os.path.basename(repo.root),
        }

    try:
        if node:
            expander.update(node_expander)
        if node:
            expander['r'] = (lambda:
                    str(repo.changelog.rev(node)).zfill(revwidth or 0))
        if total is not None:
            expander['N'] = lambda: str(total)
        if seqno is not None:
            expander['n'] = lambda: str(seqno)
        if total is not None and seqno is not None:
            expander['n'] = lambda: str(seqno).zfill(len(str(total)))
        if pathname is not None:
            expander['s'] = lambda: os.path.basename(pathname)
            expander['d'] = lambda: os.path.dirname(pathname) or '.'
            expander['p'] = lambda: pathname

        newname = []
        patlen = len(pat)
        i = 0
        while i < patlen:
            c = pat[i]
            if c == '%':
                i += 1
                c = pat[i]
                c = expander[c]()
            newname.append(c)
            i += 1
        return ''.join(newname)
    except KeyError, inst:
        raise util.Abort(_("invalid format spec '%%%s' in output file name") %
                         inst.args[0])

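# Illustrative sketch (not part of the original module): the expansion keys
# used by commands such as 'hg export'.  '%b' is the repository basename,
# '%R' the local revision number, '%h' the short hash, '%%' a literal
# percent sign.  The pattern shown is made up for the example.
def _make_filename_example(repo, node):
    return make_filename(repo, 'export/%b-r%R-%h.patch', node)
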
def make_file(repo, pat, node=None,
              total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
    if not pat or pat == '-':
        return 'w' in mode and sys.stdout or sys.stdin
    if hasattr(pat, 'write') and 'w' in mode:
        return pat
    if hasattr(pat, 'read') and 'r' in mode:
        return pat
    return open(make_filename(repo, pat, node, total, seqno, revwidth,
                              pathname),
                mode)

def matchpats(repo, pats=[], opts={}, globbed=False, default=None):
    cwd = repo.getcwd()
    return util.cmdmatcher(repo.root, cwd, pats or [], opts.get('include'),
                           opts.get('exclude'), globbed=globbed,
                           default=default)

def walk(repo, pats=[], opts={}, node=None, badmatch=None, globbed=False,
         default=None):
    files, matchfn, anypats = matchpats(repo, pats, opts, globbed=globbed,
                                        default=default)
    exact = dict.fromkeys(files)
    cwd = repo.getcwd()
    for src, fn in repo.walk(node=node, files=files, match=matchfn,
                             badmatch=badmatch):
        yield src, fn, repo.pathto(fn, cwd), fn in exact

def findrenames(repo, added=None, removed=None, threshold=0.5):
    '''find renamed files -- yields (before, after, score) tuples'''
    if added is None or removed is None:
        added, removed = repo.status()[1:3]
    ctx = repo.changectx()
    for a in added:
        aa = repo.wread(a)
        bestname, bestscore = None, threshold
        for r in removed:
            rr = ctx.filectx(r).data()

            # bdiff.blocks() returns blocks of matching lines
            # count the number of bytes in each
            equal = 0
            alines = mdiff.splitnewlines(aa)
            matches = bdiff.blocks(aa, rr)
            for x1, x2, y1, y2 in matches:
                for line in alines[x1:x2]:
                    equal += len(line)

            lengths = len(aa) + len(rr)
            if lengths:
                myscore = equal*2.0 / lengths
                if myscore >= bestscore:
                    bestname, bestscore = r, myscore
        if bestname:
            yield bestname, a, bestscore

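# Illustrative sketch (not part of the original module): the score is
# 2 * matching bytes / (len(added) + len(removed)), so 1.0 means identical
# content, and only pairs at or above the threshold are reported.
def _findrenames_example(repo):
    for old, new, score in findrenames(repo, threshold=0.8):
        repo.ui.status('%s -> %s (%d%% similar)\n' % (old, new, score * 100))
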
def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
    if dry_run is None:
        dry_run = opts.get('dry_run')
    if similarity is None:
        similarity = float(opts.get('similarity') or 0)
    add, remove = [], []
    mapping = {}
    for src, abs, rel, exact in walk(repo, pats, opts):
        target = repo.wjoin(abs)
        if src == 'f' and abs not in repo.dirstate:
            add.append(abs)
            mapping[abs] = rel, exact
            if repo.ui.verbose or not exact:
                repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
        if repo.dirstate[abs] != 'r' and not util.lexists(target):
            remove.append(abs)
            mapping[abs] = rel, exact
            if repo.ui.verbose or not exact:
                repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
    if not dry_run:
        repo.add(add)
        repo.remove(remove)
    if similarity > 0:
        for old, new, score in findrenames(repo, add, remove, similarity):
            oldrel, oldexact = mapping[old]
            newrel, newexact = mapping[new]
            if repo.ui.verbose or not oldexact or not newexact:
                repo.ui.status(_('recording removal of %s as rename to %s '
                                 '(%d%% similar)\n') %
                               (oldrel, newrel, score * 100))
            if not dry_run:
                repo.copy(old, new)

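# Illustrative sketch (not part of the original module): preview what an
# addremove with 90% rename detection would do without touching the
# dirstate (similarity here is the 0..1 threshold passed to findrenames).
def _addremove_example(repo):
    addremove(repo, dry_run=True, similarity=0.9)
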
def service(opts, parentfn=None, initfn=None, runfn=None):
    '''Run a command as a service.'''

    if opts['daemon'] and not opts['daemon_pipefds']:
        rfd, wfd = os.pipe()
        args = sys.argv[:]
        args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
        pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
                         args[0], args)
        os.close(wfd)
        os.read(rfd, 1)
        if parentfn:
            return parentfn(pid)
        else:
            os._exit(0)

    if initfn:
        initfn()

    if opts['pid_file']:
        fp = open(opts['pid_file'], 'w')
        fp.write(str(os.getpid()) + '\n')
        fp.close()

    if opts['daemon_pipefds']:
        rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
        os.close(rfd)
        try:
            os.setsid()
        except AttributeError:
            pass
        os.write(wfd, 'y')
        os.close(wfd)
        sys.stdout.flush()
        sys.stderr.flush()
        fd = os.open(util.nulldev, os.O_RDWR)
        if fd != 0: os.dup2(fd, 0)
        if fd != 1: os.dup2(fd, 1)
        if fd != 2: os.dup2(fd, 2)
        if fd not in (0, 1, 2): os.close(fd)

    if runfn:
        return runfn()

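# Illustrative sketch (not part of the original module): the daemon-related
# keys must be present even when unused.  Foreground use with a pid file;
# the file name and runfn are supplied by the caller.
def _service_example(runfn):
    opts = {'daemon': False, 'daemon_pipefds': '', 'pid_file': 'hg.pid'}
    return service(opts, runfn=runfn)
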
class changeset_printer(object):
    '''show changeset information when templating not requested.'''

    def __init__(self, ui, repo, patch, buffered):
        self.ui = ui
        self.repo = repo
        self.buffered = buffered
        self.patch = patch
        self.header = {}
        self.hunk = {}
        self.lastheader = None

    def flush(self, rev):
        if rev in self.header:
            h = self.header[rev]
            if h != self.lastheader:
                self.lastheader = h
                self.ui.write(h)
            del self.header[rev]
        if rev in self.hunk:
            self.ui.write(self.hunk[rev])
            del self.hunk[rev]
            return 1
        return 0

    def show(self, rev=0, changenode=None, copies=(), **props):
        if self.buffered:
            self.ui.pushbuffer()
            self._show(rev, changenode, copies, props)
            self.hunk[rev] = self.ui.popbuffer()
        else:
            self._show(rev, changenode, copies, props)

    def _show(self, rev, changenode, copies, props):
        '''show a single changeset or file revision'''
        log = self.repo.changelog
        if changenode is None:
            changenode = log.node(rev)
        elif not rev:
            rev = log.rev(changenode)

        if self.ui.quiet:
            self.ui.write("%d:%s\n" % (rev, short(changenode)))
            return

        changes = log.read(changenode)
        date = util.datestr(changes[2])
        extra = changes[5]
        branch = extra.get("branch")

        hexfunc = self.ui.debugflag and hex or short

        parents = [(p, hexfunc(log.node(p)))
                   for p in self._meaningful_parentrevs(log, rev)]

        self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)))

        # don't show the default branch name
        if branch != 'default':
            branch = util.tolocal(branch)
            self.ui.write(_("branch: %s\n") % branch)
        for tag in self.repo.nodetags(changenode):
            self.ui.write(_("tag: %s\n") % tag)
        for parent in parents:
            self.ui.write(_("parent: %d:%s\n") % parent)

        if self.ui.debugflag:
            self.ui.write(_("manifest: %d:%s\n") %
                          (self.repo.manifest.rev(changes[0]), hex(changes[0])))
        self.ui.write(_("user: %s\n") % changes[1])
        self.ui.write(_("date: %s\n") % date)

        if self.ui.debugflag:
            files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
            for key, value in zip([_("files:"), _("files+:"), _("files-:")],
                                  files):
                if value:
                    self.ui.write("%-12s %s\n" % (key, " ".join(value)))
        elif changes[3] and self.ui.verbose:
            self.ui.write(_("files: %s\n") % " ".join(changes[3]))
        if copies and self.ui.verbose:
            copies = ['%s (%s)' % c for c in copies]
            self.ui.write(_("copies: %s\n") % ' '.join(copies))

        if extra and self.ui.debugflag:
            extraitems = extra.items()
            extraitems.sort()
            for key, value in extraitems:
                self.ui.write(_("extra: %s=%s\n")
                              % (key, value.encode('string_escape')))

        description = changes[4].strip()
        if description:
            if self.ui.verbose:
                self.ui.write(_("description:\n"))
                self.ui.write(description)
                self.ui.write("\n\n")
            else:
                self.ui.write(_("summary: %s\n") %
                              description.splitlines()[0])
        self.ui.write("\n")

        self.showpatch(changenode)

    def showpatch(self, node):
        if self.patch:
            prev = self.repo.changelog.parents(node)[0]
            patch.diff(self.repo, prev, node, match=self.patch, fp=self.ui,
                       opts=patch.diffopts(self.ui))
            self.ui.write("\n")

    def _meaningful_parentrevs(self, log, rev):
        """Return list of meaningful (or all if debug) parentrevs for rev.

        For merges (two non-nullrev revisions) both parents are meaningful.
        Otherwise the first parent revision is considered meaningful if it
        is not the preceding revision.
        """
        parents = log.parentrevs(rev)
        if not self.ui.debugflag and parents[1] == nullrev:
            if parents[0] >= rev - 1:
                parents = []
            else:
                parents = [parents[0]]
        return parents


class changeset_templater(changeset_printer):
    '''format changeset information.'''

    def __init__(self, ui, repo, patch, mapfile, buffered):
        changeset_printer.__init__(self, ui, repo, patch, buffered)
        filters = templater.common_filters.copy()
        filters['formatnode'] = (ui.debugflag and (lambda x: x)
                                 or (lambda x: x[:12]))
        self.t = templater.templater(mapfile, filters,
                                     cache={
                                         'parent': '{rev}:{node|formatnode} ',
                                         'manifest': '{rev}:{node|formatnode}',
                                         'filecopy': '{name} ({source})'})

    def use_template(self, t):
        '''set template string to use'''
        self.t.cache['changeset'] = t

    def _show(self, rev, changenode, copies, props):
        '''show a single changeset or file revision'''
        log = self.repo.changelog
        if changenode is None:
            changenode = log.node(rev)
        elif not rev:
            rev = log.rev(changenode)

        changes = log.read(changenode)

        def showlist(name, values, plural=None, **args):
            '''expand set of values.
            name is name of key in template map.
            values is list of strings or dicts.
            plural is plural of name, if not simply name + 's'.

            expansion works like this, given name 'foo'.

            if values is empty, expand 'no_foos'.

            if 'foo' not in template map, return values as a string,
            joined by space.

            expand 'start_foos'.

            for each value, expand 'foo'. if 'last_foo' in template
            map, expand it instead of 'foo' for last key.

            expand 'end_foos'.
            '''
            if plural: names = plural
            else: names = name + 's'
            if not values:
                noname = 'no_' + names
                if noname in self.t:
                    yield self.t(noname, **args)
                return
            if name not in self.t:
                if isinstance(values[0], str):
                    yield ' '.join(values)
                else:
                    for v in values:
                        yield dict(v, **args)
                return
            startname = 'start_' + names
            if startname in self.t:
                yield self.t(startname, **args)
            vargs = args.copy()
            def one(v, tag=name):
                try:
                    vargs.update(v)
                except (AttributeError, ValueError):
                    try:
                        for a, b in v:
                            vargs[a] = b
                    except ValueError:
                        vargs[name] = v
                return self.t(tag, **vargs)
            lastname = 'last_' + name
            if lastname in self.t:
                last = values.pop()
            else:
                last = None
            for v in values:
                yield one(v)
            if last is not None:
                yield one(last, tag=lastname)
            endname = 'end_' + names
            if endname in self.t:
                yield self.t(endname, **args)

        def showbranches(**args):
            branch = changes[5].get("branch")
            if branch != 'default':
                branch = util.tolocal(branch)
                return showlist('branch', [branch], plural='branches', **args)

        def showparents(**args):
            parents = [[('rev', p), ('node', hex(log.node(p)))]
                       for p in self._meaningful_parentrevs(log, rev)]
            return showlist('parent', parents, **args)

        def showtags(**args):
            return showlist('tag', self.repo.nodetags(changenode), **args)

        def showextras(**args):
            extras = changes[5].items()
            extras.sort()
            for key, value in extras:
                args = args.copy()
                args.update(dict(key=key, value=value))
                yield self.t('extra', **args)

        def showcopies(**args):
            c = [{'name': x[0], 'source': x[1]} for x in copies]
            return showlist('file_copy', c, plural='file_copies', **args)

        if self.ui.debugflag:
            files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
            def showfiles(**args):
                return showlist('file', files[0], **args)
            def showadds(**args):
                return showlist('file_add', files[1], **args)
            def showdels(**args):
                return showlist('file_del', files[2], **args)
            def showmanifest(**args):
                args = args.copy()
                args.update(dict(rev=self.repo.manifest.rev(changes[0]),
                                 node=hex(changes[0])))
                return self.t('manifest', **args)
        else:
            def showfiles(**args):
                return showlist('file', changes[3], **args)
            showadds = ''
            showdels = ''
            showmanifest = ''

        defprops = {
            'author': changes[1],
            'branches': showbranches,
            'date': changes[2],
            'desc': changes[4].strip(),
            'file_adds': showadds,
            'file_dels': showdels,
            'files': showfiles,
            'file_copies': showcopies,
            'manifest': showmanifest,
            'node': hex(changenode),
            'parents': showparents,
            'rev': rev,
            'tags': showtags,
            'extras': showextras,
            }
        props = props.copy()
        props.update(defprops)

        try:
            if self.ui.debugflag and 'header_debug' in self.t:
                key = 'header_debug'
            elif self.ui.quiet and 'header_quiet' in self.t:
                key = 'header_quiet'
            elif self.ui.verbose and 'header_verbose' in self.t:
                key = 'header_verbose'
            elif 'header' in self.t:
                key = 'header'
            else:
                key = ''
            if key:
                h = templater.stringify(self.t(key, **props))
                if self.buffered:
                    self.header[rev] = h
                else:
                    self.ui.write(h)
            if self.ui.debugflag and 'changeset_debug' in self.t:
                key = 'changeset_debug'
            elif self.ui.quiet and 'changeset_quiet' in self.t:
                key = 'changeset_quiet'
            elif self.ui.verbose and 'changeset_verbose' in self.t:
                key = 'changeset_verbose'
            else:
                key = 'changeset'
            self.ui.write(templater.stringify(self.t(key, **props)))
            self.showpatch(changenode)
        except KeyError, inst:
            raise util.Abort(_("%s: no key named '%s'") % (self.t.mapfile,
                                                           inst.args[0]))
        except SyntaxError, inst:
            raise util.Abort(_('%s: %s') % (self.t.mapfile, inst.args[0]))

1022 def show_changeset(ui, repo, opts, buffered=False, matchfn=False):
1022 def show_changeset(ui, repo, opts, buffered=False, matchfn=False):
1023 """show one changeset using template or regular display.
1023 """show one changeset using template or regular display.
1024
1024
1025 Display format will be the first non-empty hit of:
1025 Display format will be the first non-empty hit of:
1026 1. option 'template'
1026 1. option 'template'
1027 2. option 'style'
1027 2. option 'style'
1028 3. [ui] setting 'logtemplate'
1028 3. [ui] setting 'logtemplate'
1029 4. [ui] setting 'style'
1029 4. [ui] setting 'style'
1030 If all of these values are either unset or the empty string,
1030 If all of these values are either unset or the empty string,
1031 regular display via changeset_printer() is done.
1031 regular display via changeset_printer() is done.
1032 """
1032 """
1033 # options
1033 # options
1034 patch = False
1034 patch = False
1035 if opts.get('patch'):
1035 if opts.get('patch'):
1036 patch = matchfn or util.always
1036 patch = matchfn or util.always
1037
1037
1038 tmpl = opts.get('template')
1038 tmpl = opts.get('template')
1039 mapfile = None
1039 mapfile = None
1040 if tmpl:
1040 if tmpl:
1041 tmpl = templater.parsestring(tmpl, quoted=False)
1041 tmpl = templater.parsestring(tmpl, quoted=False)
1042 else:
1042 else:
1043 mapfile = opts.get('style')
1043 mapfile = opts.get('style')
1044 # ui settings
1044 # ui settings
1045 if not mapfile:
1045 if not mapfile:
1046 tmpl = ui.config('ui', 'logtemplate')
1046 tmpl = ui.config('ui', 'logtemplate')
1047 if tmpl:
1047 if tmpl:
1048 tmpl = templater.parsestring(tmpl)
1048 tmpl = templater.parsestring(tmpl)
1049 else:
1049 else:
1050 mapfile = ui.config('ui', 'style')
1050 mapfile = ui.config('ui', 'style')
1051
1051
1052 if tmpl or mapfile:
1052 if tmpl or mapfile:
1053 if mapfile:
1053 if mapfile:
1054 if not os.path.split(mapfile)[0]:
1054 if not os.path.split(mapfile)[0]:
1055 mapname = (templater.templatepath('map-cmdline.' + mapfile)
1055 mapname = (templater.templatepath('map-cmdline.' + mapfile)
1056 or templater.templatepath(mapfile))
1056 or templater.templatepath(mapfile))
1057 if mapname: mapfile = mapname
1057 if mapname: mapfile = mapname
1058 try:
1058 try:
1059 t = changeset_templater(ui, repo, patch, mapfile, buffered)
1059 t = changeset_templater(ui, repo, patch, mapfile, buffered)
1060 except SyntaxError, inst:
1060 except SyntaxError, inst:
1061 raise util.Abort(inst.args[0])
1061 raise util.Abort(inst.args[0])
1062 if tmpl: t.use_template(tmpl)
1062 if tmpl: t.use_template(tmpl)
1063 return t
1063 return t
1064 return changeset_printer(ui, repo, patch, buffered)
1064 return changeset_printer(ui, repo, patch, buffered)
1065
1065
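
A minimal sketch of the same precedence (added here for illustration; not part of cmdutil.py), using only the opts dictionary and the ui.config() call already seen above:

def pick_display_format(opts, ui):
    # same order as show_changeset(): --template, --style,
    # then [ui] logtemplate, then [ui] style
    tmpl = opts.get('template')
    if tmpl:
        return 'template', tmpl
    style = opts.get('style')
    if style:
        return 'style', style
    tmpl = ui.config('ui', 'logtemplate')
    if tmpl:
        return 'template', tmpl
    style = ui.config('ui', 'style')
    if style:
        return 'style', style
    # nothing configured: regular changeset_printer() display
    return 'default', None
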
1066 def finddate(ui, repo, date):
1066 def finddate(ui, repo, date):
1067 """Find the tipmost changeset that matches the given date spec"""
1067 """Find the tipmost changeset that matches the given date spec"""
1068 df = util.matchdate(date + " to " + date)
1068 df = util.matchdate(date + " to " + date)
1069 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1069 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1070 changeiter, matchfn = walkchangerevs(ui, repo, [], get, {'rev':None})
1070 changeiter, matchfn = walkchangerevs(ui, repo, [], get, {'rev':None})
1071 results = {}
1071 results = {}
1072 for st, rev, fns in changeiter:
1072 for st, rev, fns in changeiter:
1073 if st == 'add':
1073 if st == 'add':
1074 d = get(rev)[2]
1074 d = get(rev)[2]
1075 if df(d[0]):
1075 if df(d[0]):
1076 results[rev] = d
1076 results[rev] = d
1077 elif st == 'iter':
1077 elif st == 'iter':
1078 if rev in results:
1078 if rev in results:
1079 ui.status("Found revision %s from %s\n" %
1079 ui.status("Found revision %s from %s\n" %
1080 (rev, util.datestr(results[rev])))
1080 (rev, util.datestr(results[rev])))
1081 return str(rev)
1081 return str(rev)
1082
1082
1083 raise util.Abort(_("revision matching date not found"))
1083 raise util.Abort(_("revision matching date not found"))
1084
1084
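
A hedged usage sketch for finddate() (added for illustration; the repository path and the date string are assumptions, and the date must be in a form util.matchdate() accepts):

# standalone script, not part of cmdutil.py
from mercurial import ui as uimod, hg, cmdutil

u = uimod.ui()
repo = hg.repository(u, '.')                    # assumes cwd is a repository
rev = cmdutil.finddate(u, repo, '2007-05-01')   # tipmost changeset on that date
u.status("using revision %s\n" % rev)
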
1085 def walkchangerevs(ui, repo, pats, change, opts):
1085 def walkchangerevs(ui, repo, pats, change, opts):
1086 '''Iterate over files and the revs they changed in.
1086 '''Iterate over files and the revs they changed in.
1087
1087
1088 Callers most commonly need to iterate backwards over the history
1088 Callers most commonly need to iterate backwards over the history
1089 they are interested in. Doing so has awful (quadratic-looking)
1089 they are interested in. Doing so has awful (quadratic-looking)
1090 performance, so we use iterators in a "windowed" way.
1090 performance, so we use iterators in a "windowed" way.
1091
1091
1092 We walk a window of revisions in the desired order. Within the
1092 We walk a window of revisions in the desired order. Within the
1093 window, we first walk forwards to gather data, then in the desired
1093 window, we first walk forwards to gather data, then in the desired
1094 order (usually backwards) to display it.
1094 order (usually backwards) to display it.
1095
1095
1096 This function returns an (iterator, matchfn) tuple. The iterator
1096 This function returns an (iterator, matchfn) tuple. The iterator
1097 yields 3-tuples. They will be of one of the following forms:
1097 yields 3-tuples. They will be of one of the following forms:
1098
1098
1099 "window", incrementing, lastrev: stepping through a window,
1099 "window", incrementing, lastrev: stepping through a window,
1100 positive if walking forwards through revs, last rev in the
1100 positive if walking forwards through revs, last rev in the
1101 sequence iterated over - use to reset state for the current window
1101 sequence iterated over - use to reset state for the current window
1102
1102
1103 "add", rev, fns: out-of-order traversal of the given file names
1103 "add", rev, fns: out-of-order traversal of the given file names
1104 fns, which changed during revision rev - use to gather data for
1104 fns, which changed during revision rev - use to gather data for
1105 possible display
1105 possible display
1106
1106
1107 "iter", rev, None: in-order traversal of the revs earlier iterated
1107 "iter", rev, None: in-order traversal of the revs earlier iterated
1108 over with "add" - use to display data'''
1108 over with "add" - use to display data'''
1109
1109
1110 def increasing_windows(start, end, windowsize=8, sizelimit=512):
1110 def increasing_windows(start, end, windowsize=8, sizelimit=512):
1111 if start < end:
1111 if start < end:
1112 while start < end:
1112 while start < end:
1113 yield start, min(windowsize, end-start)
1113 yield start, min(windowsize, end-start)
1114 start += windowsize
1114 start += windowsize
1115 if windowsize < sizelimit:
1115 if windowsize < sizelimit:
1116 windowsize *= 2
1116 windowsize *= 2
1117 else:
1117 else:
1118 while start > end:
1118 while start > end:
1119 yield start, min(windowsize, start-end-1)
1119 yield start, min(windowsize, start-end-1)
1120 start -= windowsize
1120 start -= windowsize
1121 if windowsize < sizelimit:
1121 if windowsize < sizelimit:
1122 windowsize *= 2
1122 windowsize *= 2
1123
1123
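# Added illustrative note: for start=0, end=100 the generator above yields
# (0, 8), (8, 16), (24, 32), (56, 44) -- the offset advances by the previous
# window size, the size doubles up to sizelimit, and the final window is
# clamped to whatever range remains.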
1124 files, matchfn, anypats = matchpats(repo, pats, opts)
1124 files, matchfn, anypats = matchpats(repo, pats, opts)
1125 follow = opts.get('follow') or opts.get('follow_first')
1125 follow = opts.get('follow') or opts.get('follow_first')
1126
1126
1127 if repo.changelog.count() == 0:
1127 if repo.changelog.count() == 0:
1128 return [], matchfn
1128 return [], matchfn
1129
1129
1130 if follow:
1130 if follow:
1131 defrange = '%s:0' % repo.changectx().rev()
1131 defrange = '%s:0' % repo.changectx().rev()
1132 else:
1132 else:
1133 defrange = 'tip:0'
1133 defrange = 'tip:0'
1134 revs = revrange(repo, opts['rev'] or [defrange])
1134 revs = revrange(repo, opts['rev'] or [defrange])
1135 wanted = {}
1135 wanted = {}
1136 slowpath = anypats or opts.get('removed')
1136 slowpath = anypats or opts.get('removed')
1137 fncache = {}
1137 fncache = {}
1138
1138
1139 if not slowpath and not files:
1139 if not slowpath and not files:
1140 # No files, no patterns. Display all revs.
1140 # No files, no patterns. Display all revs.
1141 wanted = dict.fromkeys(revs)
1141 wanted = dict.fromkeys(revs)
1142 copies = []
1142 copies = []
1143 if not slowpath:
1143 if not slowpath:
1144 # Only files, no patterns. Check the history of each file.
1144 # Only files, no patterns. Check the history of each file.
1145 def filerevgen(filelog, node):
1145 def filerevgen(filelog, node):
1146 cl_count = repo.changelog.count()
1146 cl_count = repo.changelog.count()
1147 if node is None:
1147 if node is None:
1148 last = filelog.count() - 1
1148 last = filelog.count() - 1
1149 else:
1149 else:
1150 last = filelog.rev(node)
1150 last = filelog.rev(node)
1151 for i, window in increasing_windows(last, nullrev):
1151 for i, window in increasing_windows(last, nullrev):
1152 revs = []
1152 revs = []
1153 for j in xrange(i - window, i + 1):
1153 for j in xrange(i - window, i + 1):
1154 n = filelog.node(j)
1154 n = filelog.node(j)
1155 revs.append((filelog.linkrev(n),
1155 revs.append((filelog.linkrev(n),
1156 follow and filelog.renamed(n)))
1156 follow and filelog.renamed(n)))
1157 revs.reverse()
1157 revs.reverse()
1158 for rev in revs:
1158 for rev in revs:
1159 # only yield revs for which we have the changelog entry; this can
1159 # only yield revs for which we have the changelog entry; this can
1160 # happen while doing "hg log" during a pull or commit
1160 # happen while doing "hg log" during a pull or commit
1161 if rev[0] < cl_count:
1161 if rev[0] < cl_count:
1162 yield rev
1162 yield rev
1163 def iterfiles():
1163 def iterfiles():
1164 for filename in files:
1164 for filename in files:
1165 yield filename, None
1165 yield filename, None
1166 for filename_node in copies:
1166 for filename_node in copies:
1167 yield filename_node
1167 yield filename_node
1168 minrev, maxrev = min(revs), max(revs)
1168 minrev, maxrev = min(revs), max(revs)
1169 for file_, node in iterfiles():
1169 for file_, node in iterfiles():
1170 filelog = repo.file(file_)
1170 filelog = repo.file(file_)
1171 # A zero count may be a directory or deleted file, so
1171 # A zero count may be a directory or deleted file, so
1172 # try to find matching entries on the slow path.
1172 # try to find matching entries on the slow path.
1173 if filelog.count() == 0:
1173 if filelog.count() == 0:
1174 slowpath = True
1174 slowpath = True
1175 break
1175 break
1176 for rev, copied in filerevgen(filelog, node):
1176 for rev, copied in filerevgen(filelog, node):
1177 if rev <= maxrev:
1177 if rev <= maxrev:
1178 if rev < minrev:
1178 if rev < minrev:
1179 break
1179 break
1180 fncache.setdefault(rev, [])
1180 fncache.setdefault(rev, [])
1181 fncache[rev].append(file_)
1181 fncache[rev].append(file_)
1182 wanted[rev] = 1
1182 wanted[rev] = 1
1183 if follow and copied:
1183 if follow and copied:
1184 copies.append(copied)
1184 copies.append(copied)
1185 if slowpath:
1185 if slowpath:
1186 if follow:
1186 if follow:
1187 raise util.Abort(_('can only follow copies/renames for explicit '
1187 raise util.Abort(_('can only follow copies/renames for explicit '
1188 'file names'))
1188 'file names'))
1189
1189
1190 # The slow path checks files modified in every changeset.
1190 # The slow path checks files modified in every changeset.
1191 def changerevgen():
1191 def changerevgen():
1192 for i, window in increasing_windows(repo.changelog.count()-1,
1192 for i, window in increasing_windows(repo.changelog.count()-1,
1193 nullrev):
1193 nullrev):
1194 for j in xrange(i - window, i + 1):
1194 for j in xrange(i - window, i + 1):
1195 yield j, change(j)[3]
1195 yield j, change(j)[3]
1196
1196
1197 for rev, changefiles in changerevgen():
1197 for rev, changefiles in changerevgen():
1198 matches = filter(matchfn, changefiles)
1198 matches = filter(matchfn, changefiles)
1199 if matches:
1199 if matches:
1200 fncache[rev] = matches
1200 fncache[rev] = matches
1201 wanted[rev] = 1
1201 wanted[rev] = 1
1202
1202
1203 class followfilter:
1203 class followfilter:
1204 def __init__(self, onlyfirst=False):
1204 def __init__(self, onlyfirst=False):
1205 self.startrev = nullrev
1205 self.startrev = nullrev
1206 self.roots = []
1206 self.roots = []
1207 self.onlyfirst = onlyfirst
1207 self.onlyfirst = onlyfirst
1208
1208
1209 def match(self, rev):
1209 def match(self, rev):
1210 def realparents(rev):
1210 def realparents(rev):
1211 if self.onlyfirst:
1211 if self.onlyfirst:
1212 return repo.changelog.parentrevs(rev)[0:1]
1212 return repo.changelog.parentrevs(rev)[0:1]
1213 else:
1213 else:
1214 return filter(lambda x: x != nullrev,
1214 return filter(lambda x: x != nullrev,
1215 repo.changelog.parentrevs(rev))
1215 repo.changelog.parentrevs(rev))
1216
1216
1217 if self.startrev == nullrev:
1217 if self.startrev == nullrev:
1218 self.startrev = rev
1218 self.startrev = rev
1219 return True
1219 return True
1220
1220
1221 if rev > self.startrev:
1221 if rev > self.startrev:
1222 # forward: all descendants
1222 # forward: all descendants
1223 if not self.roots:
1223 if not self.roots:
1224 self.roots.append(self.startrev)
1224 self.roots.append(self.startrev)
1225 for parent in realparents(rev):
1225 for parent in realparents(rev):
1226 if parent in self.roots:
1226 if parent in self.roots:
1227 self.roots.append(rev)
1227 self.roots.append(rev)
1228 return True
1228 return True
1229 else:
1229 else:
1230 # backwards: all parents
1230 # backwards: all parents
1231 if not self.roots:
1231 if not self.roots:
1232 self.roots.extend(realparents(self.startrev))
1232 self.roots.extend(realparents(self.startrev))
1233 if rev in self.roots:
1233 if rev in self.roots:
1234 self.roots.remove(rev)
1234 self.roots.remove(rev)
1235 self.roots.extend(realparents(rev))
1235 self.roots.extend(realparents(rev))
1236 return True
1236 return True
1237
1237
1238 return False
1238 return False
1239
1239
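# Added illustrative note: followfilter.match() is stateful.  The first rev
# it is asked about becomes startrev; later calls answer "is this rev a
# descendant (when walking forward) or an ancestor (when walking backward)
# of startrev?", extending self.roots as it goes.  With onlyfirst=True only
# first parents are followed, which is what --follow-first relies on.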
1240 # it might be worthwhile to do this in the iterator if the rev range
1240 # it might be worthwhile to do this in the iterator if the rev range
1241 # is descending and the prune args are all within that range
1241 # is descending and the prune args are all within that range
1242 for rev in opts.get('prune', ()):
1242 for rev in opts.get('prune', ()):
1243 rev = repo.changelog.rev(repo.lookup(rev))
1243 rev = repo.changelog.rev(repo.lookup(rev))
1244 ff = followfilter()
1244 ff = followfilter()
1245 stop = min(revs[0], revs[-1])
1245 stop = min(revs[0], revs[-1])
1246 for x in xrange(rev, stop-1, -1):
1246 for x in xrange(rev, stop-1, -1):
1247 if ff.match(x) and x in wanted:
1247 if ff.match(x) and x in wanted:
1248 del wanted[x]
1248 del wanted[x]
1249
1249
1250 def iterate():
1250 def iterate():
1251 if follow and not files:
1251 if follow and not files:
1252 ff = followfilter(onlyfirst=opts.get('follow_first'))
1252 ff = followfilter(onlyfirst=opts.get('follow_first'))
1253 def want(rev):
1253 def want(rev):
1254 if ff.match(rev) and rev in wanted:
1254 if ff.match(rev) and rev in wanted:
1255 return True
1255 return True
1256 return False
1256 return False
1257 else:
1257 else:
1258 def want(rev):
1258 def want(rev):
1259 return rev in wanted
1259 return rev in wanted
1260
1260
1261 for i, window in increasing_windows(0, len(revs)):
1261 for i, window in increasing_windows(0, len(revs)):
1262 yield 'window', revs[0] < revs[-1], revs[-1]
1262 yield 'window', revs[0] < revs[-1], revs[-1]
1263 nrevs = [rev for rev in revs[i:i+window] if want(rev)]
1263 nrevs = [rev for rev in revs[i:i+window] if want(rev)]
1264 srevs = list(nrevs)
1264 srevs = list(nrevs)
1265 srevs.sort()
1265 srevs.sort()
1266 for rev in srevs:
1266 for rev in srevs:
1267 fns = fncache.get(rev)
1267 fns = fncache.get(rev)
1268 if not fns:
1268 if not fns:
1269 def fns_generator():
1269 def fns_generator():
1270 for f in change(rev)[3]:
1270 for f in change(rev)[3]:
1271 if matchfn(f):
1271 if matchfn(f):
1272 yield f
1272 yield f
1273 fns = fns_generator()
1273 fns = fns_generator()
1274 yield 'add', rev, fns
1274 yield 'add', rev, fns
1275 for rev in nrevs:
1275 for rev in nrevs:
1276 yield 'iter', rev, None
1276 yield 'iter', rev, None
1277 return iterate(), matchfn
1277 return iterate(), matchfn
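
A hedged consumer sketch (added for illustration, written as if it lived in this module), modelled on finddate() above: data is gathered out of order on 'add' events and emitted in display order on 'iter' events; printing the description is an arbitrary choice for the example.

def print_descriptions(ui, repo):
    get = util.cachefunc(lambda r: repo.changectx(r).changeset())
    changeiter, matchfn = walkchangerevs(ui, repo, [], get, {'rev': None})
    gathered = {}
    for st, rev, fns in changeiter:
        if st == 'add':
            # changeset tuple: [manifest, user, date, files, desc, extra]
            gathered[rev] = get(rev)[4].strip()
        elif st == 'iter' and rev in gathered:
            ui.status("%d: %s\n" % (rev, gathered[rev]))
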
@@ -1,3179 +1,3174 b''
1 # commands.py - command processing for mercurial
1 # commands.py - command processing for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import demandimport; demandimport.enable()
8 import demandimport; demandimport.enable()
9 from node import *
9 from node import *
10 from i18n import _
10 from i18n import _
11 import bisect, os, re, sys, urllib, shlex, stat
11 import bisect, os, re, sys, urllib, stat
12 import ui, hg, util, revlog, bundlerepo, extensions
12 import ui, hg, util, revlog, bundlerepo, extensions
13 import difflib, patch, time, help, mdiff, tempfile
13 import difflib, patch, time, help, mdiff, tempfile
14 import errno, version, socket
14 import errno, version, socket
15 import archival, changegroup, cmdutil, hgweb.server, sshserver
15 import archival, changegroup, cmdutil, hgweb.server, sshserver
16
16
17 # Commands start here, listed alphabetically
17 # Commands start here, listed alphabetically
18
18
19 def add(ui, repo, *pats, **opts):
19 def add(ui, repo, *pats, **opts):
20 """add the specified files on the next commit
20 """add the specified files on the next commit
21
21
22 Schedule files to be version controlled and added to the repository.
22 Schedule files to be version controlled and added to the repository.
23
23
24 The files will be added to the repository at the next commit. To
24 The files will be added to the repository at the next commit. To
25 undo an add before that, see hg revert.
25 undo an add before that, see hg revert.
26
26
27 If no names are given, add all files in the repository.
27 If no names are given, add all files in the repository.
28 """
28 """
29
29
30 names = []
30 names = []
31 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
31 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
32 if exact:
32 if exact:
33 if ui.verbose:
33 if ui.verbose:
34 ui.status(_('adding %s\n') % rel)
34 ui.status(_('adding %s\n') % rel)
35 names.append(abs)
35 names.append(abs)
36 elif abs not in repo.dirstate:
36 elif abs not in repo.dirstate:
37 ui.status(_('adding %s\n') % rel)
37 ui.status(_('adding %s\n') % rel)
38 names.append(abs)
38 names.append(abs)
39 if not opts.get('dry_run'):
39 if not opts.get('dry_run'):
40 repo.add(names)
40 repo.add(names)
41
41
42 def addremove(ui, repo, *pats, **opts):
42 def addremove(ui, repo, *pats, **opts):
43 """add all new files, delete all missing files
43 """add all new files, delete all missing files
44
44
45 Add all new files and remove all missing files from the repository.
45 Add all new files and remove all missing files from the repository.
46
46
47 New files are ignored if they match any of the patterns in .hgignore. As
47 New files are ignored if they match any of the patterns in .hgignore. As
48 with add, these changes take effect at the next commit.
48 with add, these changes take effect at the next commit.
49
49
50 Use the -s option to detect renamed files. With a parameter > 0,
50 Use the -s option to detect renamed files. With a parameter > 0,
51 this compares every removed file with every added file and records
51 this compares every removed file with every added file and records
52 those similar enough as renames. This option takes a percentage
52 those similar enough as renames. This option takes a percentage
53 between 0 (disabled) and 100 (files must be identical) as its
53 between 0 (disabled) and 100 (files must be identical) as its
54 parameter. Detecting renamed files this way can be expensive.
54 parameter. Detecting renamed files this way can be expensive.
55 """
55 """
56 sim = float(opts.get('similarity') or 0)
56 sim = float(opts.get('similarity') or 0)
57 if sim < 0 or sim > 100:
57 if sim < 0 or sim > 100:
58 raise util.Abort(_('similarity must be between 0 and 100'))
58 raise util.Abort(_('similarity must be between 0 and 100'))
59 return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)
59 return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)
60
60
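# Added illustrative note: "hg addremove -s 90" reaches this function as
# opts['similarity'] == '90' (hence the float() above); after the range
# check it is handed to cmdutil.addremove() as 0.9, meaning files at least
# 90% similar to a removed file are recorded as renames of it.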
61 def annotate(ui, repo, *pats, **opts):
61 def annotate(ui, repo, *pats, **opts):
62 """show changeset information per file line
62 """show changeset information per file line
63
63
64 List changes in files, showing the revision id responsible for each line.
64 List changes in files, showing the revision id responsible for each line.
65
65
66 This command is useful to discover who made a change or when a change took
66 This command is useful to discover who made a change or when a change took
67 place.
67 place.
68
68
69 Without the -a option, annotate will avoid processing files it
69 Without the -a option, annotate will avoid processing files it
70 detects as binary. With -a, annotate will generate an annotation
70 detects as binary. With -a, annotate will generate an annotation
71 anyway, probably with undesirable results.
71 anyway, probably with undesirable results.
72 """
72 """
73 getdate = util.cachefunc(lambda x: util.datestr(x[0].date()))
73 getdate = util.cachefunc(lambda x: util.datestr(x[0].date()))
74
74
75 if not pats:
75 if not pats:
76 raise util.Abort(_('at least one file name or pattern required'))
76 raise util.Abort(_('at least one file name or pattern required'))
77
77
78 opmap = [('user', lambda x: ui.shortuser(x[0].user())),
78 opmap = [('user', lambda x: ui.shortuser(x[0].user())),
79 ('number', lambda x: str(x[0].rev())),
79 ('number', lambda x: str(x[0].rev())),
80 ('changeset', lambda x: short(x[0].node())),
80 ('changeset', lambda x: short(x[0].node())),
81 ('date', getdate),
81 ('date', getdate),
82 ('follow', lambda x: x[0].path()),
82 ('follow', lambda x: x[0].path()),
83 ]
83 ]
84
84
85 if (not opts['user'] and not opts['changeset'] and not opts['date']
85 if (not opts['user'] and not opts['changeset'] and not opts['date']
86 and not opts['follow']):
86 and not opts['follow']):
87 opts['number'] = 1
87 opts['number'] = 1
88
88
89 linenumber = opts.get('line_number') is not None
89 linenumber = opts.get('line_number') is not None
90 if (linenumber and (not opts['changeset']) and (not opts['number'])):
90 if (linenumber and (not opts['changeset']) and (not opts['number'])):
91 raise util.Abort(_('at least one of -n/-c is required for -l'))
91 raise util.Abort(_('at least one of -n/-c is required for -l'))
92
92
93 funcmap = [func for op, func in opmap if opts.get(op)]
93 funcmap = [func for op, func in opmap if opts.get(op)]
94 if linenumber:
94 if linenumber:
95 lastfunc = funcmap[-1]
95 lastfunc = funcmap[-1]
96 funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])
96 funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])
97
97
98 ctx = repo.changectx(opts['rev'])
98 ctx = repo.changectx(opts['rev'])
99
99
100 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
100 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
101 node=ctx.node()):
101 node=ctx.node()):
102 fctx = ctx.filectx(abs)
102 fctx = ctx.filectx(abs)
103 if not opts['text'] and util.binary(fctx.data()):
103 if not opts['text'] and util.binary(fctx.data()):
104 ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
104 ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
105 continue
105 continue
106
106
107 lines = fctx.annotate(follow=opts.get('follow'),
107 lines = fctx.annotate(follow=opts.get('follow'),
108 linenumber=linenumber)
108 linenumber=linenumber)
109 pieces = []
109 pieces = []
110
110
111 for f in funcmap:
111 for f in funcmap:
112 l = [f(n) for n, dummy in lines]
112 l = [f(n) for n, dummy in lines]
113 if l:
113 if l:
114 m = max(map(len, l))
114 m = max(map(len, l))
115 pieces.append(["%*s" % (m, x) for x in l])
115 pieces.append(["%*s" % (m, x) for x in l])
116
116
117 if pieces:
117 if pieces:
118 for p, l in zip(zip(*pieces), lines):
118 for p, l in zip(zip(*pieces), lines):
119 ui.write("%s: %s" % (" ".join(p), l[1]))
119 ui.write("%s: %s" % (" ".join(p), l[1]))
120
120
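# Added illustrative note: funcmap holds one formatter per requested column
# (-u, -n, -c, -d, -f); each column is padded with "%*s" to the width of its
# longest value, and zip(*pieces) recombines the padded columns line by line
# before they are written next to the file text.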
121 def archive(ui, repo, dest, **opts):
121 def archive(ui, repo, dest, **opts):
122 '''create unversioned archive of a repository revision
122 '''create unversioned archive of a repository revision
123
123
124 By default, the revision used is the parent of the working
124 By default, the revision used is the parent of the working
125 directory; use "-r" to specify a different revision.
125 directory; use "-r" to specify a different revision.
126
126
127 To specify the type of archive to create, use "-t". Valid
127 To specify the type of archive to create, use "-t". Valid
128 types are:
128 types are:
129
129
130 "files" (default): a directory full of files
130 "files" (default): a directory full of files
131 "tar": tar archive, uncompressed
131 "tar": tar archive, uncompressed
132 "tbz2": tar archive, compressed using bzip2
132 "tbz2": tar archive, compressed using bzip2
133 "tgz": tar archive, compressed using gzip
133 "tgz": tar archive, compressed using gzip
134 "uzip": zip archive, uncompressed
134 "uzip": zip archive, uncompressed
135 "zip": zip archive, compressed using deflate
135 "zip": zip archive, compressed using deflate
136
136
137 The exact name of the destination archive or directory is given
137 The exact name of the destination archive or directory is given
138 using a format string; see "hg help export" for details.
138 using a format string; see "hg help export" for details.
139
139
140 Each member added to an archive file has a directory prefix
140 Each member added to an archive file has a directory prefix
141 prepended. Use "-p" to specify a format string for the prefix.
141 prepended. Use "-p" to specify a format string for the prefix.
142 The default is the basename of the archive, with suffixes removed.
142 The default is the basename of the archive, with suffixes removed.
143 '''
143 '''
144
144
145 ctx = repo.changectx(opts['rev'])
145 ctx = repo.changectx(opts['rev'])
146 if not ctx:
146 if not ctx:
147 raise util.Abort(_('repository has no revisions'))
147 raise util.Abort(_('repository has no revisions'))
148 node = ctx.node()
148 node = ctx.node()
149 dest = cmdutil.make_filename(repo, dest, node)
149 dest = cmdutil.make_filename(repo, dest, node)
150 if os.path.realpath(dest) == repo.root:
150 if os.path.realpath(dest) == repo.root:
151 raise util.Abort(_('repository root cannot be destination'))
151 raise util.Abort(_('repository root cannot be destination'))
152 dummy, matchfn, dummy = cmdutil.matchpats(repo, [], opts)
152 dummy, matchfn, dummy = cmdutil.matchpats(repo, [], opts)
153 kind = opts.get('type') or 'files'
153 kind = opts.get('type') or 'files'
154 prefix = opts['prefix']
154 prefix = opts['prefix']
155 if dest == '-':
155 if dest == '-':
156 if kind == 'files':
156 if kind == 'files':
157 raise util.Abort(_('cannot archive plain files to stdout'))
157 raise util.Abort(_('cannot archive plain files to stdout'))
158 dest = sys.stdout
158 dest = sys.stdout
159 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
159 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
160 prefix = cmdutil.make_filename(repo, prefix, node)
160 prefix = cmdutil.make_filename(repo, prefix, node)
161 archival.archive(repo, dest, node, kind, not opts['no_decode'],
161 archival.archive(repo, dest, node, kind, not opts['no_decode'],
162 matchfn, prefix)
162 matchfn, prefix)
163
163
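
A hedged programmatic sketch of the same call (added for illustration; the snapshot name, the '%h' short-hash code and the prefix pattern are assumptions mirroring the defaults used above):

# standalone script, not part of commands.py
import os
from mercurial import ui as uimod, hg, cmdutil, util, archival

u = uimod.ui()
repo = hg.repository(u, '.')
node = repo.lookup('tip')
dest = cmdutil.make_filename(repo, 'snapshot-%h.tgz', node)
prefix = cmdutil.make_filename(repo, os.path.basename(repo.root) + '-%h', node)
archival.archive(repo, dest, node, 'tgz', True, util.always, prefix)
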
164 def backout(ui, repo, node=None, rev=None, **opts):
164 def backout(ui, repo, node=None, rev=None, **opts):
165 '''reverse effect of earlier changeset
165 '''reverse effect of earlier changeset
166
166
167 Commit the backed out changes as a new changeset. The new
167 Commit the backed out changes as a new changeset. The new
168 changeset is a child of the backed out changeset.
168 changeset is a child of the backed out changeset.
169
169
170 If you back out a changeset other than the tip, a new head is
170 If you back out a changeset other than the tip, a new head is
171 created. This head is the parent of the working directory. If
171 created. This head is the parent of the working directory. If
172 you back out an old changeset, your working directory will appear
172 you back out an old changeset, your working directory will appear
173 old after the backout. You should merge the backout changeset
173 old after the backout. You should merge the backout changeset
174 with another head.
174 with another head.
175
175
176 The --merge option remembers the parent of the working directory
176 The --merge option remembers the parent of the working directory
177 before starting the backout, then merges the new head with that
177 before starting the backout, then merges the new head with that
178 changeset afterwards. This saves you from doing the merge by
178 changeset afterwards. This saves you from doing the merge by
179 hand. The result of this merge is not committed, as for a normal
179 hand. The result of this merge is not committed, as for a normal
180 merge.'''
180 merge.'''
181 if rev and node:
181 if rev and node:
182 raise util.Abort(_("please specify just one revision"))
182 raise util.Abort(_("please specify just one revision"))
183
183
184 if not rev:
184 if not rev:
185 rev = node
185 rev = node
186
186
187 if not rev:
187 if not rev:
188 raise util.Abort(_("please specify a revision to backout"))
188 raise util.Abort(_("please specify a revision to backout"))
189
189
190 cmdutil.bail_if_changed(repo)
190 cmdutil.bail_if_changed(repo)
191 op1, op2 = repo.dirstate.parents()
191 op1, op2 = repo.dirstate.parents()
192 if op2 != nullid:
192 if op2 != nullid:
193 raise util.Abort(_('outstanding uncommitted merge'))
193 raise util.Abort(_('outstanding uncommitted merge'))
194 node = repo.lookup(rev)
194 node = repo.lookup(rev)
195 p1, p2 = repo.changelog.parents(node)
195 p1, p2 = repo.changelog.parents(node)
196 if p1 == nullid:
196 if p1 == nullid:
197 raise util.Abort(_('cannot back out a change with no parents'))
197 raise util.Abort(_('cannot back out a change with no parents'))
198 if p2 != nullid:
198 if p2 != nullid:
199 if not opts['parent']:
199 if not opts['parent']:
200 raise util.Abort(_('cannot back out a merge changeset without '
200 raise util.Abort(_('cannot back out a merge changeset without '
201 '--parent'))
201 '--parent'))
202 p = repo.lookup(opts['parent'])
202 p = repo.lookup(opts['parent'])
203 if p not in (p1, p2):
203 if p not in (p1, p2):
204 raise util.Abort(_('%s is not a parent of %s') %
204 raise util.Abort(_('%s is not a parent of %s') %
205 (short(p), short(node)))
205 (short(p), short(node)))
206 parent = p
206 parent = p
207 else:
207 else:
208 if opts['parent']:
208 if opts['parent']:
209 raise util.Abort(_('cannot use --parent on non-merge changeset'))
209 raise util.Abort(_('cannot use --parent on non-merge changeset'))
210 parent = p1
210 parent = p1
211 hg.clean(repo, node, show_stats=False)
211 hg.clean(repo, node, show_stats=False)
212 revert_opts = opts.copy()
212 revert_opts = opts.copy()
213 revert_opts['date'] = None
213 revert_opts['date'] = None
214 revert_opts['all'] = True
214 revert_opts['all'] = True
215 revert_opts['rev'] = hex(parent)
215 revert_opts['rev'] = hex(parent)
216 revert(ui, repo, **revert_opts)
216 revert(ui, repo, **revert_opts)
217 commit_opts = opts.copy()
217 commit_opts = opts.copy()
218 commit_opts['addremove'] = False
218 commit_opts['addremove'] = False
219 if not commit_opts['message'] and not commit_opts['logfile']:
219 if not commit_opts['message'] and not commit_opts['logfile']:
220 commit_opts['message'] = _("Backed out changeset %s") % (short(node))
220 commit_opts['message'] = _("Backed out changeset %s") % (short(node))
221 commit_opts['force_editor'] = True
221 commit_opts['force_editor'] = True
222 commit(ui, repo, **commit_opts)
222 commit(ui, repo, **commit_opts)
223 def nice(node):
223 def nice(node):
224 return '%d:%s' % (repo.changelog.rev(node), short(node))
224 return '%d:%s' % (repo.changelog.rev(node), short(node))
225 ui.status(_('changeset %s backs out changeset %s\n') %
225 ui.status(_('changeset %s backs out changeset %s\n') %
226 (nice(repo.changelog.tip()), nice(node)))
226 (nice(repo.changelog.tip()), nice(node)))
227 if op1 != node:
227 if op1 != node:
228 if opts['merge']:
228 if opts['merge']:
229 ui.status(_('merging with changeset %s\n') % nice(op1))
229 ui.status(_('merging with changeset %s\n') % nice(op1))
230 hg.merge(repo, hex(op1))
230 hg.merge(repo, hex(op1))
231 else:
231 else:
232 ui.status(_('the backout changeset is a new head - '
232 ui.status(_('the backout changeset is a new head - '
233 'do not forget to merge\n'))
233 'do not forget to merge\n'))
234 ui.status(_('(use "backout --merge" '
234 ui.status(_('(use "backout --merge" '
235 'if you want to auto-merge)\n'))
235 'if you want to auto-merge)\n'))
236
236
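# Added illustrative note: the sequence above is clean() to the revision
# being backed out, revert() the working directory to that revision's
# parent, commit the result as a new child changeset, and -- only with
# --merge -- merge the new head back with the previous working directory
# parent (op1).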
237 def branch(ui, repo, label=None, **opts):
237 def branch(ui, repo, label=None, **opts):
238 """set or show the current branch name
238 """set or show the current branch name
239
239
240 With no argument, show the current branch name. With one argument,
240 With no argument, show the current branch name. With one argument,
241 set the working directory branch name (the branch does not exist in
241 set the working directory branch name (the branch does not exist in
242 the repository until the next commit).
242 the repository until the next commit).
243
243
244 Unless --force is specified, branch will not let you set a
244 Unless --force is specified, branch will not let you set a
245 branch name that shadows an existing branch.
245 branch name that shadows an existing branch.
246 """
246 """
247
247
248 if label:
248 if label:
249 if not opts.get('force') and label in repo.branchtags():
249 if not opts.get('force') and label in repo.branchtags():
250 if label not in [p.branch() for p in repo.workingctx().parents()]:
250 if label not in [p.branch() for p in repo.workingctx().parents()]:
251 raise util.Abort(_('a branch of the same name already exists'
251 raise util.Abort(_('a branch of the same name already exists'
252 ' (use --force to override)'))
252 ' (use --force to override)'))
253 repo.dirstate.setbranch(util.fromlocal(label))
253 repo.dirstate.setbranch(util.fromlocal(label))
254 ui.status(_('marked working directory as branch %s\n') % label)
254 ui.status(_('marked working directory as branch %s\n') % label)
255 else:
255 else:
256 ui.write("%s\n" % util.tolocal(repo.dirstate.branch()))
256 ui.write("%s\n" % util.tolocal(repo.dirstate.branch()))
257
257
258 def branches(ui, repo, active=False):
258 def branches(ui, repo, active=False):
259 """list repository named branches
259 """list repository named branches
260
260
261 List the repository's named branches, indicating which ones are
261 List the repository's named branches, indicating which ones are
262 inactive. If active is specified, only show active branches.
262 inactive. If active is specified, only show active branches.
263
263
264 A branch is considered active if it contains unmerged heads.
264 A branch is considered active if it contains unmerged heads.
265 """
265 """
266 b = repo.branchtags()
266 b = repo.branchtags()
267 heads = dict.fromkeys(repo.heads(), 1)
267 heads = dict.fromkeys(repo.heads(), 1)
268 l = [((n in heads), repo.changelog.rev(n), n, t) for t, n in b.items()]
268 l = [((n in heads), repo.changelog.rev(n), n, t) for t, n in b.items()]
269 l.sort()
269 l.sort()
270 l.reverse()
270 l.reverse()
271 for ishead, r, n, t in l:
271 for ishead, r, n, t in l:
272 if active and not ishead:
272 if active and not ishead:
273 # If we're only displaying active branches, abort the loop on
273 # If we're only displaying active branches, abort the loop on
274 # encountering the first inactive head
274 # encountering the first inactive head
275 break
275 break
276 else:
276 else:
277 hexfunc = ui.debugflag and hex or short
277 hexfunc = ui.debugflag and hex or short
278 if ui.quiet:
278 if ui.quiet:
279 ui.write("%s\n" % t)
279 ui.write("%s\n" % t)
280 else:
280 else:
281 spaces = " " * (30 - util.locallen(t))
281 spaces = " " * (30 - util.locallen(t))
282 # The code only gets here if inactive branches are being
282 # The code only gets here if inactive branches are being
283 # displayed or the branch is active.
283 # displayed or the branch is active.
284 isinactive = ((not ishead) and " (inactive)") or ''
284 isinactive = ((not ishead) and " (inactive)") or ''
285 ui.write("%s%s %s:%s%s\n" % (t, spaces, r, hexfunc(n), isinactive))
285 ui.write("%s%s %s:%s%s\n" % (t, spaces, r, hexfunc(n), isinactive))
286
286
287 def bundle(ui, repo, fname, dest=None, **opts):
287 def bundle(ui, repo, fname, dest=None, **opts):
288 """create a changegroup file
288 """create a changegroup file
289
289
290 Generate a compressed changegroup file collecting changesets not
290 Generate a compressed changegroup file collecting changesets not
291 found in the other repository.
291 found in the other repository.
292
292
293 If no destination repository is specified, the destination is assumed
293 If no destination repository is specified, the destination is assumed
294 to have all the nodes specified by one or more --base parameters.
294 to have all the nodes specified by one or more --base parameters.
295
295
296 The bundle file can then be transferred using conventional means and
296 The bundle file can then be transferred using conventional means and
297 applied to another repository with the unbundle or pull command.
297 applied to another repository with the unbundle or pull command.
298 This is useful when direct push and pull are not available or when
298 This is useful when direct push and pull are not available or when
299 exporting an entire repository is undesirable.
299 exporting an entire repository is undesirable.
300
300
301 Applying bundles preserves all changeset contents including
301 Applying bundles preserves all changeset contents including
302 permissions, copy/rename information, and revision history.
302 permissions, copy/rename information, and revision history.
303 """
303 """
304 revs = opts.get('rev') or None
304 revs = opts.get('rev') or None
305 if revs:
305 if revs:
306 revs = [repo.lookup(rev) for rev in revs]
306 revs = [repo.lookup(rev) for rev in revs]
307 base = opts.get('base')
307 base = opts.get('base')
308 if base:
308 if base:
309 if dest:
309 if dest:
310 raise util.Abort(_("--base is incompatible with specifying "
310 raise util.Abort(_("--base is incompatible with specifying "
311 "a destination"))
311 "a destination"))
312 base = [repo.lookup(rev) for rev in base]
312 base = [repo.lookup(rev) for rev in base]
313 # create the right base
313 # create the right base
314 # XXX: nodesbetween / changegroup* should be "fixed" instead
314 # XXX: nodesbetween / changegroup* should be "fixed" instead
315 o = []
315 o = []
316 has = {nullid: None}
316 has = {nullid: None}
317 for n in base:
317 for n in base:
318 has.update(repo.changelog.reachable(n))
318 has.update(repo.changelog.reachable(n))
319 if revs:
319 if revs:
320 visit = list(revs)
320 visit = list(revs)
321 else:
321 else:
322 visit = repo.changelog.heads()
322 visit = repo.changelog.heads()
323 seen = {}
323 seen = {}
324 while visit:
324 while visit:
325 n = visit.pop(0)
325 n = visit.pop(0)
326 parents = [p for p in repo.changelog.parents(n) if p not in has]
326 parents = [p for p in repo.changelog.parents(n) if p not in has]
327 if len(parents) == 0:
327 if len(parents) == 0:
328 o.insert(0, n)
328 o.insert(0, n)
329 else:
329 else:
330 for p in parents:
330 for p in parents:
331 if p not in seen:
331 if p not in seen:
332 seen[p] = 1
332 seen[p] = 1
333 visit.append(p)
333 visit.append(p)
334 else:
334 else:
335 cmdutil.setremoteconfig(ui, opts)
335 cmdutil.setremoteconfig(ui, opts)
336 dest, revs = cmdutil.parseurl(
336 dest, revs = cmdutil.parseurl(
337 ui.expandpath(dest or 'default-push', dest or 'default'), revs)
337 ui.expandpath(dest or 'default-push', dest or 'default'), revs)
338 other = hg.repository(ui, dest)
338 other = hg.repository(ui, dest)
339 o = repo.findoutgoing(other, force=opts['force'])
339 o = repo.findoutgoing(other, force=opts['force'])
340
340
341 if revs:
341 if revs:
342 cg = repo.changegroupsubset(o, revs, 'bundle')
342 cg = repo.changegroupsubset(o, revs, 'bundle')
343 else:
343 else:
344 cg = repo.changegroup(o, 'bundle')
344 cg = repo.changegroup(o, 'bundle')
345 changegroup.writebundle(cg, fname, "HG10BZ")
345 changegroup.writebundle(cg, fname, "HG10BZ")
346
346
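# Added illustrative note: with --base, "has" becomes every node reachable
# from the base revisions; the walk above then follows parents from the
# requested heads until it hits "has", and the parentless frontier nodes
# collected in "o" seed changegroupsubset().  Without --base, findoutgoing()
# against the destination repository computes "o" instead.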
347 def cat(ui, repo, file1, *pats, **opts):
347 def cat(ui, repo, file1, *pats, **opts):
348 """output the current or given revision of files
348 """output the current or given revision of files
349
349
350 Print the specified files as they were at the given revision.
350 Print the specified files as they were at the given revision.
351 If no revision is given, the parent of the working directory is used,
351 If no revision is given, the parent of the working directory is used,
352 or tip if no revision is checked out.
352 or tip if no revision is checked out.
353
353
354 Output may be to a file, in which case the name of the file is
354 Output may be to a file, in which case the name of the file is
355 given using a format string. The formatting rules are the same as
355 given using a format string. The formatting rules are the same as
356 for the export command, with the following additions:
356 for the export command, with the following additions:
357
357
358 %s basename of file being printed
358 %s basename of file being printed
359 %d dirname of file being printed, or '.' if in repo root
359 %d dirname of file being printed, or '.' if in repo root
360 %p root-relative path name of file being printed
360 %p root-relative path name of file being printed
361 """
361 """
362 ctx = repo.changectx(opts['rev'])
362 ctx = repo.changectx(opts['rev'])
363 err = 1
363 err = 1
364 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
364 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
365 ctx.node()):
365 ctx.node()):
366 fp = cmdutil.make_file(repo, opts['output'], ctx.node(), pathname=abs)
366 fp = cmdutil.make_file(repo, opts['output'], ctx.node(), pathname=abs)
367 fp.write(ctx.filectx(abs).data())
367 fp.write(ctx.filectx(abs).data())
368 err = 0
368 err = 0
369 return err
369 return err
370
370
371 def clone(ui, source, dest=None, **opts):
371 def clone(ui, source, dest=None, **opts):
372 """make a copy of an existing repository
372 """make a copy of an existing repository
373
373
374 Create a copy of an existing repository in a new directory.
374 Create a copy of an existing repository in a new directory.
375
375
376 If no destination directory name is specified, it defaults to the
376 If no destination directory name is specified, it defaults to the
377 basename of the source.
377 basename of the source.
378
378
379 The location of the source is added to the new repository's
379 The location of the source is added to the new repository's
380 .hg/hgrc file, as the default to be used for future pulls.
380 .hg/hgrc file, as the default to be used for future pulls.
381
381
382 For efficiency, hardlinks are used for cloning whenever the source
382 For efficiency, hardlinks are used for cloning whenever the source
383 and destination are on the same filesystem (note this applies only
383 and destination are on the same filesystem (note this applies only
384 to the repository data, not to the checked out files). Some
384 to the repository data, not to the checked out files). Some
385 filesystems, such as AFS, implement hardlinking incorrectly, but
385 filesystems, such as AFS, implement hardlinking incorrectly, but
386 do not report errors. In these cases, use the --pull option to
386 do not report errors. In these cases, use the --pull option to
387 avoid hardlinking.
387 avoid hardlinking.
388
388
389 You can safely clone repositories and checked out files using full
389 You can safely clone repositories and checked out files using full
390 hardlinks with
390 hardlinks with
391
391
392 $ cp -al REPO REPOCLONE
392 $ cp -al REPO REPOCLONE
393
393
394 which is the fastest way to clone. However, the operation is not
394 which is the fastest way to clone. However, the operation is not
395 atomic (making sure REPO is not modified during the operation is
395 atomic (making sure REPO is not modified during the operation is
396 up to you) and you have to make sure your editor breaks hardlinks
396 up to you) and you have to make sure your editor breaks hardlinks
397 (Emacs and most Linux Kernel tools do so).
397 (Emacs and most Linux Kernel tools do so).
398
398
399 If you use the -r option to clone up to a specific revision, no
399 If you use the -r option to clone up to a specific revision, no
400 subsequent revisions will be present in the cloned repository.
400 subsequent revisions will be present in the cloned repository.
401 This option implies --pull, even on local repositories.
401 This option implies --pull, even on local repositories.
402
402
403 See pull for valid source format details.
403 See pull for valid source format details.
404
404
405 It is possible to specify an ssh:// URL as the destination, but no
405 It is possible to specify an ssh:// URL as the destination, but no
406 .hg/hgrc file or working directory will be created on the remote side.
406 .hg/hgrc file or working directory will be created on the remote side.
407 Look at the help text for the pull command for important details
407 Look at the help text for the pull command for important details
408 about ssh:// URLs.
408 about ssh:// URLs.
409 """
409 """
410 cmdutil.setremoteconfig(ui, opts)
410 cmdutil.setremoteconfig(ui, opts)
411 hg.clone(ui, source, dest,
411 hg.clone(ui, source, dest,
412 pull=opts['pull'],
412 pull=opts['pull'],
413 stream=opts['uncompressed'],
413 stream=opts['uncompressed'],
414 rev=opts['rev'],
414 rev=opts['rev'],
415 update=not opts['noupdate'])
415 update=not opts['noupdate'])
416
416
417 def commit(ui, repo, *pats, **opts):
417 def commit(ui, repo, *pats, **opts):
418 """commit the specified files or all outstanding changes
418 """commit the specified files or all outstanding changes
419
419
420 Commit changes to the given files into the repository.
420 Commit changes to the given files into the repository.
421
421
422 If a list of files is omitted, all changes reported by "hg status"
422 If a list of files is omitted, all changes reported by "hg status"
423 will be committed.
423 will be committed.
424
424
425 If no commit message is specified, the editor configured in your hgrc
425 If no commit message is specified, the editor configured in your hgrc
426 or in the EDITOR environment variable is started to enter a message.
426 or in the EDITOR environment variable is started to enter a message.
427 """
427 """
428 message = cmdutil.logmessage(opts)
428 message = cmdutil.logmessage(opts)
429
429
430 if opts['addremove']:
430 if opts['addremove']:
431 cmdutil.addremove(repo, pats, opts)
431 cmdutil.addremove(repo, pats, opts)
432 fns, match, anypats = cmdutil.matchpats(repo, pats, opts)
432 fns, match, anypats = cmdutil.matchpats(repo, pats, opts)
433 if pats:
433 if pats:
434 status = repo.status(files=fns, match=match)
434 status = repo.status(files=fns, match=match)
435 modified, added, removed, deleted, unknown = status[:5]
435 modified, added, removed, deleted, unknown = status[:5]
436 files = modified + added + removed
436 files = modified + added + removed
437 slist = None
437 slist = None
438 for f in fns:
438 for f in fns:
439 if f == '.':
439 if f == '.':
440 continue
440 continue
441 if f not in files:
441 if f not in files:
442 rf = repo.wjoin(f)
442 rf = repo.wjoin(f)
443 try:
443 try:
444 mode = os.lstat(rf)[stat.ST_MODE]
444 mode = os.lstat(rf)[stat.ST_MODE]
445 except OSError:
445 except OSError:
446 raise util.Abort(_("file %s not found!") % rf)
446 raise util.Abort(_("file %s not found!") % rf)
447 if stat.S_ISDIR(mode):
447 if stat.S_ISDIR(mode):
448 name = f + '/'
448 name = f + '/'
449 if slist is None:
449 if slist is None:
450 slist = list(files)
450 slist = list(files)
451 slist.sort()
451 slist.sort()
452 i = bisect.bisect(slist, name)
452 i = bisect.bisect(slist, name)
453 if i >= len(slist) or not slist[i].startswith(name):
453 if i >= len(slist) or not slist[i].startswith(name):
454 raise util.Abort(_("no match under directory %s!")
454 raise util.Abort(_("no match under directory %s!")
455 % rf)
455 % rf)
456 elif not (stat.S_ISREG(mode) or stat.S_ISLNK(mode)):
456 elif not (stat.S_ISREG(mode) or stat.S_ISLNK(mode)):
457 raise util.Abort(_("can't commit %s: "
457 raise util.Abort(_("can't commit %s: "
458 "unsupported file type!") % rf)
458 "unsupported file type!") % rf)
459 elif f not in repo.dirstate:
459 elif f not in repo.dirstate:
460 raise util.Abort(_("file %s not tracked!") % rf)
460 raise util.Abort(_("file %s not tracked!") % rf)
461 else:
461 else:
462 files = []
462 files = []
463 try:
463 try:
464 repo.commit(files, message, opts['user'], opts['date'], match,
464 repo.commit(files, message, opts['user'], opts['date'], match,
465 force_editor=opts.get('force_editor'))
465 force_editor=opts.get('force_editor'))
466 except ValueError, inst:
466 except ValueError, inst:
467 raise util.Abort(str(inst))
467 raise util.Abort(str(inst))
468
468
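# Added illustrative note: explicit file arguments that did not show up in
# the status results are checked individually above: a directory passes only
# if bisecting the sorted changed-file list finds an entry under "name/",
# and anything else must be a tracked regular file or symlink, otherwise the
# commit aborts.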
469 def docopy(ui, repo, pats, opts):
469 def docopy(ui, repo, pats, opts):
470 # called with the repo lock held
470 # called with the repo lock held
471 #
471 #
472 # hgsep => pathname that uses "/" to separate directories
472 # hgsep => pathname that uses "/" to separate directories
473 # ossep => pathname that uses os.sep to separate directories
473 # ossep => pathname that uses os.sep to separate directories
474 cwd = repo.getcwd()
474 cwd = repo.getcwd()
475 errors = 0
475 errors = 0
476 copied = []
476 copied = []
477 targets = {}
477 targets = {}
478
478
479 # abs: hgsep
479 # abs: hgsep
480 # rel: ossep
480 # rel: ossep
481 # return: hgsep
481 # return: hgsep
482 def okaytocopy(abs, rel, exact):
482 def okaytocopy(abs, rel, exact):
483 reasons = {'?': _('is not managed'),
483 reasons = {'?': _('is not managed'),
484 'r': _('has been marked for remove')}
484 'r': _('has been marked for remove')}
485 state = repo.dirstate[abs]
485 state = repo.dirstate[abs]
486 reason = reasons.get(state)
486 reason = reasons.get(state)
487 if reason:
487 if reason:
488 if exact:
488 if exact:
489 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
489 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
490 else:
490 else:
491 if state == 'a':
491 if state == 'a':
492 origsrc = repo.dirstate.copied(abs)
492 origsrc = repo.dirstate.copied(abs)
493 if origsrc is not None:
493 if origsrc is not None:
494 return origsrc
494 return origsrc
495 return abs
495 return abs
496
496
497 # origsrc: hgsep
497 # origsrc: hgsep
498 # abssrc: hgsep
498 # abssrc: hgsep
499 # relsrc: ossep
499 # relsrc: ossep
500 # otarget: ossep
500 # otarget: ossep
501 def copy(origsrc, abssrc, relsrc, otarget, exact):
501 def copy(origsrc, abssrc, relsrc, otarget, exact):
502 abstarget = util.canonpath(repo.root, cwd, otarget)
502 abstarget = util.canonpath(repo.root, cwd, otarget)
503 reltarget = repo.pathto(abstarget, cwd)
503 reltarget = repo.pathto(abstarget, cwd)
504 prevsrc = targets.get(abstarget)
504 prevsrc = targets.get(abstarget)
505 src = repo.wjoin(abssrc)
505 src = repo.wjoin(abssrc)
506 target = repo.wjoin(abstarget)
506 target = repo.wjoin(abstarget)
507 if prevsrc is not None:
507 if prevsrc is not None:
508 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
508 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
509 (reltarget, repo.pathto(abssrc, cwd),
509 (reltarget, repo.pathto(abssrc, cwd),
510 repo.pathto(prevsrc, cwd)))
510 repo.pathto(prevsrc, cwd)))
511 return
511 return
512 if (not opts['after'] and os.path.exists(target) or
512 if (not opts['after'] and os.path.exists(target) or
513 opts['after'] and repo.dirstate[abstarget] in 'mn'):
513 opts['after'] and repo.dirstate[abstarget] in 'mn'):
514 if not opts['force']:
514 if not opts['force']:
515 ui.warn(_('%s: not overwriting - file exists\n') %
515 ui.warn(_('%s: not overwriting - file exists\n') %
516 reltarget)
516 reltarget)
517 return
517 return
518 if not opts['after'] and not opts.get('dry_run'):
518 if not opts['after'] and not opts.get('dry_run'):
519 os.unlink(target)
519 os.unlink(target)
520 if opts['after']:
520 if opts['after']:
521 if not os.path.exists(target):
521 if not os.path.exists(target):
522 return
522 return
523 else:
523 else:
524 targetdir = os.path.dirname(target) or '.'
524 targetdir = os.path.dirname(target) or '.'
525 if not os.path.isdir(targetdir) and not opts.get('dry_run'):
525 if not os.path.isdir(targetdir) and not opts.get('dry_run'):
526 os.makedirs(targetdir)
526 os.makedirs(targetdir)
527 try:
527 try:
528 restore = repo.dirstate[abstarget] == 'r'
528 restore = repo.dirstate[abstarget] == 'r'
529 if restore and not opts.get('dry_run'):
529 if restore and not opts.get('dry_run'):
530 repo.undelete([abstarget])
530 repo.undelete([abstarget])
531 try:
531 try:
532 if not opts.get('dry_run'):
532 if not opts.get('dry_run'):
533 util.copyfile(src, target)
533 util.copyfile(src, target)
534 restore = False
534 restore = False
535 finally:
535 finally:
536 if restore:
536 if restore:
537 repo.remove([abstarget])
537 repo.remove([abstarget])
538 except IOError, inst:
538 except IOError, inst:
539 if inst.errno == errno.ENOENT:
539 if inst.errno == errno.ENOENT:
540 ui.warn(_('%s: deleted in working copy\n') % relsrc)
540 ui.warn(_('%s: deleted in working copy\n') % relsrc)
541 else:
541 else:
542 ui.warn(_('%s: cannot copy - %s\n') %
542 ui.warn(_('%s: cannot copy - %s\n') %
543 (relsrc, inst.strerror))
543 (relsrc, inst.strerror))
544 errors += 1
544 errors += 1
545 return
545 return
546 if ui.verbose or not exact:
546 if ui.verbose or not exact:
547 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
547 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
548 targets[abstarget] = abssrc
548 targets[abstarget] = abssrc
549 if abstarget != origsrc:
549 if abstarget != origsrc:
550 if repo.dirstate[origsrc] == 'a':
550 if repo.dirstate[origsrc] == 'a':
551 if not ui.quiet:
551 if not ui.quiet:
552 ui.warn(_("%s has not been committed yet, so no copy "
552 ui.warn(_("%s has not been committed yet, so no copy "
553 "data will be stored for %s.\n")
553 "data will be stored for %s.\n")
554 % (repo.pathto(origsrc, cwd), reltarget))
554 % (repo.pathto(origsrc, cwd), reltarget))
555 if abstarget not in repo.dirstate and not opts.get('dry_run'):
555 if abstarget not in repo.dirstate and not opts.get('dry_run'):
556 repo.add([abstarget])
556 repo.add([abstarget])
557 elif not opts.get('dry_run'):
557 elif not opts.get('dry_run'):
558 repo.copy(origsrc, abstarget)
558 repo.copy(origsrc, abstarget)
559 copied.append((abssrc, relsrc, exact))
559 copied.append((abssrc, relsrc, exact))
560
560
561 # pat: ossep
561 # pat: ossep
562 # dest ossep
562 # dest ossep
563 # srcs: list of (hgsep, hgsep, ossep, bool)
563 # srcs: list of (hgsep, hgsep, ossep, bool)
564 # return: function that takes hgsep and returns ossep
564 # return: function that takes hgsep and returns ossep
565 def targetpathfn(pat, dest, srcs):
565 def targetpathfn(pat, dest, srcs):
566 if os.path.isdir(pat):
566 if os.path.isdir(pat):
567 abspfx = util.canonpath(repo.root, cwd, pat)
567 abspfx = util.canonpath(repo.root, cwd, pat)
568 abspfx = util.localpath(abspfx)
568 abspfx = util.localpath(abspfx)
569 if destdirexists:
569 if destdirexists:
570 striplen = len(os.path.split(abspfx)[0])
570 striplen = len(os.path.split(abspfx)[0])
571 else:
571 else:
572 striplen = len(abspfx)
572 striplen = len(abspfx)
573 if striplen:
573 if striplen:
574 striplen += len(os.sep)
574 striplen += len(os.sep)
575 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
575 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
576 elif destdirexists:
576 elif destdirexists:
577 res = lambda p: os.path.join(dest,
577 res = lambda p: os.path.join(dest,
578 os.path.basename(util.localpath(p)))
578 os.path.basename(util.localpath(p)))
579 else:
579 else:
580 res = lambda p: dest
580 res = lambda p: dest
581 return res
581 return res
582
582
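# Illustrative sketch (hypothetical helper, not part of commands.py):
# targetpathfn maps each walked source path onto the destination by
# stripping a prefix whose length is fixed up front.  The same idea with
# plain os.path (the example below assumes '/' separators):

import os

def make_mapper(srcdir, dest, keep_last_component):
    # keep_last_component mirrors the destdirexists case above: when the
    # destination directory already exists, 'work/docs/a/b.txt' keeps the
    # 'docs' component; otherwise the whole srcdir prefix is stripped.
    if keep_last_component:
        striplen = len(os.path.split(srcdir)[0])
    else:
        striplen = len(srcdir)
    if striplen:
        striplen += len(os.sep)
    return lambda p: os.path.join(dest, p[striplen:])

# make_mapper('work/docs', 'backup', True)('work/docs/a/b.txt')
#   -> 'backup/docs/a/b.txt'
# make_mapper('work/docs', 'backup', False)('work/docs/a/b.txt')
#   -> 'backup/a/b.txt'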
583 # pat: ossep
583 # pat: ossep
584 # dest: ossep
584 # dest: ossep
585 # srcs: list of (hgsep, hgsep, ossep, bool)
585 # srcs: list of (hgsep, hgsep, ossep, bool)
586 # return: function that takes hgsep and returns ossep
586 # return: function that takes hgsep and returns ossep
587 def targetpathafterfn(pat, dest, srcs):
587 def targetpathafterfn(pat, dest, srcs):
588 if util.patkind(pat, None)[0]:
588 if util.patkind(pat, None)[0]:
589 # a mercurial pattern
589 # a mercurial pattern
590 res = lambda p: os.path.join(dest,
590 res = lambda p: os.path.join(dest,
591 os.path.basename(util.localpath(p)))
591 os.path.basename(util.localpath(p)))
592 else:
592 else:
593 abspfx = util.canonpath(repo.root, cwd, pat)
593 abspfx = util.canonpath(repo.root, cwd, pat)
594 if len(abspfx) < len(srcs[0][0]):
594 if len(abspfx) < len(srcs[0][0]):
595 # A directory. Either the target path contains the last
595 # A directory. Either the target path contains the last
596 # component of the source path or it does not.
596 # component of the source path or it does not.
597 def evalpath(striplen):
597 def evalpath(striplen):
598 score = 0
598 score = 0
599 for s in srcs:
599 for s in srcs:
600 t = os.path.join(dest, util.localpath(s[0])[striplen:])
600 t = os.path.join(dest, util.localpath(s[0])[striplen:])
601 if os.path.exists(t):
601 if os.path.exists(t):
602 score += 1
602 score += 1
603 return score
603 return score
604
604
605 abspfx = util.localpath(abspfx)
605 abspfx = util.localpath(abspfx)
606 striplen = len(abspfx)
606 striplen = len(abspfx)
607 if striplen:
607 if striplen:
608 striplen += len(os.sep)
608 striplen += len(os.sep)
609 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
609 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
610 score = evalpath(striplen)
610 score = evalpath(striplen)
611 striplen1 = len(os.path.split(abspfx)[0])
611 striplen1 = len(os.path.split(abspfx)[0])
612 if striplen1:
612 if striplen1:
613 striplen1 += len(os.sep)
613 striplen1 += len(os.sep)
614 if evalpath(striplen1) > score:
614 if evalpath(striplen1) > score:
615 striplen = striplen1
615 striplen = striplen1
616 res = lambda p: os.path.join(dest,
616 res = lambda p: os.path.join(dest,
617 util.localpath(p)[striplen:])
617 util.localpath(p)[striplen:])
618 else:
618 else:
619 # a file
619 # a file
620 if destdirexists:
620 if destdirexists:
621 res = lambda p: os.path.join(dest,
621 res = lambda p: os.path.join(dest,
622 os.path.basename(util.localpath(p)))
622 os.path.basename(util.localpath(p)))
623 else:
623 else:
624 res = lambda p: dest
624 res = lambda p: dest
625 return res
625 return res
626
626
627
627
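# Illustrative sketch (hypothetical, not part of commands.py): with
# --after the files were already copied, so targetpathafterfn guesses the
# strip length by scoring how many mapped targets already exist on disk.
# Same scoring idea, with the "exists" test injectable so the sketch can
# be exercised without touching the filesystem:

import os

def best_striplen(dest, sources, candidates, exists=os.path.exists):
    # candidates: possible prefix lengths; pick the one whose mapping
    # lands on the most already-existing destination paths.
    def score(striplen):
        return sum(1 for s in sources
                   if exists(os.path.join(dest, s[striplen:])))
    return max(candidates, key=score)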
628 pats = util.expand_glob(pats)
628 pats = util.expand_glob(pats)
629 if not pats:
629 if not pats:
630 raise util.Abort(_('no source or destination specified'))
630 raise util.Abort(_('no source or destination specified'))
631 if len(pats) == 1:
631 if len(pats) == 1:
632 raise util.Abort(_('no destination specified'))
632 raise util.Abort(_('no destination specified'))
633 dest = pats.pop()
633 dest = pats.pop()
634 destdirexists = os.path.isdir(dest)
634 destdirexists = os.path.isdir(dest)
635 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
635 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
636 raise util.Abort(_('with multiple sources, destination must be an '
636 raise util.Abort(_('with multiple sources, destination must be an '
637 'existing directory'))
637 'existing directory'))
638 if opts['after']:
638 if opts['after']:
639 tfn = targetpathafterfn
639 tfn = targetpathafterfn
640 else:
640 else:
641 tfn = targetpathfn
641 tfn = targetpathfn
642 copylist = []
642 copylist = []
643 for pat in pats:
643 for pat in pats:
644 srcs = []
644 srcs = []
645 for tag, abssrc, relsrc, exact in cmdutil.walk(repo, [pat], opts,
645 for tag, abssrc, relsrc, exact in cmdutil.walk(repo, [pat], opts,
646 globbed=True):
646 globbed=True):
647 origsrc = okaytocopy(abssrc, relsrc, exact)
647 origsrc = okaytocopy(abssrc, relsrc, exact)
648 if origsrc:
648 if origsrc:
649 srcs.append((origsrc, abssrc, relsrc, exact))
649 srcs.append((origsrc, abssrc, relsrc, exact))
650 if not srcs:
650 if not srcs:
651 continue
651 continue
652 copylist.append((tfn(pat, dest, srcs), srcs))
652 copylist.append((tfn(pat, dest, srcs), srcs))
653 if not copylist:
653 if not copylist:
654 raise util.Abort(_('no files to copy'))
654 raise util.Abort(_('no files to copy'))
655
655
656 for targetpath, srcs in copylist:
656 for targetpath, srcs in copylist:
657 for origsrc, abssrc, relsrc, exact in srcs:
657 for origsrc, abssrc, relsrc, exact in srcs:
658 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
658 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
659
659
660 if errors:
660 if errors:
661 ui.warn(_('(consider using --after)\n'))
661 ui.warn(_('(consider using --after)\n'))
662 return errors, copied
662 return errors, copied
663
663
664 def copy(ui, repo, *pats, **opts):
664 def copy(ui, repo, *pats, **opts):
665 """mark files as copied for the next commit
665 """mark files as copied for the next commit
666
666
667 Mark dest as having copies of source files. If dest is a
667 Mark dest as having copies of source files. If dest is a
668 directory, copies are put in that directory. If dest is a file,
668 directory, copies are put in that directory. If dest is a file,
669 there can only be one source.
669 there can only be one source.
670
670
671 By default, this command copies the contents of files as they
671 By default, this command copies the contents of files as they
672 stand in the working directory. If invoked with --after, the
672 stand in the working directory. If invoked with --after, the
673 operation is recorded, but no copying is performed.
673 operation is recorded, but no copying is performed.
674
674
675 This command takes effect in the next commit. To undo a copy
675 This command takes effect in the next commit. To undo a copy
676 before that, see hg revert.
676 before that, see hg revert.
677 """
677 """
678 wlock = repo.wlock(False)
678 wlock = repo.wlock(False)
679 try:
679 try:
680 errs, copied = docopy(ui, repo, pats, opts)
680 errs, copied = docopy(ui, repo, pats, opts)
681 finally:
681 finally:
682 del wlock
682 del wlock
683 return errs
683 return errs
684
684
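# Illustrative sketch (hypothetical, not part of commands.py): driving
# the copy command from a script.  'hg copy' and its --after flag are
# described in the docstring above; the repository path and file names
# here are made up.

import subprocess

def record_copy(repo, src, dst, after=False):
    cmd = ["hg", "--repository", repo, "copy"]
    if after:
        cmd.append("--after")   # only record the copy; file already moved
    cmd.extend([src, dst])
    # hg's exit status; nonzero when copies failed (cf. 'return errs' above)
    return subprocess.call(cmd)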
685 def debugancestor(ui, index, rev1, rev2):
685 def debugancestor(ui, index, rev1, rev2):
686 """find the ancestor revision of two revisions in a given index"""
686 """find the ancestor revision of two revisions in a given index"""
687 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index)
687 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index)
688 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
688 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
689 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
689 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
690
690
691 def debugcomplete(ui, cmd='', **opts):
691 def debugcomplete(ui, cmd='', **opts):
692 """returns the completion list associated with the given command"""
692 """returns the completion list associated with the given command"""
693
693
694 if opts['options']:
694 if opts['options']:
695 options = []
695 options = []
696 otables = [globalopts]
696 otables = [globalopts]
697 if cmd:
697 if cmd:
698 aliases, entry = cmdutil.findcmd(ui, cmd)
698 aliases, entry = cmdutil.findcmd(ui, cmd)
699 otables.append(entry[1])
699 otables.append(entry[1])
700 for t in otables:
700 for t in otables:
701 for o in t:
701 for o in t:
702 if o[0]:
702 if o[0]:
703 options.append('-%s' % o[0])
703 options.append('-%s' % o[0])
704 options.append('--%s' % o[1])
704 options.append('--%s' % o[1])
705 ui.write("%s\n" % "\n".join(options))
705 ui.write("%s\n" % "\n".join(options))
706 return
706 return
707
707
708 clist = cmdutil.findpossible(ui, cmd).keys()
708 clist = cmdutil.findpossible(ui, cmd).keys()
709 clist.sort()
709 clist.sort()
710 ui.write("%s\n" % "\n".join(clist))
710 ui.write("%s\n" % "\n".join(clist))
711
711
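# Illustrative sketch (hypothetical, not part of commands.py): the
# --options branch above flattens option tables of (short, long,
# default, help) tuples into completion strings.  Reduced form over a
# made-up table:

def completion_options(opt_table):
    out = []
    for short, long_, default, desc in opt_table:
        if short:
            out.append('-%s' % short)
        out.append('--%s' % long_)
    return out

# completion_options([('v', 'verbose', None, 'be chatty'),
#                     ('', 'dry-run', None, 'do nothing')])
# -> ['-v', '--verbose', '--dry-run']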
712 def debugrebuildstate(ui, repo, rev=""):
712 def debugrebuildstate(ui, repo, rev=""):
713 """rebuild the dirstate as it would look like for the given revision"""
713 """rebuild the dirstate as it would look like for the given revision"""
714 if rev == "":
714 if rev == "":
715 rev = repo.changelog.tip()
715 rev = repo.changelog.tip()
716 ctx = repo.changectx(rev)
716 ctx = repo.changectx(rev)
717 files = ctx.manifest()
717 files = ctx.manifest()
718 wlock = repo.wlock()
718 wlock = repo.wlock()
719 try:
719 try:
720 repo.dirstate.rebuild(rev, files)
720 repo.dirstate.rebuild(rev, files)
721 finally:
721 finally:
722 del wlock
722 del wlock
723
723
724 def debugcheckstate(ui, repo):
724 def debugcheckstate(ui, repo):
725 """validate the correctness of the current dirstate"""
725 """validate the correctness of the current dirstate"""
726 parent1, parent2 = repo.dirstate.parents()
726 parent1, parent2 = repo.dirstate.parents()
727 m1 = repo.changectx(parent1).manifest()
727 m1 = repo.changectx(parent1).manifest()
728 m2 = repo.changectx(parent2).manifest()
728 m2 = repo.changectx(parent2).manifest()
729 errors = 0
729 errors = 0
730 for f in repo.dirstate:
730 for f in repo.dirstate:
731 state = repo.dirstate[f]
731 state = repo.dirstate[f]
732 if state in "nr" and f not in m1:
732 if state in "nr" and f not in m1:
733 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
733 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
734 errors += 1
734 errors += 1
735 if state in "a" and f in m1:
735 if state in "a" and f in m1:
736 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
736 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
737 errors += 1
737 errors += 1
738 if state in "m" and f not in m1 and f not in m2:
738 if state in "m" and f not in m1 and f not in m2:
739 ui.warn(_("%s in state %s, but not in either manifest\n") %
739 ui.warn(_("%s in state %s, but not in either manifest\n") %
740 (f, state))
740 (f, state))
741 errors += 1
741 errors += 1
742 for f in m1:
742 for f in m1:
743 state = repo.dirstate[f]
743 state = repo.dirstate[f]
744 if state not in "nrm":
744 if state not in "nrm":
745 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
745 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
746 errors += 1
746 errors += 1
747 if errors:
747 if errors:
748 error = _(".hg/dirstate inconsistent with current parent's manifest")
748 error = _(".hg/dirstate inconsistent with current parent's manifest")
749 raise util.Abort(error)
749 raise util.Abort(error)
750
750
751 def showconfig(ui, repo, *values, **opts):
751 def showconfig(ui, repo, *values, **opts):
752 """show combined config settings from all hgrc files
752 """show combined config settings from all hgrc files
753
753
754 With no args, print names and values of all config items.
754 With no args, print names and values of all config items.
755
755
756 With one arg of the form section.name, print just the value of
756 With one arg of the form section.name, print just the value of
757 that config item.
757 that config item.
758
758
759 With multiple args, print names and values of all config items
759 With multiple args, print names and values of all config items
760 with matching section names."""
760 with matching section names."""
761
761
762 untrusted = bool(opts.get('untrusted'))
762 untrusted = bool(opts.get('untrusted'))
763 if values:
763 if values:
764 if len([v for v in values if '.' in v]) > 1:
764 if len([v for v in values if '.' in v]) > 1:
765 raise util.Abort(_('only one config item permitted'))
765 raise util.Abort(_('only one config item permitted'))
766 for section, name, value in ui.walkconfig(untrusted=untrusted):
766 for section, name, value in ui.walkconfig(untrusted=untrusted):
767 sectname = section + '.' + name
767 sectname = section + '.' + name
768 if values:
768 if values:
769 for v in values:
769 for v in values:
770 if v == section:
770 if v == section:
771 ui.write('%s=%s\n' % (sectname, value))
771 ui.write('%s=%s\n' % (sectname, value))
772 elif v == sectname:
772 elif v == sectname:
773 ui.write(value, '\n')
773 ui.write(value, '\n')
774 else:
774 else:
775 ui.write('%s=%s\n' % (sectname, value))
775 ui.write('%s=%s\n' % (sectname, value))
776
776
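# Illustrative sketch (hypothetical, not part of commands.py): the
# filtering above matches either a whole section ('ui') or a single item
# ('ui.username'), printing "section.name=value" in the first case and
# the bare value in the second.  Same selection logic over plain triples:

def show_config(items, selectors=()):
    # items: iterable of (section, name, value); selectors as in the
    # docstring above (section names or section.name keys)
    lines = []
    for section, name, value in items:
        key = '%s.%s' % (section, name)
        if not selectors:
            lines.append('%s=%s' % (key, value))
            continue
        for sel in selectors:
            if sel == section:
                lines.append('%s=%s' % (key, value))
            elif sel == key:
                lines.append(value)
    return lines

# show_config([('ui', 'username', 'jane'), ('diff', 'git', 'True')],
#             ['ui.username']) -> ['jane']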
777 def debugsetparents(ui, repo, rev1, rev2=None):
777 def debugsetparents(ui, repo, rev1, rev2=None):
778 """manually set the parents of the current working directory
778 """manually set the parents of the current working directory
779
779
780 This is useful for writing repository conversion tools, but should
780 This is useful for writing repository conversion tools, but should
781 be used with care.
781 be used with care.
782 """
782 """
783
783
784 if not rev2:
784 if not rev2:
785 rev2 = hex(nullid)
785 rev2 = hex(nullid)
786
786
787 wlock = repo.wlock()
787 wlock = repo.wlock()
788 try:
788 try:
789 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
789 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
790 finally:
790 finally:
791 del wlock
791 del wlock
792
792
793 def debugstate(ui, repo):
793 def debugstate(ui, repo):
794 """show the contents of the current dirstate"""
794 """show the contents of the current dirstate"""
795 dc = repo.dirstate._map
795 dc = repo.dirstate._map
796 k = dc.keys()
796 k = dc.keys()
797 k.sort()
797 k.sort()
798 for file_ in k:
798 for file_ in k:
799 if dc[file_][3] == -1:
799 if dc[file_][3] == -1:
800 # Pad or slice to locale representation
800 # Pad or slice to locale representation
801 locale_len = len(time.strftime("%x %X", time.localtime(0)))
801 locale_len = len(time.strftime("%x %X", time.localtime(0)))
802 timestr = 'unset'
802 timestr = 'unset'
803 timestr = timestr[:locale_len] + ' '*(locale_len - len(timestr))
803 timestr = timestr[:locale_len] + ' '*(locale_len - len(timestr))
804 else:
804 else:
805 timestr = time.strftime("%x %X", time.localtime(dc[file_][3]))
805 timestr = time.strftime("%x %X", time.localtime(dc[file_][3]))
806 ui.write("%c %3o %10d %s %s\n"
806 ui.write("%c %3o %10d %s %s\n"
807 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
807 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
808 timestr, file_))
808 timestr, file_))
809 for f in repo.dirstate.copies():
809 for f in repo.dirstate.copies():
810 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
810 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
811
811
812 def debugdata(ui, file_, rev):
812 def debugdata(ui, file_, rev):
813 """dump the contents of a data file revision"""
813 """dump the contents of a data file revision"""
814 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_[:-2] + ".i")
814 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_[:-2] + ".i")
815 try:
815 try:
816 ui.write(r.revision(r.lookup(rev)))
816 ui.write(r.revision(r.lookup(rev)))
817 except KeyError:
817 except KeyError:
818 raise util.Abort(_('invalid revision identifier %s') % rev)
818 raise util.Abort(_('invalid revision identifier %s') % rev)
819
819
820 def debugdate(ui, date, range=None, **opts):
820 def debugdate(ui, date, range=None, **opts):
821 """parse and display a date"""
821 """parse and display a date"""
822 if opts["extended"]:
822 if opts["extended"]:
823 d = util.parsedate(date, util.extendeddateformats)
823 d = util.parsedate(date, util.extendeddateformats)
824 else:
824 else:
825 d = util.parsedate(date)
825 d = util.parsedate(date)
826 ui.write("internal: %s %s\n" % d)
826 ui.write("internal: %s %s\n" % d)
827 ui.write("standard: %s\n" % util.datestr(d))
827 ui.write("standard: %s\n" % util.datestr(d))
828 if range:
828 if range:
829 m = util.matchdate(range)
829 m = util.matchdate(range)
830 ui.write("match: %s\n" % m(d[0]))
830 ui.write("match: %s\n" % m(d[0]))
831
831
832 def debugindex(ui, file_):
832 def debugindex(ui, file_):
833 """dump the contents of an index file"""
833 """dump the contents of an index file"""
834 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
834 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
835 ui.write(" rev offset length base linkrev" +
835 ui.write(" rev offset length base linkrev" +
836 " nodeid p1 p2\n")
836 " nodeid p1 p2\n")
837 for i in xrange(r.count()):
837 for i in xrange(r.count()):
838 node = r.node(i)
838 node = r.node(i)
839 pp = r.parents(node)
839 pp = r.parents(node)
840 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
840 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
841 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
841 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
842 short(node), short(pp[0]), short(pp[1])))
842 short(node), short(pp[0]), short(pp[1])))
843
843
844 def debugindexdot(ui, file_):
844 def debugindexdot(ui, file_):
845 """dump an index DAG as a .dot file"""
845 """dump an index DAG as a .dot file"""
846 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
846 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
847 ui.write("digraph G {\n")
847 ui.write("digraph G {\n")
848 for i in xrange(r.count()):
848 for i in xrange(r.count()):
849 node = r.node(i)
849 node = r.node(i)
850 pp = r.parents(node)
850 pp = r.parents(node)
851 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
851 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
852 if pp[1] != nullid:
852 if pp[1] != nullid:
853 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
853 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
854 ui.write("}\n")
854 ui.write("}\n")
855
855
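# Illustrative sketch (hypothetical, not part of commands.py): the DAG
# dump above writes one "parent -> child" edge per non-null parent.
# Same shape over a plain list of (rev, parent1, parent2) tuples, with
# -1 standing in for the null revision:

def dag_to_dot(entries):
    lines = ["digraph G {"]
    for rev, p1, p2 in entries:
        lines.append("\t%d -> %d" % (p1, rev))
        if p2 != -1:
            lines.append("\t%d -> %d" % (p2, rev))
    lines.append("}")
    return "\n".join(lines)

# dag_to_dot([(0, -1, -1), (1, 0, -1), (2, 0, 1)]) yields edges
# -1->0, 0->1, 0->2 and 1->2.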
856 def debuginstall(ui):
856 def debuginstall(ui):
857 '''test Mercurial installation'''
857 '''test Mercurial installation'''
858
858
859 def writetemp(contents):
859 def writetemp(contents):
860 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
860 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
861 f = os.fdopen(fd, "wb")
861 f = os.fdopen(fd, "wb")
862 f.write(contents)
862 f.write(contents)
863 f.close()
863 f.close()
864 return name
864 return name
865
865
866 problems = 0
866 problems = 0
867
867
868 # encoding
868 # encoding
869 ui.status(_("Checking encoding (%s)...\n") % util._encoding)
869 ui.status(_("Checking encoding (%s)...\n") % util._encoding)
870 try:
870 try:
871 util.fromlocal("test")
871 util.fromlocal("test")
872 except util.Abort, inst:
872 except util.Abort, inst:
873 ui.write(" %s\n" % inst)
873 ui.write(" %s\n" % inst)
874 ui.write(_(" (check that your locale is properly set)\n"))
874 ui.write(_(" (check that your locale is properly set)\n"))
875 problems += 1
875 problems += 1
876
876
877 # compiled modules
877 # compiled modules
878 ui.status(_("Checking extensions...\n"))
878 ui.status(_("Checking extensions...\n"))
879 try:
879 try:
880 import bdiff, mpatch, base85
880 import bdiff, mpatch, base85
881 except Exception, inst:
881 except Exception, inst:
882 ui.write(" %s\n" % inst)
882 ui.write(" %s\n" % inst)
883 ui.write(_(" One or more extensions could not be found"))
883 ui.write(_(" One or more extensions could not be found"))
884 ui.write(_(" (check that you compiled the extensions)\n"))
884 ui.write(_(" (check that you compiled the extensions)\n"))
885 problems += 1
885 problems += 1
886
886
887 # templates
887 # templates
888 ui.status(_("Checking templates...\n"))
888 ui.status(_("Checking templates...\n"))
889 try:
889 try:
890 import templater
890 import templater
891 t = templater.templater(templater.templatepath("map-cmdline.default"))
891 t = templater.templater(templater.templatepath("map-cmdline.default"))
892 except Exception, inst:
892 except Exception, inst:
893 ui.write(" %s\n" % inst)
893 ui.write(" %s\n" % inst)
894 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
894 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
895 problems += 1
895 problems += 1
896
896
897 # patch
897 # patch
898 ui.status(_("Checking patch...\n"))
898 ui.status(_("Checking patch...\n"))
899 patcher = ui.config('ui', 'patch')
899 patcher = ui.config('ui', 'patch')
900 patcher = ((patcher and util.find_exe(patcher)) or
900 patcher = ((patcher and util.find_exe(patcher)) or
901 util.find_exe('gpatch') or
901 util.find_exe('gpatch') or
902 util.find_exe('patch'))
902 util.find_exe('patch'))
903 if not patcher:
903 if not patcher:
904 ui.write(_(" Can't find patch or gpatch in PATH\n"))
904 ui.write(_(" Can't find patch or gpatch in PATH\n"))
905 ui.write(_(" (specify a patch utility in your .hgrc file)\n"))
905 ui.write(_(" (specify a patch utility in your .hgrc file)\n"))
906 problems += 1
906 problems += 1
907 else:
907 else:
908 # actually attempt a patch here
908 # actually attempt a patch here
909 a = "1\n2\n3\n4\n"
909 a = "1\n2\n3\n4\n"
910 b = "1\n2\n3\ninsert\n4\n"
910 b = "1\n2\n3\ninsert\n4\n"
911 fa = writetemp(a)
911 fa = writetemp(a)
912 d = mdiff.unidiff(a, None, b, None, os.path.basename(fa))
912 d = mdiff.unidiff(a, None, b, None, os.path.basename(fa))
913 fd = writetemp(d)
913 fd = writetemp(d)
914
914
915 files = {}
915 files = {}
916 try:
916 try:
917 patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files)
917 patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files)
918 except util.Abort, e:
918 except util.Abort, e:
919 ui.write(_(" patch call failed:\n"))
919 ui.write(_(" patch call failed:\n"))
920 ui.write(" " + str(e) + "\n")
920 ui.write(" " + str(e) + "\n")
921 problems += 1
921 problems += 1
922 else:
922 else:
923 if list(files) != [os.path.basename(fa)]:
923 if list(files) != [os.path.basename(fa)]:
924 ui.write(_(" unexpected patch output!"))
924 ui.write(_(" unexpected patch output!"))
925 ui.write(_(" (you may have an incompatible version of patch)\n"))
925 ui.write(_(" (you may have an incompatible version of patch)\n"))
926 problems += 1
926 problems += 1
927 a = file(fa).read()
927 a = file(fa).read()
928 if a != b:
928 if a != b:
929 ui.write(_(" patch test failed!"))
929 ui.write(_(" patch test failed!"))
930 ui.write(_(" (you may have an incompatible version of patch)\n"))
930 ui.write(_(" (you may have an incompatible version of patch)\n"))
931 problems += 1
931 problems += 1
932
932
933 os.unlink(fa)
933 os.unlink(fa)
934 os.unlink(fd)
934 os.unlink(fd)
935
935
936 # merge helper
936 # merge helper
937 ui.status(_("Checking merge helper...\n"))
937 ui.status(_("Checking merge helper...\n"))
938 cmd = (os.environ.get("HGMERGE") or ui.config("ui", "merge")
938 cmd = (os.environ.get("HGMERGE") or ui.config("ui", "merge")
939 or "hgmerge")
939 or "hgmerge")
940 cmdpath = util.find_exe(cmd) or util.find_exe(cmd.split()[0])
940 cmdpath = util.find_exe(cmd) or util.find_exe(cmd.split()[0])
941 if not cmdpath:
941 if not cmdpath:
942 if cmd == 'hgmerge':
942 if cmd == 'hgmerge':
943 ui.write(_(" No merge helper set and can't find default"
943 ui.write(_(" No merge helper set and can't find default"
944 " hgmerge script in PATH\n"))
944 " hgmerge script in PATH\n"))
945 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
945 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
946 else:
946 else:
947 ui.write(_(" Can't find merge helper '%s' in PATH\n") % cmd)
947 ui.write(_(" Can't find merge helper '%s' in PATH\n") % cmd)
948 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
948 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
949 problems += 1
949 problems += 1
950 else:
950 else:
951 # actually attempt a merge here
951 # actually attempt a merge here
952 fa = writetemp("1\n2\n3\n4\n")
952 fa = writetemp("1\n2\n3\n4\n")
953 fl = writetemp("1\n2\n3\ninsert\n4\n")
953 fl = writetemp("1\n2\n3\ninsert\n4\n")
954 fr = writetemp("begin\n1\n2\n3\n4\n")
954 fr = writetemp("begin\n1\n2\n3\n4\n")
955 r = util.system('%s "%s" "%s" "%s"' % (cmd, fl, fa, fr))
955 r = util.system('%s "%s" "%s" "%s"' % (cmd, fl, fa, fr))
956 if r:
956 if r:
957 ui.write(_(" Got unexpected merge error %d!\n") % r)
957 ui.write(_(" Got unexpected merge error %d!\n") % r)
958 problems += 1
958 problems += 1
959 m = file(fl).read()
959 m = file(fl).read()
960 if m != "begin\n1\n2\n3\ninsert\n4\n":
960 if m != "begin\n1\n2\n3\ninsert\n4\n":
961 ui.write(_(" Got unexpected merge results!\n"))
961 ui.write(_(" Got unexpected merge results!\n"))
962 ui.write(_(" (your merge helper may have the"
962 ui.write(_(" (your merge helper may have the"
963 " wrong argument order)\n"))
963 " wrong argument order)\n"))
964 ui.write(_(" Result: %r\n") % m)
964 ui.write(_(" Result: %r\n") % m)
965 problems += 1
965 problems += 1
966 os.unlink(fa)
966 os.unlink(fa)
967 os.unlink(fl)
967 os.unlink(fl)
968 os.unlink(fr)
968 os.unlink(fr)
969
969
970 # editor
970 # editor
971 ui.status(_("Checking commit editor...\n"))
971 ui.status(_("Checking commit editor...\n"))
972 editor = (os.environ.get("HGEDITOR") or
972 editor = (os.environ.get("HGEDITOR") or
973 ui.config("ui", "editor") or
973 ui.config("ui", "editor") or
974 os.environ.get("EDITOR", "vi"))
974 os.environ.get("EDITOR", "vi"))
975 cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0])
975 cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0])
976 if not cmdpath:
976 if not cmdpath:
977 if editor == 'vi':
977 if editor == 'vi':
978 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
978 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
979 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
979 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
980 else:
980 else:
981 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
981 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
982 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
982 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
983 problems += 1
983 problems += 1
984
984
985 # check username
985 # check username
986 ui.status(_("Checking username...\n"))
986 ui.status(_("Checking username...\n"))
987 user = os.environ.get("HGUSER")
987 user = os.environ.get("HGUSER")
988 if user is None:
988 if user is None:
989 user = ui.config("ui", "username")
989 user = ui.config("ui", "username")
990 if user is None:
990 if user is None:
991 user = os.environ.get("EMAIL")
991 user = os.environ.get("EMAIL")
992 if not user:
992 if not user:
993 ui.warn(" ")
993 ui.warn(" ")
994 ui.username()
994 ui.username()
995 ui.write(_(" (specify a username in your .hgrc file)\n"))
995 ui.write(_(" (specify a username in your .hgrc file)\n"))
996
996
997 if not problems:
997 if not problems:
998 ui.status(_("No problems detected\n"))
998 ui.status(_("No problems detected\n"))
999 else:
999 else:
1000 ui.write(_("%s problems detected,"
1000 ui.write(_("%s problems detected,"
1001 " please check your install!\n") % problems)
1001 " please check your install!\n") % problems)
1002
1002
1003 return problems
1003 return problems
1004
1004
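# Illustrative sketch (hypothetical, not util.find_exe): the patch,
# merge-helper and editor checks above all reduce to "is this command on
# PATH?", falling back to the bare command name when the setting carries
# arguments.  A minimal stand-in:

import os

def on_path(cmd):
    # ignore any arguments, the way the checks above use cmd.split()[0]
    name = cmd.split()[0]
    for d in os.environ.get("PATH", "").split(os.pathsep):
        candidate = os.path.join(d, name)
        if os.path.isfile(candidate) and os.access(candidate, os.X_OK):
            return candidate
    return None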
1005 def debugrename(ui, repo, file1, *pats, **opts):
1005 def debugrename(ui, repo, file1, *pats, **opts):
1006 """dump rename information"""
1006 """dump rename information"""
1007
1007
1008 ctx = repo.changectx(opts.get('rev', 'tip'))
1008 ctx = repo.changectx(opts.get('rev', 'tip'))
1009 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
1009 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
1010 ctx.node()):
1010 ctx.node()):
1011 m = ctx.filectx(abs).renamed()
1011 m = ctx.filectx(abs).renamed()
1012 if m:
1012 if m:
1013 ui.write(_("%s renamed from %s:%s\n") % (rel, m[0], hex(m[1])))
1013 ui.write(_("%s renamed from %s:%s\n") % (rel, m[0], hex(m[1])))
1014 else:
1014 else:
1015 ui.write(_("%s not renamed\n") % rel)
1015 ui.write(_("%s not renamed\n") % rel)
1016
1016
1017 def debugwalk(ui, repo, *pats, **opts):
1017 def debugwalk(ui, repo, *pats, **opts):
1018 """show how files match on given patterns"""
1018 """show how files match on given patterns"""
1019 items = list(cmdutil.walk(repo, pats, opts))
1019 items = list(cmdutil.walk(repo, pats, opts))
1020 if not items:
1020 if not items:
1021 return
1021 return
1022 fmt = '%%s %%-%ds %%-%ds %%s' % (
1022 fmt = '%%s %%-%ds %%-%ds %%s' % (
1023 max([len(abs) for (src, abs, rel, exact) in items]),
1023 max([len(abs) for (src, abs, rel, exact) in items]),
1024 max([len(rel) for (src, abs, rel, exact) in items]))
1024 max([len(rel) for (src, abs, rel, exact) in items]))
1025 for src, abs, rel, exact in items:
1025 for src, abs, rel, exact in items:
1026 line = fmt % (src, abs, rel, exact and 'exact' or '')
1026 line = fmt % (src, abs, rel, exact and 'exact' or '')
1027 ui.write("%s\n" % line.rstrip())
1027 ui.write("%s\n" % line.rstrip())
1028
1028
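# Illustrative sketch (hypothetical, not part of commands.py): the
# '%%-%ds' trick above builds a format string whose column widths are
# the longest abs/rel names seen in the walk results.  Reduced form:

def columnize(rows):
    # rows: list of (src, abs_, rel, flag) tuples, as from walk()
    if not rows:
        return []
    w1 = max(len(r[1]) for r in rows)
    w2 = max(len(r[2]) for r in rows)
    fmt = '%%s %%-%ds %%-%ds %%s' % (w1, w2)
    return [(fmt % row).rstrip() for row in rows]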
1029 def diff(ui, repo, *pats, **opts):
1029 def diff(ui, repo, *pats, **opts):
1030 """diff repository (or selected files)
1030 """diff repository (or selected files)
1031
1031
1032 Show differences between revisions for the specified files.
1032 Show differences between revisions for the specified files.
1033
1033
1034 Differences between files are shown using the unified diff format.
1034 Differences between files are shown using the unified diff format.
1035
1035
1036 NOTE: diff may generate unexpected results for merges, as it will
1036 NOTE: diff may generate unexpected results for merges, as it will
1037 default to comparing against the working directory's first parent
1037 default to comparing against the working directory's first parent
1038 changeset if no revisions are specified.
1038 changeset if no revisions are specified.
1039
1039
1040 When two revision arguments are given, then changes are shown
1040 When two revision arguments are given, then changes are shown
1041 between those revisions. If only one revision is specified then
1041 between those revisions. If only one revision is specified then
1042 that revision is compared to the working directory, and, when no
1042 that revision is compared to the working directory, and, when no
1043 revisions are specified, the working directory files are compared
1043 revisions are specified, the working directory files are compared
1044 to its parent.
1044 to its parent.
1045
1045
1046 Without the -a option, diff will avoid generating diffs of files
1046 Without the -a option, diff will avoid generating diffs of files
1047 it detects as binary. With -a, diff will generate a diff anyway,
1047 it detects as binary. With -a, diff will generate a diff anyway,
1048 probably with undesirable results.
1048 probably with undesirable results.
1049 """
1049 """
1050 node1, node2 = cmdutil.revpair(repo, opts['rev'])
1050 node1, node2 = cmdutil.revpair(repo, opts['rev'])
1051
1051
1052 fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
1052 fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
1053
1053
1054 patch.diff(repo, node1, node2, fns, match=matchfn,
1054 patch.diff(repo, node1, node2, fns, match=matchfn,
1055 opts=patch.diffopts(ui, opts))
1055 opts=patch.diffopts(ui, opts))
1056
1056
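# Illustrative sketch (hypothetical, not part of commands.py): the
# docstring's three cases (no revision, one revision, two revisions)
# driven from a script.  '-r' is hg's revision option; the revision
# names used in the examples are made up.

import subprocess

def show_diff(repo, revs=(), files=()):
    cmd = ["hg", "--repository", repo, "diff"]
    for r in revs[:2]:          # zero, one or two revisions, as above
        cmd.extend(["-r", str(r)])
    cmd.extend(files)
    return subprocess.call(cmd)

# show_diff(".")                   -> working directory vs. its parent
# show_diff(".", ("1.0", "tip"))   -> between the two given revisions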
1057 def export(ui, repo, *changesets, **opts):
1057 def export(ui, repo, *changesets, **opts):
1058 """dump the header and diffs for one or more changesets
1058 """dump the header and diffs for one or more changesets
1059
1059
1060 Print the changeset header and diffs for one or more revisions.
1060 Print the changeset header and diffs for one or more revisions.
1061
1061
1062 The information shown in the changeset header is: author,
1062 The information shown in the changeset header is: author,
1063 changeset hash, parent(s) and commit comment.
1063 changeset hash, parent(s) and commit comment.
1064
1064
1065 NOTE: export may generate unexpected diff output for merge changesets,
1065 NOTE: export may generate unexpected diff output for merge changesets,
1066 as it will compare the merge changeset against its first parent only.
1066 as it will compare the merge changeset against its first parent only.
1067
1067
1068 Output may be to a file, in which case the name of the file is
1068 Output may be to a file, in which case the name of the file is
1069 given using a format string. The formatting rules are as follows:
1069 given using a format string. The formatting rules are as follows:
1070
1070
1071 %% literal "%" character
1071 %% literal "%" character
1072 %H changeset hash (40 bytes of hexadecimal)
1072 %H changeset hash (40 bytes of hexadecimal)
1073 %N number of patches being generated
1073 %N number of patches being generated
1074 %R changeset revision number
1074 %R changeset revision number
1075 %b basename of the exporting repository
1075 %b basename of the exporting repository
1076 %h short-form changeset hash (12 bytes of hexadecimal)
1076 %h short-form changeset hash (12 bytes of hexadecimal)
1077 %n zero-padded sequence number, starting at 1
1077 %n zero-padded sequence number, starting at 1
1078 %r zero-padded changeset revision number
1078 %r zero-padded changeset revision number
1079
1079
1080 Without the -a option, export will avoid generating diffs of files
1080 Without the -a option, export will avoid generating diffs of files
1081 it detects as binary. With -a, export will generate a diff anyway,
1081 it detects as binary. With -a, export will generate a diff anyway,
1082 probably with undesirable results.
1082 probably with undesirable results.
1083
1083
1084 With the --switch-parent option, the diff will be against the second
1084 With the --switch-parent option, the diff will be against the second
1085 parent. This can be useful for reviewing a merge.
1085 parent. This can be useful for reviewing a merge.
1086 """
1086 """
1087 if not changesets:
1087 if not changesets:
1088 raise util.Abort(_("export requires at least one changeset"))
1088 raise util.Abort(_("export requires at least one changeset"))
1089 revs = cmdutil.revrange(repo, changesets)
1089 revs = cmdutil.revrange(repo, changesets)
1090 if len(revs) > 1:
1090 if len(revs) > 1:
1091 ui.note(_('exporting patches:\n'))
1091 ui.note(_('exporting patches:\n'))
1092 else:
1092 else:
1093 ui.note(_('exporting patch:\n'))
1093 ui.note(_('exporting patch:\n'))
1094 patch.export(repo, revs, template=opts['output'],
1094 patch.export(repo, revs, template=opts['output'],
1095 switch_parent=opts['switch_parent'],
1095 switch_parent=opts['switch_parent'],
1096 opts=patch.diffopts(ui, opts))
1096 opts=patch.diffopts(ui, opts))
1097
1097
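# Illustrative sketch (hypothetical, not part of commands.py): the
# output format string documented above can be expanded with a small
# scanner.  Only escapes of the kind listed in the docstring are
# handled, and the sample values are invented.

def expand_template(template, values):
    out, i = [], 0
    while i < len(template):
        c = template[i]
        if c == '%' and i + 1 < len(template):
            key = template[i + 1]
            if key == '%':
                out.append('%')            # literal "%"
            else:
                out.append(values[key])    # e.g. %H, %R, %b, %h, %n, %r
            i += 2
        else:
            out.append(c)
            i += 1
    return ''.join(out)

# expand_template('%b-%r.patch', {'b': 'hg', 'r': '0042'}) -> 'hg-0042.patch'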
1098 def grep(ui, repo, pattern, *pats, **opts):
1098 def grep(ui, repo, pattern, *pats, **opts):
1099 """search for a pattern in specified files and revisions
1099 """search for a pattern in specified files and revisions
1100
1100
1101 Search revisions of files for a regular expression.
1101 Search revisions of files for a regular expression.
1102
1102
1103 This command behaves differently than Unix grep. It only accepts
1103 This command behaves differently than Unix grep. It only accepts
1104 Python/Perl regexps. It searches repository history, not the
1104 Python/Perl regexps. It searches repository history, not the
1105 working directory. It always prints the revision number in which
1105 working directory. It always prints the revision number in which
1106 a match appears.
1106 a match appears.
1107
1107
1108 By default, grep only prints output for the first revision of a
1108 By default, grep only prints output for the first revision of a
1109 file in which it finds a match. To get it to print every revision
1109 file in which it finds a match. To get it to print every revision
1110 that contains a change in match status ("-" for a match that
1110 that contains a change in match status ("-" for a match that
1111 becomes a non-match, or "+" for a non-match that becomes a match),
1111 becomes a non-match, or "+" for a non-match that becomes a match),
1112 use the --all flag.
1112 use the --all flag.
1113 """
1113 """
1114 reflags = 0
1114 reflags = 0
1115 if opts['ignore_case']:
1115 if opts['ignore_case']:
1116 reflags |= re.I
1116 reflags |= re.I
1117 try:
1117 try:
1118 regexp = re.compile(pattern, reflags)
1118 regexp = re.compile(pattern, reflags)
1119 except Exception, inst:
1119 except Exception, inst:
1120 ui.warn(_("grep: invalid match pattern: %s!\n") % inst)
1120 ui.warn(_("grep: invalid match pattern: %s!\n") % inst)
1121 return None
1121 return None
1122 sep, eol = ':', '\n'
1122 sep, eol = ':', '\n'
1123 if opts['print0']:
1123 if opts['print0']:
1124 sep = eol = '\0'
1124 sep = eol = '\0'
1125
1125
1126 fcache = {}
1126 fcache = {}
1127 def getfile(fn):
1127 def getfile(fn):
1128 if fn not in fcache:
1128 if fn not in fcache:
1129 fcache[fn] = repo.file(fn)
1129 fcache[fn] = repo.file(fn)
1130 return fcache[fn]
1130 return fcache[fn]
1131
1131
1132 def matchlines(body):
1132 def matchlines(body):
1133 begin = 0
1133 begin = 0
1134 linenum = 0
1134 linenum = 0
1135 while True:
1135 while True:
1136 match = regexp.search(body, begin)
1136 match = regexp.search(body, begin)
1137 if not match:
1137 if not match:
1138 break
1138 break
1139 mstart, mend = match.span()
1139 mstart, mend = match.span()
1140 linenum += body.count('\n', begin, mstart) + 1
1140 linenum += body.count('\n', begin, mstart) + 1
1141 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1141 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1142 lend = body.find('\n', mend)
1142 lend = body.find('\n', mend)
1143 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1143 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1144 begin = lend + 1
1144 begin = lend + 1
1145
1145
1146 class linestate(object):
1146 class linestate(object):
1147 def __init__(self, line, linenum, colstart, colend):
1147 def __init__(self, line, linenum, colstart, colend):
1148 self.line = line
1148 self.line = line
1149 self.linenum = linenum
1149 self.linenum = linenum
1150 self.colstart = colstart
1150 self.colstart = colstart
1151 self.colend = colend
1151 self.colend = colend
1152
1152
1153 def __eq__(self, other):
1153 def __eq__(self, other):
1154 return self.line == other.line
1154 return self.line == other.line
1155
1155
1156 matches = {}
1156 matches = {}
1157 copies = {}
1157 copies = {}
1158 def grepbody(fn, rev, body):
1158 def grepbody(fn, rev, body):
1159 matches[rev].setdefault(fn, [])
1159 matches[rev].setdefault(fn, [])
1160 m = matches[rev][fn]
1160 m = matches[rev][fn]
1161 for lnum, cstart, cend, line in matchlines(body):
1161 for lnum, cstart, cend, line in matchlines(body):
1162 s = linestate(line, lnum, cstart, cend)
1162 s = linestate(line, lnum, cstart, cend)
1163 m.append(s)
1163 m.append(s)
1164
1164
1165 def difflinestates(a, b):
1165 def difflinestates(a, b):
1166 sm = difflib.SequenceMatcher(None, a, b)
1166 sm = difflib.SequenceMatcher(None, a, b)
1167 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1167 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1168 if tag == 'insert':
1168 if tag == 'insert':
1169 for i in xrange(blo, bhi):
1169 for i in xrange(blo, bhi):
1170 yield ('+', b[i])
1170 yield ('+', b[i])
1171 elif tag == 'delete':
1171 elif tag == 'delete':
1172 for i in xrange(alo, ahi):
1172 for i in xrange(alo, ahi):
1173 yield ('-', a[i])
1173 yield ('-', a[i])
1174 elif tag == 'replace':
1174 elif tag == 'replace':
1175 for i in xrange(alo, ahi):
1175 for i in xrange(alo, ahi):
1176 yield ('-', a[i])
1176 yield ('-', a[i])
1177 for i in xrange(blo, bhi):
1177 for i in xrange(blo, bhi):
1178 yield ('+', b[i])
1178 yield ('+', b[i])
1179
1179
1180 prev = {}
1180 prev = {}
1181 def display(fn, rev, states, prevstates):
1181 def display(fn, rev, states, prevstates):
1182 found = False
1182 found = False
1183 filerevmatches = {}
1183 filerevmatches = {}
1184 r = prev.get(fn, -1)
1184 r = prev.get(fn, -1)
1185 if opts['all']:
1185 if opts['all']:
1186 iter = difflinestates(states, prevstates)
1186 iter = difflinestates(states, prevstates)
1187 else:
1187 else:
1188 iter = [('', l) for l in prevstates]
1188 iter = [('', l) for l in prevstates]
1189 for change, l in iter:
1189 for change, l in iter:
1190 cols = [fn, str(r)]
1190 cols = [fn, str(r)]
1191 if opts['line_number']:
1191 if opts['line_number']:
1192 cols.append(str(l.linenum))
1192 cols.append(str(l.linenum))
1193 if opts['all']:
1193 if opts['all']:
1194 cols.append(change)
1194 cols.append(change)
1195 if opts['user']:
1195 if opts['user']:
1196 cols.append(ui.shortuser(get(r)[1]))
1196 cols.append(ui.shortuser(get(r)[1]))
1197 if opts['files_with_matches']:
1197 if opts['files_with_matches']:
1198 c = (fn, r)
1198 c = (fn, r)
1199 if c in filerevmatches:
1199 if c in filerevmatches:
1200 continue
1200 continue
1201 filerevmatches[c] = 1
1201 filerevmatches[c] = 1
1202 else:
1202 else:
1203 cols.append(l.line)
1203 cols.append(l.line)
1204 ui.write(sep.join(cols), eol)
1204 ui.write(sep.join(cols), eol)
1205 found = True
1205 found = True
1206 return found
1206 return found
1207
1207
1208 fstate = {}
1208 fstate = {}
1209 skip = {}
1209 skip = {}
1210 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1210 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1211 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1211 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1212 found = False
1212 found = False
1213 follow = opts.get('follow')
1213 follow = opts.get('follow')
1214 for st, rev, fns in changeiter:
1214 for st, rev, fns in changeiter:
1215 if st == 'window':
1215 if st == 'window':
1216 matches.clear()
1216 matches.clear()
1217 elif st == 'add':
1217 elif st == 'add':
1218 mf = repo.changectx(rev).manifest()
1218 mf = repo.changectx(rev).manifest()
1219 matches[rev] = {}
1219 matches[rev] = {}
1220 for fn in fns:
1220 for fn in fns:
1221 if fn in skip:
1221 if fn in skip:
1222 continue
1222 continue
1223 fstate.setdefault(fn, {})
1223 fstate.setdefault(fn, {})
1224 try:
1224 try:
1225 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1225 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1226 if follow:
1226 if follow:
1227 copied = getfile(fn).renamed(mf[fn])
1227 copied = getfile(fn).renamed(mf[fn])
1228 if copied:
1228 if copied:
1229 copies.setdefault(rev, {})[fn] = copied[0]
1229 copies.setdefault(rev, {})[fn] = copied[0]
1230 except KeyError:
1230 except KeyError:
1231 pass
1231 pass
1232 elif st == 'iter':
1232 elif st == 'iter':
1233 states = matches[rev].items()
1233 states = matches[rev].items()
1234 states.sort()
1234 states.sort()
1235 for fn, m in states:
1235 for fn, m in states:
1236 copy = copies.get(rev, {}).get(fn)
1236 copy = copies.get(rev, {}).get(fn)
1237 if fn in skip:
1237 if fn in skip:
1238 if copy:
1238 if copy:
1239 skip[copy] = True
1239 skip[copy] = True
1240 continue
1240 continue
1241 if fn in prev or fstate[fn]:
1241 if fn in prev or fstate[fn]:
1242 r = display(fn, rev, m, fstate[fn])
1242 r = display(fn, rev, m, fstate[fn])
1243 found = found or r
1243 found = found or r
1244 if r and not opts['all']:
1244 if r and not opts['all']:
1245 skip[fn] = True
1245 skip[fn] = True
1246 if copy:
1246 if copy:
1247 skip[copy] = True
1247 skip[copy] = True
1248 fstate[fn] = m
1248 fstate[fn] = m
1249 if copy:
1249 if copy:
1250 fstate[copy] = m
1250 fstate[copy] = m
1251 prev[fn] = rev
1251 prev[fn] = rev
1252
1252
1253 fstate = fstate.items()
1253 fstate = fstate.items()
1254 fstate.sort()
1254 fstate.sort()
1255 for fn, state in fstate:
1255 for fn, state in fstate:
1256 if fn in skip:
1256 if fn in skip:
1257 continue
1257 continue
1258 if fn not in copies.get(prev[fn], {}):
1258 if fn not in copies.get(prev[fn], {}):
1259 found = display(fn, rev, {}, state) or found
1259 found = display(fn, rev, {}, state) or found
1260 return (not found and 1) or 0
1260 return (not found and 1) or 0
1261
1261
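# Illustrative sketch (hypothetical, not part of commands.py): with
# --all, grep reports lines whose match status changed between
# revisions.  difflinestates above is essentially this, shown here over
# plain lists of strings:

import difflib

def line_changes(old, new):
    sm = difflib.SequenceMatcher(None, old, new)
    for tag, alo, ahi, blo, bhi in sm.get_opcodes():
        if tag in ('delete', 'replace'):
            for line in old[alo:ahi]:
                yield ('-', line)    # match that went away
        if tag in ('insert', 'replace'):
            for line in new[blo:bhi]:
                yield ('+', line)    # match that appeared

# list(line_changes(['foo()'], ['foo()', 'foo(x)'])) -> [('+', 'foo(x)')]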
1262 def heads(ui, repo, *branchrevs, **opts):
1262 def heads(ui, repo, *branchrevs, **opts):
1263 """show current repository heads or show branch heads
1263 """show current repository heads or show branch heads
1264
1264
1265 With no arguments, show all repository head changesets.
1265 With no arguments, show all repository head changesets.
1266
1266
1267 If branch or revision names are given this will show the heads of
1267 If branch or revision names are given this will show the heads of
1268 the specified branches or the branches those revisions are tagged
1268 the specified branches or the branches those revisions are tagged
1269 with.
1269 with.
1270
1270
1271 Repository "heads" are changesets that don't have child
1271 Repository "heads" are changesets that don't have child
1272 changesets. They are where development generally takes place and
1272 changesets. They are where development generally takes place and
1273 are the usual targets for update and merge operations.
1273 are the usual targets for update and merge operations.
1274
1274
1275 Branch heads are changesets that have a given branch tag, but have
1275 Branch heads are changesets that have a given branch tag, but have
1276 no child changesets with that tag. They are usually where
1276 no child changesets with that tag. They are usually where
1277 development on the given branch takes place.
1277 development on the given branch takes place.
1278 """
1278 """
1279 if opts['rev']:
1279 if opts['rev']:
1280 start = repo.lookup(opts['rev'])
1280 start = repo.lookup(opts['rev'])
1281 else:
1281 else:
1282 start = None
1282 start = None
1283 if not branchrevs:
1283 if not branchrevs:
1284 # Assume we're looking repo-wide heads if no revs were specified.
1284 # Assume we're looking repo-wide heads if no revs were specified.
1285 heads = repo.heads(start)
1285 heads = repo.heads(start)
1286 else:
1286 else:
1287 heads = []
1287 heads = []
1288 visitedset = util.set()
1288 visitedset = util.set()
1289 for branchrev in branchrevs:
1289 for branchrev in branchrevs:
1290 branch = repo.changectx(branchrev).branch()
1290 branch = repo.changectx(branchrev).branch()
1291 if branch in visitedset:
1291 if branch in visitedset:
1292 continue
1292 continue
1293 visitedset.add(branch)
1293 visitedset.add(branch)
1294 bheads = repo.branchheads(branch, start)
1294 bheads = repo.branchheads(branch, start)
1295 if not bheads:
1295 if not bheads:
1296 if branch != branchrev:
1296 if branch != branchrev:
1297 ui.warn(_("no changes on branch %s containing %s are "
1297 ui.warn(_("no changes on branch %s containing %s are "
1298 "reachable from %s\n")
1298 "reachable from %s\n")
1299 % (branch, branchrev, opts['rev']))
1299 % (branch, branchrev, opts['rev']))
1300 else:
1300 else:
1301 ui.warn(_("no changes on branch %s are reachable from %s\n")
1301 ui.warn(_("no changes on branch %s are reachable from %s\n")
1302 % (branch, opts['rev']))
1302 % (branch, opts['rev']))
1303 heads.extend(bheads)
1303 heads.extend(bheads)
1304 if not heads:
1304 if not heads:
1305 return 1
1305 return 1
1306 displayer = cmdutil.show_changeset(ui, repo, opts)
1306 displayer = cmdutil.show_changeset(ui, repo, opts)
1307 for n in heads:
1307 for n in heads:
1308 displayer.show(changenode=n)
1308 displayer.show(changenode=n)
1309
1309
1310 def help_(ui, name=None, with_version=False):
1310 def help_(ui, name=None, with_version=False):
1311 """show help for a command, extension, or list of commands
1311 """show help for a command, extension, or list of commands
1312
1312
1313 With no arguments, print a list of commands and short help.
1313 With no arguments, print a list of commands and short help.
1314
1314
1315 Given a command name, print help for that command.
1315 Given a command name, print help for that command.
1316
1316
1317 Given an extension name, print help for that extension, and the
1317 Given an extension name, print help for that extension, and the
1318 commands it provides."""
1318 commands it provides."""
1319 option_lists = []
1319 option_lists = []
1320
1320
1321 def addglobalopts(aliases):
1321 def addglobalopts(aliases):
1322 if ui.verbose:
1322 if ui.verbose:
1323 option_lists.append((_("global options:"), globalopts))
1323 option_lists.append((_("global options:"), globalopts))
1324 if name == 'shortlist':
1324 if name == 'shortlist':
1325 option_lists.append((_('use "hg help" for the full list '
1325 option_lists.append((_('use "hg help" for the full list '
1326 'of commands'), ()))
1326 'of commands'), ()))
1327 else:
1327 else:
1328 if name == 'shortlist':
1328 if name == 'shortlist':
1329 msg = _('use "hg help" for the full list of commands '
1329 msg = _('use "hg help" for the full list of commands '
1330 'or "hg -v" for details')
1330 'or "hg -v" for details')
1331 elif aliases:
1331 elif aliases:
1332 msg = _('use "hg -v help%s" to show aliases and '
1332 msg = _('use "hg -v help%s" to show aliases and '
1333 'global options') % (name and " " + name or "")
1333 'global options') % (name and " " + name or "")
1334 else:
1334 else:
1335 msg = _('use "hg -v help %s" to show global options') % name
1335 msg = _('use "hg -v help %s" to show global options') % name
1336 option_lists.append((msg, ()))
1336 option_lists.append((msg, ()))
1337
1337
1338 def helpcmd(name):
1338 def helpcmd(name):
1339 if with_version:
1339 if with_version:
1340 version_(ui)
1340 version_(ui)
1341 ui.write('\n')
1341 ui.write('\n')
1342 aliases, i = cmdutil.findcmd(ui, name)
1342 aliases, i = cmdutil.findcmd(ui, name)
1343 # synopsis
1343 # synopsis
1344 ui.write("%s\n\n" % i[2])
1344 ui.write("%s\n\n" % i[2])
1345
1345
1346 # description
1346 # description
1347 doc = i[0].__doc__
1347 doc = i[0].__doc__
1348 if not doc:
1348 if not doc:
1349 doc = _("(No help text available)")
1349 doc = _("(No help text available)")
1350 if ui.quiet:
1350 if ui.quiet:
1351 doc = doc.splitlines(0)[0]
1351 doc = doc.splitlines(0)[0]
1352 ui.write("%s\n" % doc.rstrip())
1352 ui.write("%s\n" % doc.rstrip())
1353
1353
1354 if not ui.quiet:
1354 if not ui.quiet:
1355 # aliases
1355 # aliases
1356 if len(aliases) > 1:
1356 if len(aliases) > 1:
1357 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
1357 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
1358
1358
1359 # options
1359 # options
1360 if i[1]:
1360 if i[1]:
1361 option_lists.append((_("options:\n"), i[1]))
1361 option_lists.append((_("options:\n"), i[1]))
1362
1362
1363 addglobalopts(False)
1363 addglobalopts(False)
1364
1364
1365 def helplist(select=None):
1365 def helplist(header, select=None):
1366 h = {}
1366 h = {}
1367 cmds = {}
1367 cmds = {}
1368 for c, e in table.items():
1368 for c, e in table.items():
1369 f = c.split("|", 1)[0]
1369 f = c.split("|", 1)[0]
1370 if select and not select(f):
1370 if select and not select(f):
1371 continue
1371 continue
1372 if name == "shortlist" and not f.startswith("^"):
1372 if name == "shortlist" and not f.startswith("^"):
1373 continue
1373 continue
1374 f = f.lstrip("^")
1374 f = f.lstrip("^")
1375 if not ui.debugflag and f.startswith("debug"):
1375 if not ui.debugflag and f.startswith("debug"):
1376 continue
1376 continue
1377 doc = e[0].__doc__
1377 doc = e[0].__doc__
1378 if not doc:
1378 if not doc:
1379 doc = _("(No help text available)")
1379 doc = _("(No help text available)")
1380 h[f] = doc.splitlines(0)[0].rstrip()
1380 h[f] = doc.splitlines(0)[0].rstrip()
1381 cmds[f] = c.lstrip("^")
1381 cmds[f] = c.lstrip("^")
1382
1382
1383 if not h:
1384 ui.status(_('no commands defined\n'))
1385 return
1386
1387 ui.status(header)
1383 fns = h.keys()
1388 fns = h.keys()
1384 fns.sort()
1389 fns.sort()
1385 m = max(map(len, fns))
1390 m = max(map(len, fns))
1386 for f in fns:
1391 for f in fns:
1387 if ui.verbose:
1392 if ui.verbose:
1388 commands = cmds[f].replace("|",", ")
1393 commands = cmds[f].replace("|",", ")
1389 ui.write(" %s:\n %s\n"%(commands, h[f]))
1394 ui.write(" %s:\n %s\n"%(commands, h[f]))
1390 else:
1395 else:
1391 ui.write(' %-*s %s\n' % (m, f, h[f]))
1396 ui.write(' %-*s %s\n' % (m, f, h[f]))
1392
1397
1393 if not ui.quiet:
1398 if not ui.quiet:
1394 addglobalopts(True)
1399 addglobalopts(True)
1395
1400
1396 def helptopic(name):
1401 def helptopic(name):
1397 v = None
1402 v = None
1398 for i in help.helptable:
1403 for i in help.helptable:
1399 l = i.split('|')
1404 l = i.split('|')
1400 if name in l:
1405 if name in l:
1401 v = i
1406 v = i
1402 header = l[-1]
1407 header = l[-1]
1403 if not v:
1408 if not v:
1404 raise cmdutil.UnknownCommand(name)
1409 raise cmdutil.UnknownCommand(name)
1405
1410
1406 # description
1411 # description
1407 doc = help.helptable[v]
1412 doc = help.helptable[v]
1408 if not doc:
1413 if not doc:
1409 doc = _("(No help text available)")
1414 doc = _("(No help text available)")
1410 if callable(doc):
1415 if callable(doc):
1411 doc = doc()
1416 doc = doc()
1412
1417
1413 ui.write("%s\n" % header)
1418 ui.write("%s\n" % header)
1414 ui.write("%s\n" % doc.rstrip())
1419 ui.write("%s\n" % doc.rstrip())
1415
1420
1416 def helpext(name):
1421 def helpext(name):
1417 try:
1422 try:
1418 mod = extensions.find(name)
1423 mod = extensions.find(name)
1419 except KeyError:
1424 except KeyError:
1420 raise cmdutil.UnknownCommand(name)
1425 raise cmdutil.UnknownCommand(name)
1421
1426
1422 doc = (mod.__doc__ or _('No help text available')).splitlines(0)
1427 doc = (mod.__doc__ or _('No help text available')).splitlines(0)
1423 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
1428 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
1424 for d in doc[1:]:
1429 for d in doc[1:]:
1425 ui.write(d, '\n')
1430 ui.write(d, '\n')
1426
1431
1427 ui.status('\n')
1432 ui.status('\n')
1428
1433
1429 try:
1434 try:
1430 ct = mod.cmdtable
1435 ct = mod.cmdtable
1431 except AttributeError:
1436 except AttributeError:
1432 ct = None
1437 ct = {}
1433 if not ct:
1438
1434 ui.status(_('no commands defined\n'))
1435 return
1436
1437 ui.status(_('list of commands:\n\n'))
1438 modcmds = dict.fromkeys([c.split('|', 1)[0] for c in ct])
1439 modcmds = dict.fromkeys([c.split('|', 1)[0] for c in ct])
1439 helplist(modcmds.has_key)
1440 helplist(_('list of commands:\n\n'), modcmds.has_key)
1440
1441
1441 if name and name != 'shortlist':
1442 if name and name != 'shortlist':
1442 i = None
1443 i = None
1443 for f in (helpcmd, helptopic, helpext):
1444 for f in (helpcmd, helptopic, helpext):
1444 try:
1445 try:
1445 f(name)
1446 f(name)
1446 i = None
1447 i = None
1447 break
1448 break
1448 except cmdutil.UnknownCommand, inst:
1449 except cmdutil.UnknownCommand, inst:
1449 i = inst
1450 i = inst
1450 if i:
1451 if i:
1451 raise i
1452 raise i
1452
1453
1453 else:
1454 else:
1454 # program name
1455 # program name
1455 if ui.verbose or with_version:
1456 if ui.verbose or with_version:
1456 version_(ui)
1457 version_(ui)
1457 else:
1458 else:
1458 ui.status(_("Mercurial Distributed SCM\n"))
1459 ui.status(_("Mercurial Distributed SCM\n"))
1459 ui.status('\n')
1460 ui.status('\n')
1460
1461
1461 # list of commands
1462 # list of commands
1462 if name == "shortlist":
1463 if name == "shortlist":
1463 ui.status(_('basic commands:\n\n'))
1464 header = _('basic commands:\n\n')
1464 else:
1465 else:
1465 ui.status(_('list of commands:\n\n'))
1466 header = _('list of commands:\n\n')
1466
1467
1467 helplist()
1468 helplist(header)
1468
1469
1469 # list all option lists
1470 # list all option lists
1470 opt_output = []
1471 opt_output = []
1471 for title, options in option_lists:
1472 for title, options in option_lists:
1472 opt_output.append(("\n%s" % title, None))
1473 opt_output.append(("\n%s" % title, None))
1473 for shortopt, longopt, default, desc in options:
1474 for shortopt, longopt, default, desc in options:
1474 if "DEPRECATED" in desc and not ui.verbose: continue
1475 if "DEPRECATED" in desc and not ui.verbose: continue
1475 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
1476 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
1476 longopt and " --%s" % longopt),
1477 longopt and " --%s" % longopt),
1477 "%s%s" % (desc,
1478 "%s%s" % (desc,
1478 default
1479 default
1479 and _(" (default: %s)") % default
1480 and _(" (default: %s)") % default
1480 or "")))
1481 or "")))
1481
1482
1482 if opt_output:
1483 if opt_output:
1483 opts_len = max([len(line[0]) for line in opt_output if line[1]] or [0])
1484 opts_len = max([len(line[0]) for line in opt_output if line[1]] or [0])
1484 for first, second in opt_output:
1485 for first, second in opt_output:
1485 if second:
1486 if second:
1486 ui.write(" %-*s %s\n" % (opts_len, first, second))
1487 ui.write(" %-*s %s\n" % (opts_len, first, second))
1487 else:
1488 else:
1488 ui.write("%s\n" % first)
1489 ui.write("%s\n" % first)
1489
1490
1490 def identify(ui, repo, source=None,
1491 def identify(ui, repo, source=None,
1491 rev=None, num=None, id=None, branch=None, tags=None):
1492 rev=None, num=None, id=None, branch=None, tags=None):
1492 """identify the working copy or specified revision
1493 """identify the working copy or specified revision
1493
1494
1494 With no revision, print a summary of the current state of the repo.
1495 With no revision, print a summary of the current state of the repo.
1495
1496
1496 With a path, do a lookup in another repository.
1497 With a path, do a lookup in another repository.
1497
1498
1498 This summary identifies the repository state using one or two parent
1499 This summary identifies the repository state using one or two parent
1499 hash identifiers, followed by a "+" if there are uncommitted changes
1500 hash identifiers, followed by a "+" if there are uncommitted changes
1500 in the working directory, a list of tags for this revision and a branch
1501 in the working directory, a list of tags for this revision and a branch
1501 name for non-default branches.
1502 name for non-default branches.
1502 """
1503 """
1503
1504
1504 hexfunc = ui.debugflag and hex or short
1505 hexfunc = ui.debugflag and hex or short
1505 default = not (num or id or branch or tags)
1506 default = not (num or id or branch or tags)
1506 output = []
1507 output = []
1507
1508
1508 if source:
1509 if source:
1509 source, revs = cmdutil.parseurl(ui.expandpath(source), [])
1510 source, revs = cmdutil.parseurl(ui.expandpath(source), [])
1510 srepo = hg.repository(ui, source)
1511 srepo = hg.repository(ui, source)
1511 if not rev and revs:
1512 if not rev and revs:
1512 rev = revs[0]
1513 rev = revs[0]
1513 if not rev:
1514 if not rev:
1514 rev = "tip"
1515 rev = "tip"
1515 if num or branch or tags:
1516 if num or branch or tags:
1516 raise util.Abort(
1517 raise util.Abort(
1517 "can't query remote revision number, branch, or tags")
1518 "can't query remote revision number, branch, or tags")
1518 output = [hexfunc(srepo.lookup(rev))]
1519 output = [hexfunc(srepo.lookup(rev))]
1519 elif not rev:
1520 elif not rev:
1520 ctx = repo.workingctx()
1521 ctx = repo.workingctx()
1521 parents = ctx.parents()
1522 parents = ctx.parents()
1522 changed = False
1523 changed = False
1523 if default or id or num:
1524 if default or id or num:
1524 changed = ctx.files() + ctx.deleted()
1525 changed = ctx.files() + ctx.deleted()
1525 if default or id:
1526 if default or id:
1526 output = ["%s%s" % ('+'.join([hexfunc(p.node()) for p in parents]),
1527 output = ["%s%s" % ('+'.join([hexfunc(p.node()) for p in parents]),
1527 (changed) and "+" or "")]
1528 (changed) and "+" or "")]
1528 if num:
1529 if num:
1529 output.append("%s%s" % ('+'.join([str(p.rev()) for p in parents]),
1530 output.append("%s%s" % ('+'.join([str(p.rev()) for p in parents]),
1530 (changed) and "+" or ""))
1531 (changed) and "+" or ""))
1531 else:
1532 else:
1532 ctx = repo.changectx(rev)
1533 ctx = repo.changectx(rev)
1533 if default or id:
1534 if default or id:
1534 output = [hexfunc(ctx.node())]
1535 output = [hexfunc(ctx.node())]
1535 if num:
1536 if num:
1536 output.append(str(ctx.rev()))
1537 output.append(str(ctx.rev()))
1537
1538
1538 if not source and default and not ui.quiet:
1539 if not source and default and not ui.quiet:
1539 b = util.tolocal(ctx.branch())
1540 b = util.tolocal(ctx.branch())
1540 if b != 'default':
1541 if b != 'default':
1541 output.append("(%s)" % b)
1542 output.append("(%s)" % b)
1542
1543
1543 # multiple tags for a single parent separated by '/'
1544 # multiple tags for a single parent separated by '/'
1544 t = "/".join(ctx.tags())
1545 t = "/".join(ctx.tags())
1545 if t:
1546 if t:
1546 output.append(t)
1547 output.append(t)
1547
1548
1548 if branch:
1549 if branch:
1549 output.append(util.tolocal(ctx.branch()))
1550 output.append(util.tolocal(ctx.branch()))
1550
1551
1551 if tags:
1552 if tags:
1552 output.extend(ctx.tags())
1553 output.extend(ctx.tags())
1553
1554
1554 ui.write("%s\n" % ' '.join(output))
1555 ui.write("%s\n" % ' '.join(output))
1555
1556
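# --- Editor's illustrative sketch (not part of this changeset) ---
# identify() above assembles its output as a list of fragments and prints them
# space-separated: parent hashes joined by "+", a trailing "+" for a dirty
# working directory, then optional branch and tag decorations. A toy model
# with made-up values and a hypothetical helper name:
def _format_identify(parent_hashes, dirty, branch=None, tags=()):
    out = ['+'.join(parent_hashes) + (dirty and '+' or '')]
    if branch and branch != 'default':
        out.append('(%s)' % branch)     # only non-default branches are shown
    if tags:
        out.append('/'.join(tags))      # multiple tags joined by '/'
    return ' '.join(out)
# e.g. _format_identify(['a1b2c3d4'], True, tags=['tip']) -> 'a1b2c3d4+ tip'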
1556 def import_(ui, repo, patch1, *patches, **opts):
1557 def import_(ui, repo, patch1, *patches, **opts):
1557 """import an ordered set of patches
1558 """import an ordered set of patches
1558
1559
1559 Import a list of patches and commit them individually.
1560 Import a list of patches and commit them individually.
1560
1561
1561 If there are outstanding changes in the working directory, import
1562 If there are outstanding changes in the working directory, import
1562 will abort unless given the -f flag.
1563 will abort unless given the -f flag.
1563
1564
1564 You can import a patch straight from a mail message. Even patches
1565 You can import a patch straight from a mail message. Even patches
1565 as attachments work (body part must be type text/plain or
1566 as attachments work (body part must be type text/plain or
1566 text/x-patch to be used). From and Subject headers of email
1567 text/x-patch to be used). From and Subject headers of email
1567 message are used as default committer and commit message. All
1568 message are used as default committer and commit message. All
1568 text/plain body parts before first diff are added to commit
1569 text/plain body parts before first diff are added to commit
1569 message.
1570 message.
1570
1571
1571 If the imported patch was generated by hg export, user and description
1572 If the imported patch was generated by hg export, user and description
1572 from patch override values from message headers and body. Values
1573 from patch override values from message headers and body. Values
1573 given on command line with -m and -u override these.
1574 given on command line with -m and -u override these.
1574
1575
1575 If --exact is specified, import will set the working directory
1576 If --exact is specified, import will set the working directory
1576 to the parent of each patch before applying it, and will abort
1577 to the parent of each patch before applying it, and will abort
1577 if the resulting changeset has a different ID than the one
1578 if the resulting changeset has a different ID than the one
1578 recorded in the patch. This may happen due to character set
1579 recorded in the patch. This may happen due to character set
1579 problems or other deficiencies in the text patch format.
1580 problems or other deficiencies in the text patch format.
1580
1581
1581 To read a patch from standard input, use patch name "-".
1582 To read a patch from standard input, use patch name "-".
1582 """
1583 """
1583 patches = (patch1,) + patches
1584 patches = (patch1,) + patches
1584
1585
1585 if opts.get('exact') or not opts['force']:
1586 if opts.get('exact') or not opts['force']:
1586 cmdutil.bail_if_changed(repo)
1587 cmdutil.bail_if_changed(repo)
1587
1588
1588 d = opts["base"]
1589 d = opts["base"]
1589 strip = opts["strip"]
1590 strip = opts["strip"]
1590 wlock = lock = None
1591 wlock = lock = None
1591 try:
1592 try:
1592 wlock = repo.wlock()
1593 wlock = repo.wlock()
1593 lock = repo.lock()
1594 lock = repo.lock()
1594 for p in patches:
1595 for p in patches:
1595 pf = os.path.join(d, p)
1596 pf = os.path.join(d, p)
1596
1597
1597 if pf == '-':
1598 if pf == '-':
1598 ui.status(_("applying patch from stdin\n"))
1599 ui.status(_("applying patch from stdin\n"))
1599 data = patch.extract(ui, sys.stdin)
1600 data = patch.extract(ui, sys.stdin)
1600 else:
1601 else:
1601 ui.status(_("applying %s\n") % p)
1602 ui.status(_("applying %s\n") % p)
1602 data = patch.extract(ui, file(pf, 'rb'))
1603 data = patch.extract(ui, file(pf, 'rb'))
1603
1604
1604 tmpname, message, user, date, branch, nodeid, p1, p2 = data
1605 tmpname, message, user, date, branch, nodeid, p1, p2 = data
1605
1606
1606 if tmpname is None:
1607 if tmpname is None:
1607 raise util.Abort(_('no diffs found'))
1608 raise util.Abort(_('no diffs found'))
1608
1609
1609 try:
1610 try:
1610 cmdline_message = cmdutil.logmessage(opts)
1611 cmdline_message = cmdutil.logmessage(opts)
1611 if cmdline_message:
1612 if cmdline_message:
1612 # pickup the cmdline msg
1613 # pickup the cmdline msg
1613 message = cmdline_message
1614 message = cmdline_message
1614 elif message:
1615 elif message:
1615 # pickup the patch msg
1616 # pickup the patch msg
1616 message = message.strip()
1617 message = message.strip()
1617 else:
1618 else:
1618 # launch the editor
1619 # launch the editor
1619 message = None
1620 message = None
1620 ui.debug(_('message:\n%s\n') % message)
1621 ui.debug(_('message:\n%s\n') % message)
1621
1622
1622 wp = repo.workingctx().parents()
1623 wp = repo.workingctx().parents()
1623 if opts.get('exact'):
1624 if opts.get('exact'):
1624 if not nodeid or not p1:
1625 if not nodeid or not p1:
1625 raise util.Abort(_('not a mercurial patch'))
1626 raise util.Abort(_('not a mercurial patch'))
1626 p1 = repo.lookup(p1)
1627 p1 = repo.lookup(p1)
1627 p2 = repo.lookup(p2 or hex(nullid))
1628 p2 = repo.lookup(p2 or hex(nullid))
1628
1629
1629 if p1 != wp[0].node():
1630 if p1 != wp[0].node():
1630 hg.clean(repo, p1)
1631 hg.clean(repo, p1)
1631 repo.dirstate.setparents(p1, p2)
1632 repo.dirstate.setparents(p1, p2)
1632 elif p2:
1633 elif p2:
1633 try:
1634 try:
1634 p1 = repo.lookup(p1)
1635 p1 = repo.lookup(p1)
1635 p2 = repo.lookup(p2)
1636 p2 = repo.lookup(p2)
1636 if p1 == wp[0].node():
1637 if p1 == wp[0].node():
1637 repo.dirstate.setparents(p1, p2)
1638 repo.dirstate.setparents(p1, p2)
1638 except hg.RepoError:
1639 except hg.RepoError:
1639 pass
1640 pass
1640 if opts.get('exact') or opts.get('import_branch'):
1641 if opts.get('exact') or opts.get('import_branch'):
1641 repo.dirstate.setbranch(branch or 'default')
1642 repo.dirstate.setbranch(branch or 'default')
1642
1643
1643 files = {}
1644 files = {}
1644 try:
1645 try:
1645 fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
1646 fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
1646 files=files)
1647 files=files)
1647 finally:
1648 finally:
1648 files = patch.updatedir(ui, repo, files)
1649 files = patch.updatedir(ui, repo, files)
1649 n = repo.commit(files, message, user, date)
1650 n = repo.commit(files, message, user, date)
1650 if opts.get('exact'):
1651 if opts.get('exact'):
1651 if hex(n) != nodeid:
1652 if hex(n) != nodeid:
1652 repo.rollback()
1653 repo.rollback()
1653 raise util.Abort(_('patch is damaged' +
1654 raise util.Abort(_('patch is damaged' +
1654 ' or loses information'))
1655 ' or loses information'))
1655 finally:
1656 finally:
1656 os.unlink(tmpname)
1657 os.unlink(tmpname)
1657 finally:
1658 finally:
1658 del wlock, lock
1659 del wlock, lock
1659
1660
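# --- Editor's illustrative sketch (not part of this changeset) ---
# import_() above chooses the commit message by a simple precedence: text
# given with -m/-l wins, otherwise the description extracted from the patch,
# otherwise None so that commit launches an editor. The same rule in
# isolation (helper name is hypothetical):
def _choose_message(cmdline_message, patch_message):
    if cmdline_message:
        return cmdline_message           # explicit -m / -l text
    if patch_message:
        return patch_message.strip()     # description carried in the patch
    return None                          # fall through to the editor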
1660 def incoming(ui, repo, source="default", **opts):
1661 def incoming(ui, repo, source="default", **opts):
1661 """show new changesets found in source
1662 """show new changesets found in source
1662
1663
1663 Show new changesets found in the specified path/URL or the default
1664 Show new changesets found in the specified path/URL or the default
1664 pull location. These are the changesets that would be pulled if a pull
1665 pull location. These are the changesets that would be pulled if a pull
1665 was requested.
1666 was requested.
1666
1667
1667 For remote repositories, using --bundle avoids downloading the changesets
1668 For remote repositories, using --bundle avoids downloading the changesets
1668 twice if the incoming is followed by a pull.
1669 twice if the incoming is followed by a pull.
1669
1670
1670 See pull for valid source format details.
1671 See pull for valid source format details.
1671 """
1672 """
1672 source, revs = cmdutil.parseurl(ui.expandpath(source), opts['rev'])
1673 source, revs = cmdutil.parseurl(ui.expandpath(source), opts['rev'])
1673 cmdutil.setremoteconfig(ui, opts)
1674 cmdutil.setremoteconfig(ui, opts)
1674
1675
1675 other = hg.repository(ui, source)
1676 other = hg.repository(ui, source)
1676 ui.status(_('comparing with %s\n') % source)
1677 ui.status(_('comparing with %s\n') % source)
1677 if revs:
1678 if revs:
1678 if 'lookup' in other.capabilities:
1679 if 'lookup' in other.capabilities:
1679 revs = [other.lookup(rev) for rev in revs]
1680 revs = [other.lookup(rev) for rev in revs]
1680 else:
1681 else:
1681 error = _("Other repository doesn't support revision lookup, so a rev cannot be specified.")
1682 error = _("Other repository doesn't support revision lookup, so a rev cannot be specified.")
1682 raise util.Abort(error)
1683 raise util.Abort(error)
1683 incoming = repo.findincoming(other, heads=revs, force=opts["force"])
1684 incoming = repo.findincoming(other, heads=revs, force=opts["force"])
1684 if not incoming:
1685 if not incoming:
1685 try:
1686 try:
1686 os.unlink(opts["bundle"])
1687 os.unlink(opts["bundle"])
1687 except:
1688 except:
1688 pass
1689 pass
1689 ui.status(_("no changes found\n"))
1690 ui.status(_("no changes found\n"))
1690 return 1
1691 return 1
1691
1692
1692 cleanup = None
1693 cleanup = None
1693 try:
1694 try:
1694 fname = opts["bundle"]
1695 fname = opts["bundle"]
1695 if fname or not other.local():
1696 if fname or not other.local():
1696 # create a bundle (uncompressed if other repo is not local)
1697 # create a bundle (uncompressed if other repo is not local)
1697 if revs is None:
1698 if revs is None:
1698 cg = other.changegroup(incoming, "incoming")
1699 cg = other.changegroup(incoming, "incoming")
1699 else:
1700 else:
1700 if 'changegroupsubset' not in other.capabilities:
1701 if 'changegroupsubset' not in other.capabilities:
1701 raise util.Abort(_("Partial incoming cannot be done because other repository doesn't support changegroupsubset."))
1702 raise util.Abort(_("Partial incoming cannot be done because other repository doesn't support changegroupsubset."))
1702 cg = other.changegroupsubset(incoming, revs, 'incoming')
1703 cg = other.changegroupsubset(incoming, revs, 'incoming')
1703 bundletype = other.local() and "HG10BZ" or "HG10UN"
1704 bundletype = other.local() and "HG10BZ" or "HG10UN"
1704 fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
1705 fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
1705 # keep written bundle?
1706 # keep written bundle?
1706 if opts["bundle"]:
1707 if opts["bundle"]:
1707 cleanup = None
1708 cleanup = None
1708 if not other.local():
1709 if not other.local():
1709 # use the created uncompressed bundlerepo
1710 # use the created uncompressed bundlerepo
1710 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1711 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1711
1712
1712 o = other.changelog.nodesbetween(incoming, revs)[0]
1713 o = other.changelog.nodesbetween(incoming, revs)[0]
1713 if opts['newest_first']:
1714 if opts['newest_first']:
1714 o.reverse()
1715 o.reverse()
1715 displayer = cmdutil.show_changeset(ui, other, opts)
1716 displayer = cmdutil.show_changeset(ui, other, opts)
1716 for n in o:
1717 for n in o:
1717 parents = [p for p in other.changelog.parents(n) if p != nullid]
1718 parents = [p for p in other.changelog.parents(n) if p != nullid]
1718 if opts['no_merges'] and len(parents) == 2:
1719 if opts['no_merges'] and len(parents) == 2:
1719 continue
1720 continue
1720 displayer.show(changenode=n)
1721 displayer.show(changenode=n)
1721 finally:
1722 finally:
1722 if hasattr(other, 'close'):
1723 if hasattr(other, 'close'):
1723 other.close()
1724 other.close()
1724 if cleanup:
1725 if cleanup:
1725 os.unlink(cleanup)
1726 os.unlink(cleanup)
1726
1727
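# --- Editor's illustrative sketch (not part of this changeset) ---
# incoming() above writes a bundle when --bundle is given or the source is
# remote, compresses it only for local sources, and deletes it afterwards
# unless the user asked to keep it. The decision table in miniature
# (helper name is hypothetical):
def _bundle_plan(bundle_opt, other_is_local):
    write = bool(bundle_opt) or not other_is_local
    bundletype = other_is_local and "HG10BZ" or "HG10UN"   # uncompressed over the wire
    keep = bool(bundle_opt)                                 # cleanup is skipped when kept
    return write, bundletype, keep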
1727 def init(ui, dest=".", **opts):
1728 def init(ui, dest=".", **opts):
1728 """create a new repository in the given directory
1729 """create a new repository in the given directory
1729
1730
1730 Initialize a new repository in the given directory. If the given
1731 Initialize a new repository in the given directory. If the given
1731 directory does not exist, it is created.
1732 directory does not exist, it is created.
1732
1733
1733 If no directory is given, the current directory is used.
1734 If no directory is given, the current directory is used.
1734
1735
1735 It is possible to specify an ssh:// URL as the destination.
1736 It is possible to specify an ssh:// URL as the destination.
1736 Look at the help text for the pull command for important details
1737 Look at the help text for the pull command for important details
1737 about ssh:// URLs.
1738 about ssh:// URLs.
1738 """
1739 """
1739 cmdutil.setremoteconfig(ui, opts)
1740 cmdutil.setremoteconfig(ui, opts)
1740 hg.repository(ui, dest, create=1)
1741 hg.repository(ui, dest, create=1)
1741
1742
1742 def locate(ui, repo, *pats, **opts):
1743 def locate(ui, repo, *pats, **opts):
1743 """locate files matching specific patterns
1744 """locate files matching specific patterns
1744
1745
1745 Print all files under Mercurial control whose names match the
1746 Print all files under Mercurial control whose names match the
1746 given patterns.
1747 given patterns.
1747
1748
1748 This command searches the entire repository by default. To search
1749 This command searches the entire repository by default. To search
1749 just the current directory and its subdirectories, use
1750 just the current directory and its subdirectories, use
1750 "--include .".
1751 "--include .".
1751
1752
1752 If no patterns are given to match, this command prints all file
1753 If no patterns are given to match, this command prints all file
1753 names.
1754 names.
1754
1755
1755 If you want to feed the output of this command into the "xargs"
1756 If you want to feed the output of this command into the "xargs"
1756 command, use the "-0" option to both this command and "xargs".
1757 command, use the "-0" option to both this command and "xargs".
1757 This will avoid the problem of "xargs" treating single filenames
1758 This will avoid the problem of "xargs" treating single filenames
1758 that contain white space as multiple filenames.
1759 that contain white space as multiple filenames.
1759 """
1760 """
1760 end = opts['print0'] and '\0' or '\n'
1761 end = opts['print0'] and '\0' or '\n'
1761 rev = opts['rev']
1762 rev = opts['rev']
1762 if rev:
1763 if rev:
1763 node = repo.lookup(rev)
1764 node = repo.lookup(rev)
1764 else:
1765 else:
1765 node = None
1766 node = None
1766
1767
1767 ret = 1
1768 ret = 1
1768 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
1769 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
1769 badmatch=util.always,
1770 badmatch=util.always,
1770 default='relglob'):
1771 default='relglob'):
1771 if src == 'b':
1772 if src == 'b':
1772 continue
1773 continue
1773 if not node and abs not in repo.dirstate:
1774 if not node and abs not in repo.dirstate:
1774 continue
1775 continue
1775 if opts['fullpath']:
1776 if opts['fullpath']:
1776 ui.write(os.path.join(repo.root, abs), end)
1777 ui.write(os.path.join(repo.root, abs), end)
1777 else:
1778 else:
1778 ui.write(((pats and rel) or abs), end)
1779 ui.write(((pats and rel) or abs), end)
1779 ret = 0
1780 ret = 0
1780
1781
1781 return ret
1782 return ret
1782
1783
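# --- Editor's illustrative sketch (not part of this changeset) ---
# As the locate help above notes, -0/--print0 terminates each name with NUL so
# the output pipes safely into "xargs -0". A hypothetical driver using only
# the standard library (assumes an "hg" executable on PATH):
import subprocess

def _locate0(pattern):
    out = subprocess.Popen(['hg', 'locate', '-0', pattern],
                           stdout=subprocess.PIPE).communicate()[0]
    return [f for f in out.split('\0') if f]   # NUL-separated file names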
1783 def log(ui, repo, *pats, **opts):
1784 def log(ui, repo, *pats, **opts):
1784 """show revision history of entire repository or files
1785 """show revision history of entire repository or files
1785
1786
1786 Print the revision history of the specified files or the entire
1787 Print the revision history of the specified files or the entire
1787 project.
1788 project.
1788
1789
1789 File history is shown without following rename or copy history of
1790 File history is shown without following rename or copy history of
1790 files. Use -f/--follow with a file name to follow history across
1791 files. Use -f/--follow with a file name to follow history across
1791 renames and copies. --follow without a file name will only show
1792 renames and copies. --follow without a file name will only show
1792 ancestors or descendants of the starting revision. --follow-first
1793 ancestors or descendants of the starting revision. --follow-first
1793 only follows the first parent of merge revisions.
1794 only follows the first parent of merge revisions.
1794
1795
1795 If no revision range is specified, the default is tip:0 unless
1796 If no revision range is specified, the default is tip:0 unless
1796 --follow is set, in which case the working directory parent is
1797 --follow is set, in which case the working directory parent is
1797 used as the starting revision.
1798 used as the starting revision.
1798
1799
1799 By default this command outputs: changeset id and hash, tags,
1800 By default this command outputs: changeset id and hash, tags,
1800 non-trivial parents, user, date and time, and a summary for each
1801 non-trivial parents, user, date and time, and a summary for each
1801 commit. When the -v/--verbose switch is used, the list of changed
1802 commit. When the -v/--verbose switch is used, the list of changed
1802 files and full commit message is shown.
1803 files and full commit message is shown.
1803
1804
1804 NOTE: log -p may generate unexpected diff output for merge
1805 NOTE: log -p may generate unexpected diff output for merge
1805 changesets, as it will compare the merge changeset against its
1806 changesets, as it will compare the merge changeset against its
1806 first parent only. Also, the files: list will only reflect files
1807 first parent only. Also, the files: list will only reflect files
1807 that are different from BOTH parents.
1808 that are different from BOTH parents.
1808
1809
1809 """
1810 """
1810
1811
1811 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1812 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1812 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1813 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1813
1814
1814 if opts['limit']:
1815 if opts['limit']:
1815 try:
1816 try:
1816 limit = int(opts['limit'])
1817 limit = int(opts['limit'])
1817 except ValueError:
1818 except ValueError:
1818 raise util.Abort(_('limit must be a positive integer'))
1819 raise util.Abort(_('limit must be a positive integer'))
1819 if limit <= 0: raise util.Abort(_('limit must be positive'))
1820 if limit <= 0: raise util.Abort(_('limit must be positive'))
1820 else:
1821 else:
1821 limit = sys.maxint
1822 limit = sys.maxint
1822 count = 0
1823 count = 0
1823
1824
1824 if opts['copies'] and opts['rev']:
1825 if opts['copies'] and opts['rev']:
1825 endrev = max(cmdutil.revrange(repo, opts['rev'])) + 1
1826 endrev = max(cmdutil.revrange(repo, opts['rev'])) + 1
1826 else:
1827 else:
1827 endrev = repo.changelog.count()
1828 endrev = repo.changelog.count()
1828 rcache = {}
1829 rcache = {}
1829 ncache = {}
1830 ncache = {}
1830 dcache = []
1831 dcache = []
1831 def getrenamed(fn, rev, man):
1832 def getrenamed(fn, rev, man):
1832 '''looks up all renames for a file (up to endrev) the first
1833 '''looks up all renames for a file (up to endrev) the first
1833 time the file is given. It indexes on the changerev and only
1834 time the file is given. It indexes on the changerev and only
1834 parses the manifest if linkrev != changerev.
1835 parses the manifest if linkrev != changerev.
1835 Returns rename info for fn at changerev rev.'''
1836 Returns rename info for fn at changerev rev.'''
1836 if fn not in rcache:
1837 if fn not in rcache:
1837 rcache[fn] = {}
1838 rcache[fn] = {}
1838 ncache[fn] = {}
1839 ncache[fn] = {}
1839 fl = repo.file(fn)
1840 fl = repo.file(fn)
1840 for i in xrange(fl.count()):
1841 for i in xrange(fl.count()):
1841 node = fl.node(i)
1842 node = fl.node(i)
1842 lr = fl.linkrev(node)
1843 lr = fl.linkrev(node)
1843 renamed = fl.renamed(node)
1844 renamed = fl.renamed(node)
1844 rcache[fn][lr] = renamed
1845 rcache[fn][lr] = renamed
1845 if renamed:
1846 if renamed:
1846 ncache[fn][node] = renamed
1847 ncache[fn][node] = renamed
1847 if lr >= endrev:
1848 if lr >= endrev:
1848 break
1849 break
1849 if rev in rcache[fn]:
1850 if rev in rcache[fn]:
1850 return rcache[fn][rev]
1851 return rcache[fn][rev]
1851 mr = repo.manifest.rev(man)
1852 mr = repo.manifest.rev(man)
1852 if repo.manifest.parentrevs(mr) != (mr - 1, nullrev):
1853 if repo.manifest.parentrevs(mr) != (mr - 1, nullrev):
1853 return ncache[fn].get(repo.manifest.find(man, fn)[0])
1854 return ncache[fn].get(repo.manifest.find(man, fn)[0])
1854 if not dcache or dcache[0] != man:
1855 if not dcache or dcache[0] != man:
1855 dcache[:] = [man, repo.manifest.readdelta(man)]
1856 dcache[:] = [man, repo.manifest.readdelta(man)]
1856 if fn in dcache[1]:
1857 if fn in dcache[1]:
1857 return ncache[fn].get(dcache[1][fn])
1858 return ncache[fn].get(dcache[1][fn])
1858 return None
1859 return None
1859
1860
1860 df = False
1861 df = False
1861 if opts["date"]:
1862 if opts["date"]:
1862 df = util.matchdate(opts["date"])
1863 df = util.matchdate(opts["date"])
1863
1864
1864 displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
1865 displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
1865 for st, rev, fns in changeiter:
1866 for st, rev, fns in changeiter:
1866 if st == 'add':
1867 if st == 'add':
1867 changenode = repo.changelog.node(rev)
1868 changenode = repo.changelog.node(rev)
1868 parents = [p for p in repo.changelog.parentrevs(rev)
1869 parents = [p for p in repo.changelog.parentrevs(rev)
1869 if p != nullrev]
1870 if p != nullrev]
1870 if opts['no_merges'] and len(parents) == 2:
1871 if opts['no_merges'] and len(parents) == 2:
1871 continue
1872 continue
1872 if opts['only_merges'] and len(parents) != 2:
1873 if opts['only_merges'] and len(parents) != 2:
1873 continue
1874 continue
1874
1875
1875 if df:
1876 if df:
1876 changes = get(rev)
1877 changes = get(rev)
1877 if not df(changes[2][0]):
1878 if not df(changes[2][0]):
1878 continue
1879 continue
1879
1880
1880 if opts['keyword']:
1881 if opts['keyword']:
1881 changes = get(rev)
1882 changes = get(rev)
1882 miss = 0
1883 miss = 0
1883 for k in [kw.lower() for kw in opts['keyword']]:
1884 for k in [kw.lower() for kw in opts['keyword']]:
1884 if not (k in changes[1].lower() or
1885 if not (k in changes[1].lower() or
1885 k in changes[4].lower() or
1886 k in changes[4].lower() or
1886 k in " ".join(changes[3]).lower()):
1887 k in " ".join(changes[3]).lower()):
1887 miss = 1
1888 miss = 1
1888 break
1889 break
1889 if miss:
1890 if miss:
1890 continue
1891 continue
1891
1892
1892 copies = []
1893 copies = []
1893 if opts.get('copies') and rev:
1894 if opts.get('copies') and rev:
1894 mf = get(rev)[0]
1895 mf = get(rev)[0]
1895 for fn in get(rev)[3]:
1896 for fn in get(rev)[3]:
1896 rename = getrenamed(fn, rev, mf)
1897 rename = getrenamed(fn, rev, mf)
1897 if rename:
1898 if rename:
1898 copies.append((fn, rename[0]))
1899 copies.append((fn, rename[0]))
1899 displayer.show(rev, changenode, copies=copies)
1900 displayer.show(rev, changenode, copies=copies)
1900 elif st == 'iter':
1901 elif st == 'iter':
1901 if count == limit: break
1902 if count == limit: break
1902 if displayer.flush(rev):
1903 if displayer.flush(rev):
1903 count += 1
1904 count += 1
1904
1905
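# --- Editor's illustrative sketch (not part of this changeset) ---
# log() above validates --limit in two steps: the value must parse as an
# integer and must be strictly positive; with no limit given, the count is
# effectively unbounded. The same checks as a standalone helper:
import sys

def _parse_limit(value):
    if not value:
        return sys.maxint                # no limit requested (Python 2, as above)
    try:
        limit = int(value)
    except ValueError:
        raise ValueError('limit must be a positive integer')
    if limit <= 0:
        raise ValueError('limit must be positive')
    return limit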
1905 def manifest(ui, repo, rev=None):
1906 def manifest(ui, repo, rev=None):
1906 """output the current or given revision of the project manifest
1907 """output the current or given revision of the project manifest
1907
1908
1908 Print a list of version controlled files for the given revision.
1909 Print a list of version controlled files for the given revision.
1909 If no revision is given, the parent of the working directory is used,
1910 If no revision is given, the parent of the working directory is used,
1910 or tip if no revision is checked out.
1911 or tip if no revision is checked out.
1911
1912
1912 The manifest is the list of files being version controlled.
1913 The manifest is the list of files being version controlled.
1914
1915
1915 With -v flag, print file permissions. With --debug flag, print
1916 With -v flag, print file permissions. With --debug flag, print
1916 file revision hashes.
1917 file revision hashes.
1917 """
1918 """
1918
1919
1919 m = repo.changectx(rev).manifest()
1920 m = repo.changectx(rev).manifest()
1920 files = m.keys()
1921 files = m.keys()
1921 files.sort()
1922 files.sort()
1922
1923
1923 for f in files:
1924 for f in files:
1924 if ui.debugflag:
1925 if ui.debugflag:
1925 ui.write("%40s " % hex(m[f]))
1926 ui.write("%40s " % hex(m[f]))
1926 if ui.verbose:
1927 if ui.verbose:
1927 ui.write("%3s " % (m.execf(f) and "755" or "644"))
1928 ui.write("%3s " % (m.execf(f) and "755" or "644"))
1928 ui.write("%s\n" % f)
1929 ui.write("%s\n" % f)
1929
1930
1930 def merge(ui, repo, node=None, force=None, rev=None):
1931 def merge(ui, repo, node=None, force=None, rev=None):
1931 """merge working directory with another revision
1932 """merge working directory with another revision
1932
1933
1933 Merge the contents of the current working directory and the
1934 Merge the contents of the current working directory and the
1934 requested revision. Files that changed between either parent are
1935 requested revision. Files that changed between either parent are
1935 marked as changed for the next commit and a commit must be
1936 marked as changed for the next commit and a commit must be
1936 performed before any further updates are allowed.
1937 performed before any further updates are allowed.
1937
1938
1938 If no revision is specified, the working directory's parent is a
1939 If no revision is specified, the working directory's parent is a
1939 head revision, and the repository contains exactly one other head,
1940 head revision, and the repository contains exactly one other head,
1940 the other head is merged by default. Otherwise, an explicit
1941 the other head is merged by default. Otherwise, an explicit
1941 revision to merge with must be provided.
1942 revision to merge with must be provided.
1942 """
1943 """
1943
1944
1944 if rev and node:
1945 if rev and node:
1945 raise util.Abort(_("please specify just one revision"))
1946 raise util.Abort(_("please specify just one revision"))
1946
1947
1947 if not node:
1948 if not node:
1948 node = rev
1949 node = rev
1949
1950
1950 if not node:
1951 if not node:
1951 heads = repo.heads()
1952 heads = repo.heads()
1952 if len(heads) > 2:
1953 if len(heads) > 2:
1953 raise util.Abort(_('repo has %d heads - '
1954 raise util.Abort(_('repo has %d heads - '
1954 'please merge with an explicit rev') %
1955 'please merge with an explicit rev') %
1955 len(heads))
1956 len(heads))
1956 if len(heads) == 1:
1957 if len(heads) == 1:
1957 raise util.Abort(_('there is nothing to merge - '
1958 raise util.Abort(_('there is nothing to merge - '
1958 'use "hg update" instead'))
1959 'use "hg update" instead'))
1959 parent = repo.dirstate.parents()[0]
1960 parent = repo.dirstate.parents()[0]
1960 if parent not in heads:
1961 if parent not in heads:
1961 raise util.Abort(_('working dir not at a head rev - '
1962 raise util.Abort(_('working dir not at a head rev - '
1962 'use "hg update" or merge with an explicit rev'))
1963 'use "hg update" or merge with an explicit rev'))
1963 node = parent == heads[0] and heads[-1] or heads[0]
1964 node = parent == heads[0] and heads[-1] or heads[0]
1964 return hg.merge(repo, node, force=force)
1965 return hg.merge(repo, node, force=force)
1965
1966
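# --- Editor's illustrative sketch (not part of this changeset) ---
# merge() above only picks a revision implicitly when there are exactly two
# heads and the working directory sits on one of them; it then merges with
# the other head. A condensed model of that selection (helper name is
# hypothetical):
def _pick_other_head(heads, parent):
    if len(heads) != 2:
        raise ValueError('an explicit revision to merge with is required')
    if parent not in heads:
        raise ValueError('working dir is not at a head revision')
    return parent == heads[0] and heads[-1] or heads[0]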
1966 def outgoing(ui, repo, dest=None, **opts):
1967 def outgoing(ui, repo, dest=None, **opts):
1967 """show changesets not found in destination
1968 """show changesets not found in destination
1968
1969
1969 Show changesets not found in the specified destination repository or
1970 Show changesets not found in the specified destination repository or
1970 the default push location. These are the changesets that would be pushed
1971 the default push location. These are the changesets that would be pushed
1971 if a push was requested.
1972 if a push was requested.
1972
1973
1973 See pull for valid destination format details.
1974 See pull for valid destination format details.
1974 """
1975 """
1975 dest, revs = cmdutil.parseurl(
1976 dest, revs = cmdutil.parseurl(
1976 ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev'])
1977 ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev'])
1977 cmdutil.setremoteconfig(ui, opts)
1978 cmdutil.setremoteconfig(ui, opts)
1978 if revs:
1979 if revs:
1979 revs = [repo.lookup(rev) for rev in revs]
1980 revs = [repo.lookup(rev) for rev in revs]
1980
1981
1981 other = hg.repository(ui, dest)
1982 other = hg.repository(ui, dest)
1982 ui.status(_('comparing with %s\n') % dest)
1983 ui.status(_('comparing with %s\n') % dest)
1983 o = repo.findoutgoing(other, force=opts['force'])
1984 o = repo.findoutgoing(other, force=opts['force'])
1984 if not o:
1985 if not o:
1985 ui.status(_("no changes found\n"))
1986 ui.status(_("no changes found\n"))
1986 return 1
1987 return 1
1987 o = repo.changelog.nodesbetween(o, revs)[0]
1988 o = repo.changelog.nodesbetween(o, revs)[0]
1988 if opts['newest_first']:
1989 if opts['newest_first']:
1989 o.reverse()
1990 o.reverse()
1990 displayer = cmdutil.show_changeset(ui, repo, opts)
1991 displayer = cmdutil.show_changeset(ui, repo, opts)
1991 for n in o:
1992 for n in o:
1992 parents = [p for p in repo.changelog.parents(n) if p != nullid]
1993 parents = [p for p in repo.changelog.parents(n) if p != nullid]
1993 if opts['no_merges'] and len(parents) == 2:
1994 if opts['no_merges'] and len(parents) == 2:
1994 continue
1995 continue
1995 displayer.show(changenode=n)
1996 displayer.show(changenode=n)
1996
1997
1997 def parents(ui, repo, file_=None, **opts):
1998 def parents(ui, repo, file_=None, **opts):
1998 """show the parents of the working dir or revision
1999 """show the parents of the working dir or revision
1999
2000
2000 Print the working directory's parent revisions. If a
2001 Print the working directory's parent revisions. If a
2001 revision is given via --rev, the parent of that revision
2002 revision is given via --rev, the parent of that revision
2002 will be printed. If a file argument is given, revision in
2003 will be printed. If a file argument is given, revision in
2003 which the file was last changed (before the working directory
2004 which the file was last changed (before the working directory
2004 revision or the argument to --rev if given) is printed.
2005 revision or the argument to --rev if given) is printed.
2005 """
2006 """
2006 rev = opts.get('rev')
2007 rev = opts.get('rev')
2007 if file_:
2008 if file_:
2008 files, match, anypats = cmdutil.matchpats(repo, (file_,), opts)
2009 files, match, anypats = cmdutil.matchpats(repo, (file_,), opts)
2009 if anypats or len(files) != 1:
2010 if anypats or len(files) != 1:
2010 raise util.Abort(_('can only specify an explicit file name'))
2011 raise util.Abort(_('can only specify an explicit file name'))
2011 ctx = repo.filectx(files[0], changeid=rev)
2012 ctx = repo.filectx(files[0], changeid=rev)
2012 elif rev:
2013 elif rev:
2013 ctx = repo.changectx(rev)
2014 ctx = repo.changectx(rev)
2014 else:
2015 else:
2015 ctx = repo.workingctx()
2016 ctx = repo.workingctx()
2016 p = [cp.node() for cp in ctx.parents()]
2017 p = [cp.node() for cp in ctx.parents()]
2017
2018
2018 displayer = cmdutil.show_changeset(ui, repo, opts)
2019 displayer = cmdutil.show_changeset(ui, repo, opts)
2019 for n in p:
2020 for n in p:
2020 if n != nullid:
2021 if n != nullid:
2021 displayer.show(changenode=n)
2022 displayer.show(changenode=n)
2022
2023
2023 def paths(ui, repo, search=None):
2024 def paths(ui, repo, search=None):
2024 """show definition of symbolic path names
2025 """show definition of symbolic path names
2025
2026
2026 Show definition of symbolic path name NAME. If no name is given, show
2027 Show definition of symbolic path name NAME. If no name is given, show
2027 definition of available names.
2028 definition of available names.
2028
2029
2029 Path names are defined in the [paths] section of /etc/mercurial/hgrc
2030 Path names are defined in the [paths] section of /etc/mercurial/hgrc
2030 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
2031 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
2031 """
2032 """
2032 if search:
2033 if search:
2033 for name, path in ui.configitems("paths"):
2034 for name, path in ui.configitems("paths"):
2034 if name == search:
2035 if name == search:
2035 ui.write("%s\n" % path)
2036 ui.write("%s\n" % path)
2036 return
2037 return
2037 ui.warn(_("not found!\n"))
2038 ui.warn(_("not found!\n"))
2038 return 1
2039 return 1
2039 else:
2040 else:
2040 for name, path in ui.configitems("paths"):
2041 for name, path in ui.configitems("paths"):
2041 ui.write("%s = %s\n" % (name, path))
2042 ui.write("%s = %s\n" % (name, path))
2042
2043
2043 def postincoming(ui, repo, modheads, optupdate, wasempty):
2044 def postincoming(ui, repo, modheads, optupdate):
2044 if modheads == 0:
2045 if modheads == 0:
2045 return
2046 return
2046 if optupdate:
2047 if optupdate:
2047 if wasempty:
2048 if modheads == 1:
2048 return hg.update(repo, repo.lookup('default'))
2049 return hg.update(repo, None)
2049 elif modheads == 1:
2050 return hg.update(repo, repo.changelog.tip()) # update
2051 else:
2050 else:
2052 ui.status(_("not updating, since new heads added\n"))
2051 ui.status(_("not updating, since new heads added\n"))
2053 if modheads > 1:
2052 if modheads > 1:
2054 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
2053 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
2055 else:
2054 else:
2056 ui.status(_("(run 'hg update' to get a working copy)\n"))
2055 ui.status(_("(run 'hg update' to get a working copy)\n"))
2057
2056
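# --- Editor's illustrative sketch (not part of this changeset) ---
# The hunk above simplifies postincoming(): the old "wasempty" special case is
# gone, and any pull that leaves exactly one modified head updates by passing
# None to hg.update(). The resulting policy, modelled as a pure function that
# returns the messages/actions instead of printing them:
def _after_pull(modheads, want_update):
    if modheads == 0:
        return []                                  # nothing came in
    if want_update:
        if modheads == 1:
            return ['update working directory']
        notes = ['not updating, since new heads added']
    else:
        notes = []
    if modheads > 1:
        notes.append("run 'hg heads' to see heads, 'hg merge' to merge")
    else:
        notes.append("run 'hg update' to get a working copy")
    return notes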
2058 def pull(ui, repo, source="default", **opts):
2057 def pull(ui, repo, source="default", **opts):
2059 """pull changes from the specified source
2058 """pull changes from the specified source
2060
2059
2061 Pull changes from a remote repository to a local one.
2060 Pull changes from a remote repository to a local one.
2062
2061
2063 This finds all changes from the repository at the specified path
2062 This finds all changes from the repository at the specified path
2064 or URL and adds them to the local repository. By default, this
2063 or URL and adds them to the local repository. By default, this
2065 does not update the copy of the project in the working directory.
2064 does not update the copy of the project in the working directory.
2066
2065
2067 Valid URLs are of the form:
2066 Valid URLs are of the form:
2068
2067
2069 local/filesystem/path (or file://local/filesystem/path)
2068 local/filesystem/path (or file://local/filesystem/path)
2070 http://[user@]host[:port]/[path]
2069 http://[user@]host[:port]/[path]
2071 https://[user@]host[:port]/[path]
2070 https://[user@]host[:port]/[path]
2072 ssh://[user@]host[:port]/[path]
2071 ssh://[user@]host[:port]/[path]
2073 static-http://host[:port]/[path]
2072 static-http://host[:port]/[path]
2074
2073
2075 Paths in the local filesystem can either point to Mercurial
2074 Paths in the local filesystem can either point to Mercurial
2076 repositories or to bundle files (as created by 'hg bundle' or
2075 repositories or to bundle files (as created by 'hg bundle' or
2077 'hg incoming --bundle'). The static-http:// protocol, albeit slow,
2076 'hg incoming --bundle'). The static-http:// protocol, albeit slow,
2078 allows access to a Mercurial repository where you simply use a web
2077 allows access to a Mercurial repository where you simply use a web
2079 server to publish the .hg directory as static content.
2078 server to publish the .hg directory as static content.
2080
2079
2081 An optional identifier after # indicates a particular branch, tag,
2080 An optional identifier after # indicates a particular branch, tag,
2082 or changeset to pull.
2081 or changeset to pull.
2083
2082
2084 Some notes about using SSH with Mercurial:
2083 Some notes about using SSH with Mercurial:
2085 - SSH requires an accessible shell account on the destination machine
2084 - SSH requires an accessible shell account on the destination machine
2086 and a copy of hg in the remote path or specified with remotecmd.
2085 and a copy of hg in the remote path or specified with remotecmd.
2087 - path is relative to the remote user's home directory by default.
2086 - path is relative to the remote user's home directory by default.
2088 Use an extra slash at the start of a path to specify an absolute path:
2087 Use an extra slash at the start of a path to specify an absolute path:
2089 ssh://example.com//tmp/repository
2088 ssh://example.com//tmp/repository
2090 - Mercurial doesn't use its own compression via SSH; the right thing
2089 - Mercurial doesn't use its own compression via SSH; the right thing
2091 to do is to configure it in your ~/.ssh/config, e.g.:
2090 to do is to configure it in your ~/.ssh/config, e.g.:
2092 Host *.mylocalnetwork.example.com
2091 Host *.mylocalnetwork.example.com
2093 Compression no
2092 Compression no
2094 Host *
2093 Host *
2095 Compression yes
2094 Compression yes
2096 Alternatively specify "ssh -C" as your ssh command in your hgrc or
2095 Alternatively specify "ssh -C" as your ssh command in your hgrc or
2097 with the --ssh command line option.
2096 with the --ssh command line option.
2098 """
2097 """
2099 source, revs = cmdutil.parseurl(ui.expandpath(source), opts['rev'])
2098 source, revs = cmdutil.parseurl(ui.expandpath(source), opts['rev'])
2100 cmdutil.setremoteconfig(ui, opts)
2099 cmdutil.setremoteconfig(ui, opts)
2101
2100
2102 other = hg.repository(ui, source)
2101 other = hg.repository(ui, source)
2103 ui.status(_('pulling from %s\n') % (source))
2102 ui.status(_('pulling from %s\n') % (source))
2104 if revs:
2103 if revs:
2105 if 'lookup' in other.capabilities:
2104 if 'lookup' in other.capabilities:
2106 revs = [other.lookup(rev) for rev in revs]
2105 revs = [other.lookup(rev) for rev in revs]
2107 else:
2106 else:
2108 error = _("Other repository doesn't support revision lookup, so a rev cannot be specified.")
2107 error = _("Other repository doesn't support revision lookup, so a rev cannot be specified.")
2109 raise util.Abort(error)
2108 raise util.Abort(error)
2110
2109
2111 wasempty = repo.changelog.count() == 0
2112 modheads = repo.pull(other, heads=revs, force=opts['force'])
2110 modheads = repo.pull(other, heads=revs, force=opts['force'])
2113 return postincoming(ui, repo, modheads, opts['update'], wasempty)
2111 return postincoming(ui, repo, modheads, opts['update'])
2114
2112
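# --- Editor's illustrative sketch (not part of this changeset) ---
# The pull help above says an optional "#name" suffix on a URL selects a
# branch, tag or changeset. A toy splitter for that convention; the real
# parsing lives in cmdutil.parseurl and also folds in --rev values:
def _split_pull_url(url):
    if '#' in url:
        url, rev = url.rsplit('#', 1)
        return url, [rev]
    return url, []
# e.g. _split_pull_url('http://example.com/repo#stable')
#      -> ('http://example.com/repo', ['stable'])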
2115 def push(ui, repo, dest=None, **opts):
2113 def push(ui, repo, dest=None, **opts):
2116 """push changes to the specified destination
2114 """push changes to the specified destination
2117
2115
2118 Push changes from the local repository to the given destination.
2116 Push changes from the local repository to the given destination.
2119
2117
2120 This is the symmetrical operation for pull. It helps to move
2118 This is the symmetrical operation for pull. It helps to move
2121 changes from the current repository to a different one. If the
2119 changes from the current repository to a different one. If the
2122 destination is local this is identical to a pull in that directory
2120 destination is local this is identical to a pull in that directory
2123 from the current one.
2121 from the current one.
2124
2122
2125 By default, push will refuse to run if it detects the result would
2123 By default, push will refuse to run if it detects the result would
2126 increase the number of remote heads. This generally indicates the
2124 increase the number of remote heads. This generally indicates the
2127 client has forgotten to sync and merge before pushing.
2125 client has forgotten to sync and merge before pushing.
2128
2126
2129 Valid URLs are of the form:
2127 Valid URLs are of the form:
2130
2128
2131 local/filesystem/path (or file://local/filesystem/path)
2129 local/filesystem/path (or file://local/filesystem/path)
2132 ssh://[user@]host[:port]/[path]
2130 ssh://[user@]host[:port]/[path]
2133 http://[user@]host[:port]/[path]
2131 http://[user@]host[:port]/[path]
2134 https://[user@]host[:port]/[path]
2132 https://[user@]host[:port]/[path]
2135
2133
2136 An optional identifier after # indicates a particular branch, tag,
2134 An optional identifier after # indicates a particular branch, tag,
2137 or changeset to push.
2135 or changeset to push.
2138
2136
2139 Look at the help text for the pull command for important details
2137 Look at the help text for the pull command for important details
2140 about ssh:// URLs.
2138 about ssh:// URLs.
2141
2139
2142 Pushing to http:// and https:// URLs is only possible if this
2140 Pushing to http:// and https:// URLs is only possible if this
2143 feature is explicitly enabled on the remote Mercurial server.
2141 feature is explicitly enabled on the remote Mercurial server.
2144 """
2142 """
2145 dest, revs = cmdutil.parseurl(
2143 dest, revs = cmdutil.parseurl(
2146 ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev'])
2144 ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev'])
2147 cmdutil.setremoteconfig(ui, opts)
2145 cmdutil.setremoteconfig(ui, opts)
2148
2146
2149 other = hg.repository(ui, dest)
2147 other = hg.repository(ui, dest)
2150 ui.status('pushing to %s\n' % (dest))
2148 ui.status('pushing to %s\n' % (dest))
2151 if revs:
2149 if revs:
2152 revs = [repo.lookup(rev) for rev in revs]
2150 revs = [repo.lookup(rev) for rev in revs]
2153 r = repo.push(other, opts['force'], revs=revs)
2151 r = repo.push(other, opts['force'], revs=revs)
2154 return r == 0
2152 return r == 0
2155
2153
2156 def rawcommit(ui, repo, *pats, **opts):
2154 def rawcommit(ui, repo, *pats, **opts):
2157 """raw commit interface (DEPRECATED)
2155 """raw commit interface (DEPRECATED)
2158
2156
2159 (DEPRECATED)
2157 (DEPRECATED)
2160 Low-level commit, for use in helper scripts.
2158 Low-level commit, for use in helper scripts.
2161
2159
2162 This command is not intended to be used by normal users, as it is
2160 This command is not intended to be used by normal users, as it is
2163 primarily useful for importing from other SCMs.
2161 primarily useful for importing from other SCMs.
2164
2162
2165 This command is now deprecated and will be removed in a future
2163 This command is now deprecated and will be removed in a future
2166 release, please use debugsetparents and commit instead.
2166 release; please use debugsetparents and commit instead.
2164 release; please use debugsetparents and commit instead.
2165 """
2168
2166
2169 ui.warn(_("(the rawcommit command is deprecated)\n"))
2167 ui.warn(_("(the rawcommit command is deprecated)\n"))
2170
2168
2171 message = cmdutil.logmessage(opts)
2169 message = cmdutil.logmessage(opts)
2172
2170
2173 files, match, anypats = cmdutil.matchpats(repo, pats, opts)
2171 files, match, anypats = cmdutil.matchpats(repo, pats, opts)
2174 if opts['files']:
2172 if opts['files']:
2175 files += open(opts['files']).read().splitlines()
2173 files += open(opts['files']).read().splitlines()
2176
2174
2177 parents = [repo.lookup(p) for p in opts['parent']]
2175 parents = [repo.lookup(p) for p in opts['parent']]
2178
2176
2179 try:
2177 try:
2180 repo.rawcommit(files, message, opts['user'], opts['date'], *parents)
2178 repo.rawcommit(files, message, opts['user'], opts['date'], *parents)
2181 except ValueError, inst:
2179 except ValueError, inst:
2182 raise util.Abort(str(inst))
2180 raise util.Abort(str(inst))
2183
2181
2184 def recover(ui, repo):
2182 def recover(ui, repo):
2185 """roll back an interrupted transaction
2183 """roll back an interrupted transaction
2186
2184
2187 Recover from an interrupted commit or pull.
2185 Recover from an interrupted commit or pull.
2188
2186
2189 This command tries to fix the repository status after an interrupted
2187 This command tries to fix the repository status after an interrupted
2190 operation. It should only be necessary when Mercurial suggests it.
2188 operation. It should only be necessary when Mercurial suggests it.
2191 """
2189 """
2192 if repo.recover():
2190 if repo.recover():
2193 return hg.verify(repo)
2191 return hg.verify(repo)
2194 return 1
2192 return 1
2195
2193
2196 def remove(ui, repo, *pats, **opts):
2194 def remove(ui, repo, *pats, **opts):
2197 """remove the specified files on the next commit
2195 """remove the specified files on the next commit
2198
2196
2199 Schedule the indicated files for removal from the repository.
2197 Schedule the indicated files for removal from the repository.
2200
2198
2201 This only removes files from the current branch, not from the
2199 This only removes files from the current branch, not from the
2202 entire project history. If the files still exist in the working
2200 entire project history. If the files still exist in the working
2203 directory, they will be deleted from it. If invoked with --after,
2201 directory, they will be deleted from it. If invoked with --after,
2204 files are marked as removed, but not actually unlinked unless --force
2202 files are marked as removed, but not actually unlinked unless --force
2205 is also given. Without exact file names, --after will only mark
2203 is also given. Without exact file names, --after will only mark
2206 files as removed if they are no longer in the working directory.
2204 files as removed if they are no longer in the working directory.
2207
2205
2208 This command schedules the files to be removed at the next commit.
2206 This command schedules the files to be removed at the next commit.
2209 To undo a remove before that, see hg revert.
2207 To undo a remove before that, see hg revert.
2210
2208
2211 Modified files and added files are not removed by default. To
2209 Modified files and added files are not removed by default. To
2212 remove them, use the -f/--force option.
2210 remove them, use the -f/--force option.
2213 """
2211 """
2214 names = []
2215 if not opts['after'] and not pats:
2212 if not opts['after'] and not pats:
2216 raise util.Abort(_('no files specified'))
2213 raise util.Abort(_('no files specified'))
2217 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2214 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2218 exact = dict.fromkeys(files)
2215 exact = dict.fromkeys(files)
2219 mardu = map(dict.fromkeys, repo.status(files=files, match=matchfn))[:5]
2216 mardu = map(dict.fromkeys, repo.status(files=files, match=matchfn))[:5]
2220 modified, added, removed, deleted, unknown = mardu
2217 modified, added, removed, deleted, unknown = mardu
2221 remove, forget = [], []
2218 remove, forget = [], []
2222 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
2219 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
2223 reason = None
2220 reason = None
2224 if abs in modified and not opts['force']:
2221 if abs in modified and not opts['force']:
2225 reason = _('is modified (use -f to force removal)')
2222 reason = _('is modified (use -f to force removal)')
2226 elif abs in added:
2223 elif abs in added:
2227 if opts['force']:
2224 if opts['force']:
2228 forget.append(abs)
2225 forget.append(abs)
2229 continue
2226 continue
2230 reason = _('has been marked for add (use -f to force removal)')
2227 reason = _('has been marked for add (use -f to force removal)')
2231 elif abs not in repo.dirstate:
2228 elif abs not in repo.dirstate:
2232 reason = _('is not managed')
2229 reason = _('is not managed')
2233 elif opts['after'] and not exact and abs not in deleted:
2230 elif opts['after'] and not exact and abs not in deleted:
2234 continue
2231 continue
2235 elif abs in removed:
2232 elif abs in removed:
2236 continue
2233 continue
2237 if reason:
2234 if reason:
2238 if exact:
2235 if exact:
2239 ui.warn(_('not removing %s: file %s\n') % (rel, reason))
2236 ui.warn(_('not removing %s: file %s\n') % (rel, reason))
2240 else:
2237 else:
2241 if ui.verbose or not exact:
2238 if ui.verbose or not exact:
2242 ui.status(_('removing %s\n') % rel)
2239 ui.status(_('removing %s\n') % rel)
2243 remove.append(abs)
2240 remove.append(abs)
2244 repo.forget(forget)
2241 repo.forget(forget)
2245 repo.remove(remove, unlink=opts['force'] or not opts['after'])
2242 repo.remove(remove, unlink=opts['force'] or not opts['after'])
2246
2243
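# --- Editor's illustrative sketch (not part of this changeset) ---
# remove() above classifies each walked file before scheduling it: modified
# and added files are protected unless --force, unmanaged files are refused,
# and with --after only files already missing from the working directory
# qualify. A simplified decision table (state names are hypothetical):
def _remove_action(state, force, after, exact, missing):
    if state == 'modified' and not force:
        return 'refuse: is modified (use -f to force removal)'
    if state == 'added':
        return force and 'forget' or 'refuse: has been marked for add'
    if state == 'unmanaged':
        return 'refuse: is not managed'
    if after and not exact and not missing:
        return 'skip'
    if state == 'removed':
        return 'skip'
    return 'remove'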
2247 def rename(ui, repo, *pats, **opts):
2244 def rename(ui, repo, *pats, **opts):
2248 """rename files; equivalent of copy + remove
2245 """rename files; equivalent of copy + remove
2249
2246
2250 Mark dest as copies of sources; mark sources for deletion. If
2247 Mark dest as copies of sources; mark sources for deletion. If
2251 dest is a directory, copies are put in that directory. If dest is
2248 dest is a directory, copies are put in that directory. If dest is
2252 a file, there can only be one source.
2249 a file, there can only be one source.
2253
2250
2254 By default, this command copies the contents of files as they
2251 By default, this command copies the contents of files as they
2255 stand in the working directory. If invoked with --after, the
2252 stand in the working directory. If invoked with --after, the
2256 operation is recorded, but no copying is performed.
2253 operation is recorded, but no copying is performed.
2257
2254
2258 This command takes effect in the next commit. To undo a rename
2255 This command takes effect in the next commit. To undo a rename
2259 before that, see hg revert.
2256 before that, see hg revert.
2260 """
2257 """
2261 wlock = repo.wlock(False)
2258 wlock = repo.wlock(False)
2262 try:
2259 try:
2263 errs, copied = docopy(ui, repo, pats, opts)
2260 errs, copied = docopy(ui, repo, pats, opts)
2264 names = []
2261 names = []
2265 for abs, rel, exact in copied:
2262 for abs, rel, exact in copied:
2266 if ui.verbose or not exact:
2263 if ui.verbose or not exact:
2267 ui.status(_('removing %s\n') % rel)
2264 ui.status(_('removing %s\n') % rel)
2268 names.append(abs)
2265 names.append(abs)
2269 if not opts.get('dry_run'):
2266 if not opts.get('dry_run'):
2270 repo.remove(names, True)
2267 repo.remove(names, True)
2271 return errs
2268 return errs
2272 finally:
2269 finally:
2273 del wlock
2270 del wlock
2274
2271
2275 def revert(ui, repo, *pats, **opts):
2272 def revert(ui, repo, *pats, **opts):
2276 """revert files or dirs to their states as of some revision
2273 """revert files or dirs to their states as of some revision
2277
2274
2278 With no revision specified, revert the named files or directories
2275 With no revision specified, revert the named files or directories
2279 to the contents they had in the parent of the working directory.
2276 to the contents they had in the parent of the working directory.
2280 This restores the contents of the affected files to an unmodified
2277 This restores the contents of the affected files to an unmodified
2281 state and unschedules adds, removes, copies, and renames. If the
2278 state and unschedules adds, removes, copies, and renames. If the
2282 working directory has two parents, you must explicitly specify the
2279 working directory has two parents, you must explicitly specify the
2283 revision to revert to.
2280 revision to revert to.
2284
2281
2285 Modified files are saved with a .orig suffix before reverting.
2282 Modified files are saved with a .orig suffix before reverting.
2286 To disable these backups, use --no-backup.
2283 To disable these backups, use --no-backup.
2287
2284
2288 Using the -r option, revert the given files or directories to their
2285 Using the -r option, revert the given files or directories to their
2289 contents as of a specific revision. This can be helpful to "roll
2286 contents as of a specific revision. This can be helpful to "roll
2290 back" some or all of a change that should not have been committed.
2287 back" some or all of a change that should not have been committed.
2291
2288
2292 Revert modifies the working directory. It does not commit any
2289 Revert modifies the working directory. It does not commit any
2293 changes, or change the parent of the working directory. If you
2290 changes, or change the parent of the working directory. If you
2294 revert to a revision other than the parent of the working
2291 revert to a revision other than the parent of the working
2295 directory, the reverted files will thus appear modified
2292 directory, the reverted files will thus appear modified
2296 afterwards.
2293 afterwards.
2297
2294
2298 If a file has been deleted, it is restored. If the executable
2295 If a file has been deleted, it is restored. If the executable
2299 mode of a file was changed, it is reset.
2296 mode of a file was changed, it is reset.
2300
2297
2301 If names are given, all files matching the names are reverted.
2298 If names are given, all files matching the names are reverted.
2302
2299
2303 If no arguments are given, no files are reverted.
2300 If no arguments are given, no files are reverted.
2304 """
2301 """
2305
2302
2306 if opts["date"]:
2303 if opts["date"]:
2307 if opts["rev"]:
2304 if opts["rev"]:
2308 raise util.Abort(_("you can't specify a revision and a date"))
2305 raise util.Abort(_("you can't specify a revision and a date"))
2309 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
2306 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
2310
2307
2311 if not pats and not opts['all']:
2308 if not pats and not opts['all']:
2312 raise util.Abort(_('no files or directories specified; '
2309 raise util.Abort(_('no files or directories specified; '
2313 'use --all to revert the whole repo'))
2310 'use --all to revert the whole repo'))
2314
2311
2315 parent, p2 = repo.dirstate.parents()
2312 parent, p2 = repo.dirstate.parents()
2316 if not opts['rev'] and p2 != nullid:
2313 if not opts['rev'] and p2 != nullid:
2317 raise util.Abort(_('uncommitted merge - please provide a '
2314 raise util.Abort(_('uncommitted merge - please provide a '
2318 'specific revision'))
2315 'specific revision'))
2319 ctx = repo.changectx(opts['rev'])
2316 ctx = repo.changectx(opts['rev'])
2320 node = ctx.node()
2317 node = ctx.node()
2321 mf = ctx.manifest()
2318 mf = ctx.manifest()
2322 if node == parent:
2319 if node == parent:
2323 pmf = mf
2320 pmf = mf
2324 else:
2321 else:
2325 pmf = None
2322 pmf = None
2326
2323
2327 # need all matching names in dirstate and manifest of target rev,
2324 # need all matching names in dirstate and manifest of target rev,
2328 # so have to walk both. do not print errors if files exist in one
2325 # so have to walk both. do not print errors if files exist in one
2329 # but not other.
2326 # but not other.
2330
2327
2331 names = {}
2328 names = {}
2332 target_only = {}
2329 target_only = {}
2333
2330
2334 wlock = repo.wlock()
2331 wlock = repo.wlock()
2335 try:
2332 try:
2336 # walk dirstate.
2333 # walk dirstate.
2337 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
2334 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
2338 badmatch=mf.has_key):
2335 badmatch=mf.has_key):
2339 names[abs] = (rel, exact)
2336 names[abs] = (rel, exact)
2340 if src == 'b':
2337 if src == 'b':
2341 target_only[abs] = True
2338 target_only[abs] = True
2342
2339
2343 # walk target manifest.
2340 # walk target manifest.
2344
2341
2345 def badmatch(path):
2342 def badmatch(path):
2346 if path in names:
2343 if path in names:
2347 return True
2344 return True
2348 path_ = path + '/'
2345 path_ = path + '/'
2349 for f in names:
2346 for f in names:
2350 if f.startswith(path_):
2347 if f.startswith(path_):
2351 return True
2348 return True
2352 return False
2349 return False
2353
2350
2354 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
2351 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
2355 badmatch=badmatch):
2352 badmatch=badmatch):
2356 if abs in names or src == 'b':
2353 if abs in names or src == 'b':
2357 continue
2354 continue
2358 names[abs] = (rel, exact)
2355 names[abs] = (rel, exact)
2359 target_only[abs] = True
2356 target_only[abs] = True
2360
2357
2361 changes = repo.status(match=names.has_key)[:5]
2358 changes = repo.status(match=names.has_key)[:5]
2362 modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)
2359 modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)
2363
2360
2364 revert = ([], _('reverting %s\n'))
2361 revert = ([], _('reverting %s\n'))
2365 add = ([], _('adding %s\n'))
2362 add = ([], _('adding %s\n'))
2366 remove = ([], _('removing %s\n'))
2363 remove = ([], _('removing %s\n'))
2367 forget = ([], _('forgetting %s\n'))
2364 forget = ([], _('forgetting %s\n'))
2368 undelete = ([], _('undeleting %s\n'))
2365 undelete = ([], _('undeleting %s\n'))
2369 update = {}
2366 update = {}
2370
2367
2371 disptable = (
2368 disptable = (
2372 # dispatch table:
2369 # dispatch table:
2373 # file state
2370 # file state
2374 # action if in target manifest
2371 # action if in target manifest
2375 # action if not in target manifest
2372 # action if not in target manifest
2376 # make backup if in target manifest
2373 # make backup if in target manifest
2377 # make backup if not in target manifest
2374 # make backup if not in target manifest
2378 (modified, revert, remove, True, True),
2375 (modified, revert, remove, True, True),
2379 (added, revert, forget, True, False),
2376 (added, revert, forget, True, False),
2380 (removed, undelete, None, False, False),
2377 (removed, undelete, None, False, False),
2381 (deleted, revert, remove, False, False),
2378 (deleted, revert, remove, False, False),
2382 (unknown, add, None, True, False),
2379 (unknown, add, None, True, False),
2383 (target_only, add, None, False, False),
2380 (target_only, add, None, False, False),
2384 )
2381 )
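# How the rows above are read (an informal gloss, not from the original
# source comments): the first element is the status dict whose membership
# is tested, the next two are the action lists used when the file is,
# respectively is not, present in the target manifest, and the last two
# flags say whether a .orig backup is wanted in each of those cases.
# For example, a locally modified file that still exists in the target
# revision matches the (modified, revert, remove, True, True) row and is
# reverted with a backup; if the target manifest no longer knows the file,
# it is scheduled for removal instead.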
2385
2382
2386 entries = names.items()
2383 entries = names.items()
2387 entries.sort()
2384 entries.sort()
2388
2385
2389 for abs, (rel, exact) in entries:
2386 for abs, (rel, exact) in entries:
2390 mfentry = mf.get(abs)
2387 mfentry = mf.get(abs)
2391 target = repo.wjoin(abs)
2388 target = repo.wjoin(abs)
2392 def handle(xlist, dobackup):
2389 def handle(xlist, dobackup):
2393 xlist[0].append(abs)
2390 xlist[0].append(abs)
2394 update[abs] = 1
2391 update[abs] = 1
2395 if dobackup and not opts['no_backup'] and util.lexists(target):
2392 if dobackup and not opts['no_backup'] and util.lexists(target):
2396 bakname = "%s.orig" % rel
2393 bakname = "%s.orig" % rel
2397 ui.note(_('saving current version of %s as %s\n') %
2394 ui.note(_('saving current version of %s as %s\n') %
2398 (rel, bakname))
2395 (rel, bakname))
2399 if not opts.get('dry_run'):
2396 if not opts.get('dry_run'):
2400 util.copyfile(target, bakname)
2397 util.copyfile(target, bakname)
2401 if ui.verbose or not exact:
2398 if ui.verbose or not exact:
2402 ui.status(xlist[1] % rel)
2399 ui.status(xlist[1] % rel)
2403 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2400 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2404 if abs not in table: continue
2401 if abs not in table: continue
2405 # file has changed in dirstate
2402 # file has changed in dirstate
2406 if mfentry:
2403 if mfentry:
2407 handle(hitlist, backuphit)
2404 handle(hitlist, backuphit)
2408 elif misslist is not None:
2405 elif misslist is not None:
2409 handle(misslist, backupmiss)
2406 handle(misslist, backupmiss)
2410 else:
2407 else:
2411 if exact: ui.warn(_('file not managed: %s\n') % rel)
2408 if exact: ui.warn(_('file not managed: %s\n') % rel)
2412 break
2409 break
2413 else:
2410 else:
2414 # file has not changed in dirstate
2411 # file has not changed in dirstate
2415 if node == parent:
2412 if node == parent:
2416 if exact: ui.warn(_('no changes needed to %s\n') % rel)
2413 if exact: ui.warn(_('no changes needed to %s\n') % rel)
2417 continue
2414 continue
2418 if pmf is None:
2415 if pmf is None:
2419 # only need parent manifest in this unlikely case,
2416 # only need parent manifest in this unlikely case,
2420 # so do not read by default
2417 # so do not read by default
2421 pmf = repo.changectx(parent).manifest()
2418 pmf = repo.changectx(parent).manifest()
2422 if abs in pmf:
2419 if abs in pmf:
2423 if mfentry:
2420 if mfentry:
2424 # if version of file is same in parent and target
2421 # if version of file is same in parent and target
2425 # manifests, do nothing
2422 # manifests, do nothing
2426 if pmf[abs] != mfentry:
2423 if pmf[abs] != mfentry:
2427 handle(revert, False)
2424 handle(revert, False)
2428 else:
2425 else:
2429 handle(remove, False)
2426 handle(remove, False)
2430
2427
2431 if not opts.get('dry_run'):
2428 if not opts.get('dry_run'):
2432 for f in forget[0]:
2429 for f in forget[0]:
2433 repo.dirstate.forget(f)
2430 repo.dirstate.forget(f)
2434 r = hg.revert(repo, node, update.has_key)
2431 r = hg.revert(repo, node, update.has_key)
2435 for f in add[0]:
2432 for f in add[0]:
2436 repo.dirstate.add(f)
2433 repo.dirstate.add(f)
2437 for f in undelete[0]:
2434 for f in undelete[0]:
2438 repo.dirstate.normal(f)
2435 repo.dirstate.normal(f)
2439 for f in remove[0]:
2436 for f in remove[0]:
2440 repo.dirstate.remove(f)
2437 repo.dirstate.remove(f)
2441 return r
2438 return r
2442 finally:
2439 finally:
2443 del wlock
2440 del wlock
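# Illustrative command lines that end up in the function above (examples
# only; the option names are the ones declared for "revert" in the command
# table at the end of this file):
#
#     hg revert foo.c                 # back to the working dir's parent
#     hg revert -r 1.0 foo.c          # back to the contents at tag 1.0
#     hg revert --all --no-backup     # whole repo, without .orig copies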
2444
2441
2445 def rollback(ui, repo):
2442 def rollback(ui, repo):
2446 """roll back the last transaction in this repository
2443 """roll back the last transaction in this repository
2447
2444
2448 Roll back the last transaction in this repository, restoring the
2445 Roll back the last transaction in this repository, restoring the
2449 project to its state prior to the transaction.
2446 project to its state prior to the transaction.
2450
2447
2451 Transactions are used to encapsulate the effects of all commands
2448 Transactions are used to encapsulate the effects of all commands
2452 that create new changesets or propagate existing changesets into a
2449 that create new changesets or propagate existing changesets into a
2453 repository. For example, the following commands are transactional,
2450 repository. For example, the following commands are transactional,
2454 and their effects can be rolled back:
2451 and their effects can be rolled back:
2455
2452
2456 commit
2453 commit
2457 import
2454 import
2458 pull
2455 pull
2459 push (with this repository as destination)
2456 push (with this repository as destination)
2460 unbundle
2457 unbundle
2461
2458
2462 This command should be used with care. There is only one level of
2459 This command should be used with care. There is only one level of
2463 rollback, and there is no way to undo a rollback. It will also
2460 rollback, and there is no way to undo a rollback. It will also
2464 restore the dirstate at the time of the last transaction, which
2461 restore the dirstate at the time of the last transaction, which
2465 may lose subsequent dirstate changes.
2462 may lose subsequent dirstate changes.
2466
2463
2467 This command is not intended for use on public repositories. Once
2464 This command is not intended for use on public repositories. Once
2468 changes are visible for pull by other users, rolling a transaction
2465 changes are visible for pull by other users, rolling a transaction
2469 back locally is ineffective (someone else may already have pulled
2466 back locally is ineffective (someone else may already have pulled
2470 the changes). Furthermore, a race is possible with readers of the
2467 the changes). Furthermore, a race is possible with readers of the
2471 repository; for example an in-progress pull from the repository
2468 repository; for example an in-progress pull from the repository
2472 may fail if a rollback is performed.
2469 may fail if a rollback is performed.
2473 """
2470 """
2474 repo.rollback()
2471 repo.rollback()
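# Sketch of the effect (illustration, not taken from the original
# comments): after
#
#     hg commit -m 'oops'
#     hg rollback
#
# the changeset created by the commit is gone again and the dirstate is put
# back to what it was when the transaction started, so the committed files
# show up as modified once more.  Only the most recent transaction can be
# undone this way.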
2475
2472
2476 def root(ui, repo):
2473 def root(ui, repo):
2477 """print the root (top) of the current working dir
2474 """print the root (top) of the current working dir
2478
2475
2479 Print the root directory of the current repository.
2476 Print the root directory of the current repository.
2480 """
2477 """
2481 ui.write(repo.root + "\n")
2478 ui.write(repo.root + "\n")
2482
2479
2483 def serve(ui, repo, **opts):
2480 def serve(ui, repo, **opts):
2484 """export the repository via HTTP
2481 """export the repository via HTTP
2485
2482
2486 Start a local HTTP repository browser and pull server.
2483 Start a local HTTP repository browser and pull server.
2487
2484
2488 By default, the server logs accesses to stdout and errors to
2485 By default, the server logs accesses to stdout and errors to
2489 stderr. Use the "-A" and "-E" options to log to files.
2486 stderr. Use the "-A" and "-E" options to log to files.
2490 """
2487 """
2491
2488
2492 if opts["stdio"]:
2489 if opts["stdio"]:
2493 if repo is None:
2490 if repo is None:
2494 raise hg.RepoError(_("There is no Mercurial repository here"
2491 raise hg.RepoError(_("There is no Mercurial repository here"
2495 " (.hg not found)"))
2492 " (.hg not found)"))
2496 s = sshserver.sshserver(ui, repo)
2493 s = sshserver.sshserver(ui, repo)
2497 s.serve_forever()
2494 s.serve_forever()
2498
2495
2499 parentui = ui.parentui or ui
2496 parentui = ui.parentui or ui
2500 optlist = ("name templates style address port ipv6"
2497 optlist = ("name templates style address port ipv6"
2501 " accesslog errorlog webdir_conf certificate")
2498 " accesslog errorlog webdir_conf certificate")
2502 for o in optlist.split():
2499 for o in optlist.split():
2503 if opts[o]:
2500 if opts[o]:
2504 parentui.setconfig("web", o, str(opts[o]))
2501 parentui.setconfig("web", o, str(opts[o]))
2505 if repo.ui != parentui:
2502 if repo.ui != parentui:
2506 repo.ui.setconfig("web", o, str(opts[o]))
2503 repo.ui.setconfig("web", o, str(opts[o]))
2507
2504
2508 if repo is None and not ui.config("web", "webdir_conf"):
2505 if repo is None and not ui.config("web", "webdir_conf"):
2509 raise hg.RepoError(_("There is no Mercurial repository here"
2506 raise hg.RepoError(_("There is no Mercurial repository here"
2510 " (.hg not found)"))
2507 " (.hg not found)"))
2511
2508
2512 class service:
2509 class service:
2513 def init(self):
2510 def init(self):
2514 util.set_signal_handler()
2511 util.set_signal_handler()
2515 try:
2512 try:
2516 self.httpd = hgweb.server.create_server(parentui, repo)
2513 self.httpd = hgweb.server.create_server(parentui, repo)
2517 except socket.error, inst:
2514 except socket.error, inst:
2518 raise util.Abort(_('cannot start server: ') + inst.args[1])
2515 raise util.Abort(_('cannot start server: ') + inst.args[1])
2519
2516
2520 if not ui.verbose: return
2517 if not ui.verbose: return
2521
2518
2522 if self.httpd.port != 80:
2519 if self.httpd.port != 80:
2523 ui.status(_('listening at http://%s:%d/\n') %
2520 ui.status(_('listening at http://%s:%d/\n') %
2524 (self.httpd.addr, self.httpd.port))
2521 (self.httpd.addr, self.httpd.port))
2525 else:
2522 else:
2526 ui.status(_('listening at http://%s/\n') % self.httpd.addr)
2523 ui.status(_('listening at http://%s/\n') % self.httpd.addr)
2527
2524
2528 def run(self):
2525 def run(self):
2529 self.httpd.serve_forever()
2526 self.httpd.serve_forever()
2530
2527
2531 service = service()
2528 service = service()
2532
2529
2533 cmdutil.service(opts, initfn=service.init, runfn=service.run)
2530 cmdutil.service(opts, initfn=service.init, runfn=service.run)
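# The small service class above is the shape cmdutil.service() works with:
# init() sets up the listening socket (and may abort on socket errors),
# run() blocks in the server loop.  The full opts dict is handed over as
# well, so the --daemon, --daemon-pipefds and --pid-file options declared
# for "serve" in the command table are presumably consumed inside
# cmdutil.service rather than here.  A typical standalone invocation
# (example only):
#
#     hg serve -p 8000 -n "my repo" -d --pid-file hg.pid -E error.log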
2534
2531
2535 def status(ui, repo, *pats, **opts):
2532 def status(ui, repo, *pats, **opts):
2536 """show changed files in the working directory
2533 """show changed files in the working directory
2537
2534
2538 Show status of files in the repository. If names are given, only
2535 Show status of files in the repository. If names are given, only
2539 files that match are shown. Files that are clean or ignored are
2536 files that match are shown. Files that are clean or ignored are
2540 not listed unless -c (clean), -i (ignored) or -A is given.
2537 not listed unless -c (clean), -i (ignored) or -A is given.
2541
2538
2542 NOTE: status may appear to disagree with diff if permissions have
2539 NOTE: status may appear to disagree with diff if permissions have
2543 changed or a merge has occurred. The standard diff format does not
2540 changed or a merge has occurred. The standard diff format does not
2544 report permission changes and diff only reports changes relative
2541 report permission changes and diff only reports changes relative
2545 to one merge parent.
2542 to one merge parent.
2546
2543
2547 If one revision is given, it is used as the base revision.
2544 If one revision is given, it is used as the base revision.
2548 If two revisions are given, the difference between them is shown.
2545 If two revisions are given, the difference between them is shown.
2549
2546
2550 The codes used to show the status of files are:
2547 The codes used to show the status of files are:
2551 M = modified
2548 M = modified
2552 A = added
2549 A = added
2553 R = removed
2550 R = removed
2554 C = clean
2551 C = clean
2555 ! = deleted, but still tracked
2552 ! = deleted, but still tracked
2556 ? = not tracked
2553 ? = not tracked
2557 I = ignored (not shown by default)
2554 I = ignored (not shown by default)
2558 = the previous added file was copied from here
2555 = the previous added file was copied from here
2559 """
2556 """
2560
2557
2561 all = opts['all']
2558 all = opts['all']
2562 node1, node2 = cmdutil.revpair(repo, opts.get('rev'))
2559 node1, node2 = cmdutil.revpair(repo, opts.get('rev'))
2563
2560
2564 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2561 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2565 cwd = (pats and repo.getcwd()) or ''
2562 cwd = (pats and repo.getcwd()) or ''
2566 modified, added, removed, deleted, unknown, ignored, clean = [
2563 modified, added, removed, deleted, unknown, ignored, clean = [
2567 n for n in repo.status(node1=node1, node2=node2, files=files,
2564 n for n in repo.status(node1=node1, node2=node2, files=files,
2568 match=matchfn,
2565 match=matchfn,
2569 list_ignored=all or opts['ignored'],
2566 list_ignored=all or opts['ignored'],
2570 list_clean=all or opts['clean'])]
2567 list_clean=all or opts['clean'])]
2571
2568
2572 changetypes = (('modified', 'M', modified),
2569 changetypes = (('modified', 'M', modified),
2573 ('added', 'A', added),
2570 ('added', 'A', added),
2574 ('removed', 'R', removed),
2571 ('removed', 'R', removed),
2575 ('deleted', '!', deleted),
2572 ('deleted', '!', deleted),
2576 ('unknown', '?', unknown),
2573 ('unknown', '?', unknown),
2577 ('ignored', 'I', ignored))
2574 ('ignored', 'I', ignored))
2578
2575
2579 explicit_changetypes = changetypes + (('clean', 'C', clean),)
2576 explicit_changetypes = changetypes + (('clean', 'C', clean),)
2580
2577
2581 end = opts['print0'] and '\0' or '\n'
2578 end = opts['print0'] and '\0' or '\n'
2582
2579
2583 for opt, char, changes in ([ct for ct in explicit_changetypes
2580 for opt, char, changes in ([ct for ct in explicit_changetypes
2584 if all or opts[ct[0]]]
2581 if all or opts[ct[0]]]
2585 or changetypes):
2582 or changetypes):
2586 if opts['no_status']:
2583 if opts['no_status']:
2587 format = "%%s%s" % end
2584 format = "%%s%s" % end
2588 else:
2585 else:
2589 format = "%s %%s%s" % (char, end)
2586 format = "%s %%s%s" % (char, end)
2590
2587
2591 for f in changes:
2588 for f in changes:
2592 ui.write(format % repo.pathto(f, cwd))
2589 ui.write(format % repo.pathto(f, cwd))
2593 if ((all or opts.get('copies')) and not opts.get('no_status')):
2590 if ((all or opts.get('copies')) and not opts.get('no_status')):
2594 copied = repo.dirstate.copied(f)
2591 copied = repo.dirstate.copied(f)
2595 if copied:
2592 if copied:
2596 ui.write(' %s%s' % (repo.pathto(copied, cwd), end))
2593 ui.write(' %s%s' % (repo.pathto(copied, cwd), end))
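# The two-step format construction above is compact but easy to misread;
# the sketch below is an equivalent spelled-out version (a hypothetical
# helper, defined purely for illustration and never called):
def _status_line_example(char, path, print0=False, no_status=False):
    # illustration only: mirrors the "%%s%s" / "%s %%s%s" templates above
    end = print0 and '\0' or '\n'
    if no_status:
        return "%s%s" % (path, end)
    return "%s %s%s" % (char, path, end)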
2597
2594
2598 def tag(ui, repo, name, rev_=None, **opts):
2595 def tag(ui, repo, name, rev_=None, **opts):
2599 """add a tag for the current or given revision
2596 """add a tag for the current or given revision
2600
2597
2601 Name a particular revision using <name>.
2598 Name a particular revision using <name>.
2602
2599
2603 Tags are used to name particular revisions of the repository and are
2600 Tags are used to name particular revisions of the repository and are
2604 very useful to compare different revisions, to go back to significant
2601 very useful to compare different revisions, to go back to significant
2605 earlier versions or to mark branch points as releases, etc.
2602 earlier versions or to mark branch points as releases, etc.
2606
2603
2607 If no revision is given, the parent of the working directory is used,
2604 If no revision is given, the parent of the working directory is used,
2608 or tip if no revision is checked out.
2605 or tip if no revision is checked out.
2609
2606
2610 To facilitate version control, distribution, and merging of tags,
2607 To facilitate version control, distribution, and merging of tags,
2611 they are stored as a file named ".hgtags" which is managed
2608 they are stored as a file named ".hgtags" which is managed
2612 similarly to other project files and can be hand-edited if
2609 similarly to other project files and can be hand-edited if
2613 necessary. The file '.hg/localtags' is used for local tags (not
2610 necessary. The file '.hg/localtags' is used for local tags (not
2614 shared among repositories).
2611 shared among repositories).
2615 """
2612 """
2616 if name in ['tip', '.', 'null']:
2613 if name in ['tip', '.', 'null']:
2617 raise util.Abort(_("the name '%s' is reserved") % name)
2614 raise util.Abort(_("the name '%s' is reserved") % name)
2618 if rev_ is not None:
2615 if rev_ is not None:
2619 ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
2616 ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
2620 "please use 'hg tag [-r REV] NAME' instead\n"))
2617 "please use 'hg tag [-r REV] NAME' instead\n"))
2621 if opts['rev']:
2618 if opts['rev']:
2622 raise util.Abort(_("use only one form to specify the revision"))
2619 raise util.Abort(_("use only one form to specify the revision"))
2623 if opts['rev'] and opts['remove']:
2620 if opts['rev'] and opts['remove']:
2624 raise util.Abort(_("--rev and --remove are incompatible"))
2621 raise util.Abort(_("--rev and --remove are incompatible"))
2625 if opts['rev']:
2622 if opts['rev']:
2626 rev_ = opts['rev']
2623 rev_ = opts['rev']
2627 message = opts['message']
2624 message = opts['message']
2628 if opts['remove']:
2625 if opts['remove']:
2629 if not name in repo.tags():
2626 if not name in repo.tags():
2630 raise util.Abort(_('tag %s does not exist') % name)
2627 raise util.Abort(_('tag %s does not exist') % name)
2631 rev_ = nullid
2628 rev_ = nullid
2632 if not message:
2629 if not message:
2633 message = _('Removed tag %s') % name
2630 message = _('Removed tag %s') % name
2634 elif name in repo.tags() and not opts['force']:
2631 elif name in repo.tags() and not opts['force']:
2635 raise util.Abort(_('a tag named %s already exists (use -f to force)')
2632 raise util.Abort(_('a tag named %s already exists (use -f to force)')
2636 % name)
2633 % name)
2637 if not rev_ and repo.dirstate.parents()[1] != nullid:
2634 if not rev_ and repo.dirstate.parents()[1] != nullid:
2638 raise util.Abort(_('uncommitted merge - please provide a '
2635 raise util.Abort(_('uncommitted merge - please provide a '
2639 'specific revision'))
2636 'specific revision'))
2640 r = repo.changectx(rev_).node()
2637 r = repo.changectx(rev_).node()
2641
2638
2642 if not message:
2639 if not message:
2643 message = _('Added tag %s for changeset %s') % (name, short(r))
2640 message = _('Added tag %s for changeset %s') % (name, short(r))
2644
2641
2645 repo.tag(name, r, message, opts['local'], opts['user'], opts['date'])
2642 repo.tag(name, r, message, opts['local'], opts['user'], opts['date'])
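# Example invocations exercising the branches above (illustration only):
#
#     hg tag v1.0                    # tag the working directory's parent
#     hg tag -r 42 -m 'tag rc1' v1.0 # tag revision 42 with a custom message
#     hg tag --remove v1.0           # record the removal by tagging nullid
#     hg tag -l wip                  # local tag, kept in .hg/localtags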
2646
2643
2647 def tags(ui, repo):
2644 def tags(ui, repo):
2648 """list repository tags
2645 """list repository tags
2649
2646
2650 List the repository tags.
2647 List the repository tags.
2651
2648
2652 This lists both regular and local tags.
2649 This lists both regular and local tags.
2653 """
2650 """
2654
2651
2655 l = repo.tagslist()
2652 l = repo.tagslist()
2656 l.reverse()
2653 l.reverse()
2657 hexfunc = ui.debugflag and hex or short
2654 hexfunc = ui.debugflag and hex or short
2658 for t, n in l:
2655 for t, n in l:
2659 try:
2656 try:
2660 hn = hexfunc(n)
2657 hn = hexfunc(n)
2661 r = "%5d:%s" % (repo.changelog.rev(n), hexfunc(n))
2658 r = "%5d:%s" % (repo.changelog.rev(n), hexfunc(n))
2662 except revlog.LookupError:
2659 except revlog.LookupError:
2663 r = " ?:%s" % hn
2660 r = " ?:%s" % hn
2664 if ui.quiet:
2661 if ui.quiet:
2665 ui.write("%s\n" % t)
2662 ui.write("%s\n" % t)
2666 else:
2663 else:
2667 spaces = " " * (30 - util.locallen(t))
2664 spaces = " " * (30 - util.locallen(t))
2668 ui.write("%s%s %s\n" % (t, spaces, r))
2665 ui.write("%s%s %s\n" % (t, spaces, r))
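# Note on the padding above: the tag name is padded out to a 30-character
# column (util.locallen apparently measures the length in the local
# encoding) so the "rev:node" part lines up; a name longer than 30
# characters simply gets no padding, since " " * <negative> is the empty
# string in Python.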
2669
2666
2670 def tip(ui, repo, **opts):
2667 def tip(ui, repo, **opts):
2671 """show the tip revision
2668 """show the tip revision
2672
2669
2673 Show the tip revision.
2670 Show the tip revision.
2674 """
2671 """
2675 cmdutil.show_changeset(ui, repo, opts).show(nullrev+repo.changelog.count())
2672 cmdutil.show_changeset(ui, repo, opts).show(nullrev+repo.changelog.count())
2676
2673
2677 def unbundle(ui, repo, fname1, *fnames, **opts):
2674 def unbundle(ui, repo, fname1, *fnames, **opts):
2678 """apply one or more changegroup files
2675 """apply one or more changegroup files
2679
2676
2680 Apply one or more compressed changegroup files generated by the
2677 Apply one or more compressed changegroup files generated by the
2681 bundle command.
2678 bundle command.
2682 """
2679 """
2683 fnames = (fname1,) + fnames
2680 fnames = (fname1,) + fnames
2684 result = None
2685 wasempty = repo.changelog.count() == 0
2686 for fname in fnames:
2681 for fname in fnames:
2687 if os.path.exists(fname):
2682 if os.path.exists(fname):
2688 f = open(fname, "rb")
2683 f = open(fname, "rb")
2689 else:
2684 else:
2690 f = urllib.urlopen(fname)
2685 f = urllib.urlopen(fname)
2691 gen = changegroup.readbundle(f, fname)
2686 gen = changegroup.readbundle(f, fname)
2692 modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
2687 modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
2693
2688
2694 return postincoming(ui, repo, modheads, opts['update'], wasempty)
2689 return postincoming(ui, repo, modheads, opts['update'])
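# wasempty is sampled before the loop because addchangegroup() grows the
# changelog as each bundle is applied, so by the time the loop finishes the
# repository is no longer empty even if it started out that way.  What
# postincoming() (defined earlier in this file) does with the flag is not
# shown here; presumably it special-cases a repository that received its
# very first changesets from this unbundle.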
2695
2690
2696 def update(ui, repo, node=None, rev=None, clean=False, date=None):
2691 def update(ui, repo, node=None, rev=None, clean=False, date=None):
2697 """update working directory
2692 """update working directory
2698
2693
2699 Update the working directory to the specified revision, or the
2694 Update the working directory to the specified revision, or the
2700 tip of the current branch if none is specified.
2695 tip of the current branch if none is specified.
2701
2696
2702 If there are no outstanding changes in the working directory and
2697 If there are no outstanding changes in the working directory and
2703 there is a linear relationship between the current version and the
2698 there is a linear relationship between the current version and the
2704 requested version, the result is the requested version.
2699 requested version, the result is the requested version.
2705
2700
2706 To merge the working directory with another revision, use the
2701 To merge the working directory with another revision, use the
2707 merge command.
2702 merge command.
2708
2703
2709 By default, update will refuse to run if doing so would require
2704 By default, update will refuse to run if doing so would require
2710 discarding local changes.
2705 discarding local changes.
2711 """
2706 """
2712 if rev and node:
2707 if rev and node:
2713 raise util.Abort(_("please specify just one revision"))
2708 raise util.Abort(_("please specify just one revision"))
2714
2709
2715 if not rev:
2710 if not rev:
2716 rev = node
2711 rev = node
2717
2712
2718 if date:
2713 if date:
2719 if rev:
2714 if rev:
2720 raise util.Abort(_("you can't specify a revision and a date"))
2715 raise util.Abort(_("you can't specify a revision and a date"))
2721 rev = cmdutil.finddate(ui, repo, date)
2716 rev = cmdutil.finddate(ui, repo, date)
2722
2717
2723 if clean:
2718 if clean:
2724 return hg.clean(repo, rev)
2719 return hg.clean(repo, rev)
2725 else:
2720 else:
2726 return hg.update(repo, rev)
2721 return hg.update(repo, rev)
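# Summary of the two exits above: -C/--clean funnels into hg.clean(), which
# throws local changes away, while the default path goes through
# hg.update(), which refuses to run if it would have to discard local
# changes.  Illustrative invocations:
#
#     hg update 1.0          # linear update, local changes are kept
#     hg update -C tip       # discard local changes and take tip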
2727
2722
2728 def verify(ui, repo):
2723 def verify(ui, repo):
2729 """verify the integrity of the repository
2724 """verify the integrity of the repository
2730
2725
2731 Verify the integrity of the current repository.
2726 Verify the integrity of the current repository.
2732
2727
2733 This will perform an extensive check of the repository's
2728 This will perform an extensive check of the repository's
2734 integrity, validating the hashes and checksums of each entry in
2729 integrity, validating the hashes and checksums of each entry in
2735 the changelog, manifest, and tracked files, as well as the
2730 the changelog, manifest, and tracked files, as well as the
2736 integrity of their crosslinks and indices.
2731 integrity of their crosslinks and indices.
2737 """
2732 """
2738 return hg.verify(repo)
2733 return hg.verify(repo)
2739
2734
2740 def version_(ui):
2735 def version_(ui):
2741 """output version and copyright information"""
2736 """output version and copyright information"""
2742 ui.write(_("Mercurial Distributed SCM (version %s)\n")
2737 ui.write(_("Mercurial Distributed SCM (version %s)\n")
2743 % version.get_version())
2738 % version.get_version())
2744 ui.status(_(
2739 ui.status(_(
2745 "\nCopyright (C) 2005-2007 Matt Mackall <mpm@selenic.com> and others\n"
2740 "\nCopyright (C) 2005-2007 Matt Mackall <mpm@selenic.com> and others\n"
2746 "This is free software; see the source for copying conditions. "
2741 "This is free software; see the source for copying conditions. "
2747 "There is NO\nwarranty; "
2742 "There is NO\nwarranty; "
2748 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
2743 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
2749 ))
2744 ))
2750
2745
2751 # Command options and aliases are listed here, alphabetically
2746 # Command options and aliases are listed here, alphabetically
2752
2747
2753 globalopts = [
2748 globalopts = [
2754 ('R', 'repository', '',
2749 ('R', 'repository', '',
2755 _('repository root directory or symbolic path name')),
2750 _('repository root directory or symbolic path name')),
2756 ('', 'cwd', '', _('change working directory')),
2751 ('', 'cwd', '', _('change working directory')),
2757 ('y', 'noninteractive', None,
2752 ('y', 'noninteractive', None,
2758 _('do not prompt, assume \'yes\' for any required answers')),
2753 _('do not prompt, assume \'yes\' for any required answers')),
2759 ('q', 'quiet', None, _('suppress output')),
2754 ('q', 'quiet', None, _('suppress output')),
2760 ('v', 'verbose', None, _('enable additional output')),
2755 ('v', 'verbose', None, _('enable additional output')),
2761 ('', 'config', [], _('set/override config option')),
2756 ('', 'config', [], _('set/override config option')),
2762 ('', 'debug', None, _('enable debugging output')),
2757 ('', 'debug', None, _('enable debugging output')),
2763 ('', 'debugger', None, _('start debugger')),
2758 ('', 'debugger', None, _('start debugger')),
2764 ('', 'encoding', util._encoding, _('set the charset encoding')),
2759 ('', 'encoding', util._encoding, _('set the charset encoding')),
2765 ('', 'encodingmode', util._encodingmode, _('set the charset encoding mode')),
2760 ('', 'encodingmode', util._encodingmode, _('set the charset encoding mode')),
2766 ('', 'lsprof', None, _('print improved command execution profile')),
2761 ('', 'lsprof', None, _('print improved command execution profile')),
2767 ('', 'traceback', None, _('print traceback on exception')),
2762 ('', 'traceback', None, _('print traceback on exception')),
2768 ('', 'time', None, _('time how long the command takes')),
2763 ('', 'time', None, _('time how long the command takes')),
2769 ('', 'profile', None, _('print command execution profile')),
2764 ('', 'profile', None, _('print command execution profile')),
2770 ('', 'version', None, _('output version information and exit')),
2765 ('', 'version', None, _('output version information and exit')),
2771 ('h', 'help', None, _('display help and exit')),
2766 ('h', 'help', None, _('display help and exit')),
2772 ]
2767 ]
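# Every entry above follows the (short flag, long name, default, help text)
# layout the option parser expects.  As far as the defaults go: None makes
# a plain boolean switch, a list (as with --config) lets the option be
# repeated on the command line, and a string or number means the option
# takes a value; this reading is inferred from the entries themselves
# rather than stated anywhere in this file.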
2773
2768
2774 dryrunopts = [('n', 'dry-run', None,
2769 dryrunopts = [('n', 'dry-run', None,
2775 _('do not perform actions, just print output'))]
2770 _('do not perform actions, just print output'))]
2776
2771
2777 remoteopts = [
2772 remoteopts = [
2778 ('e', 'ssh', '', _('specify ssh command to use')),
2773 ('e', 'ssh', '', _('specify ssh command to use')),
2779 ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
2774 ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
2780 ]
2775 ]
2781
2776
2782 walkopts = [
2777 walkopts = [
2783 ('I', 'include', [], _('include names matching the given patterns')),
2778 ('I', 'include', [], _('include names matching the given patterns')),
2784 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2779 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2785 ]
2780 ]
2786
2781
2787 commitopts = [
2782 commitopts = [
2788 ('m', 'message', '', _('use <text> as commit message')),
2783 ('m', 'message', '', _('use <text> as commit message')),
2789 ('l', 'logfile', '', _('read commit message from <file>')),
2784 ('l', 'logfile', '', _('read commit message from <file>')),
2790 ]
2785 ]
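# The shared lists above (dryrunopts, remoteopts, walkopts, commitopts) are
# simply concatenated onto a command's private options in the table below,
# so for instance "commit" accepts -I/-X from walkopts and -m/-l from
# commitopts on top of its own flags.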
2791
2786
2792 table = {
2787 table = {
2793 "^add": (add, walkopts + dryrunopts, _('hg add [OPTION]... [FILE]...')),
2788 "^add": (add, walkopts + dryrunopts, _('hg add [OPTION]... [FILE]...')),
2794 "addremove":
2789 "addremove":
2795 (addremove,
2790 (addremove,
2796 [('s', 'similarity', '',
2791 [('s', 'similarity', '',
2797 _('guess renamed files by similarity (0<=s<=100)')),
2792 _('guess renamed files by similarity (0<=s<=100)')),
2798 ] + walkopts + dryrunopts,
2793 ] + walkopts + dryrunopts,
2799 _('hg addremove [OPTION]... [FILE]...')),
2794 _('hg addremove [OPTION]... [FILE]...')),
2800 "^annotate":
2795 "^annotate":
2801 (annotate,
2796 (annotate,
2802 [('r', 'rev', '', _('annotate the specified revision')),
2797 [('r', 'rev', '', _('annotate the specified revision')),
2803 ('f', 'follow', None, _('follow file copies and renames')),
2798 ('f', 'follow', None, _('follow file copies and renames')),
2804 ('a', 'text', None, _('treat all files as text')),
2799 ('a', 'text', None, _('treat all files as text')),
2805 ('u', 'user', None, _('list the author')),
2800 ('u', 'user', None, _('list the author')),
2806 ('d', 'date', None, _('list the date')),
2801 ('d', 'date', None, _('list the date')),
2807 ('n', 'number', None, _('list the revision number (default)')),
2802 ('n', 'number', None, _('list the revision number (default)')),
2808 ('c', 'changeset', None, _('list the changeset')),
2803 ('c', 'changeset', None, _('list the changeset')),
2809 ('l', 'line-number', None,
2804 ('l', 'line-number', None,
2810 _('show line number at the first appearance'))
2805 _('show line number at the first appearance'))
2811 ] + walkopts,
2806 ] + walkopts,
2812 _('hg annotate [-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')),
2807 _('hg annotate [-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')),
2813 "archive":
2808 "archive":
2814 (archive,
2809 (archive,
2815 [('', 'no-decode', None, _('do not pass files through decoders')),
2810 [('', 'no-decode', None, _('do not pass files through decoders')),
2816 ('p', 'prefix', '', _('directory prefix for files in archive')),
2811 ('p', 'prefix', '', _('directory prefix for files in archive')),
2817 ('r', 'rev', '', _('revision to distribute')),
2812 ('r', 'rev', '', _('revision to distribute')),
2818 ('t', 'type', '', _('type of distribution to create')),
2813 ('t', 'type', '', _('type of distribution to create')),
2819 ] + walkopts,
2814 ] + walkopts,
2820 _('hg archive [OPTION]... DEST')),
2815 _('hg archive [OPTION]... DEST')),
2821 "backout":
2816 "backout":
2822 (backout,
2817 (backout,
2823 [('', 'merge', None,
2818 [('', 'merge', None,
2824 _('merge with old dirstate parent after backout')),
2819 _('merge with old dirstate parent after backout')),
2825 ('d', 'date', '', _('record datecode as commit date')),
2820 ('d', 'date', '', _('record datecode as commit date')),
2826 ('', 'parent', '', _('parent to choose when backing out merge')),
2821 ('', 'parent', '', _('parent to choose when backing out merge')),
2827 ('u', 'user', '', _('record user as committer')),
2822 ('u', 'user', '', _('record user as committer')),
2828 ('r', 'rev', '', _('revision to backout')),
2823 ('r', 'rev', '', _('revision to backout')),
2829 ] + walkopts + commitopts,
2824 ] + walkopts + commitopts,
2830 _('hg backout [OPTION]... [-r] REV')),
2825 _('hg backout [OPTION]... [-r] REV')),
2831 "branch":
2826 "branch":
2832 (branch,
2827 (branch,
2833 [('f', 'force', None,
2828 [('f', 'force', None,
2834 _('set branch name even if it shadows an existing branch'))],
2829 _('set branch name even if it shadows an existing branch'))],
2835 _('hg branch [NAME]')),
2830 _('hg branch [NAME]')),
2836 "branches":
2831 "branches":
2837 (branches,
2832 (branches,
2838 [('a', 'active', False,
2833 [('a', 'active', False,
2839 _('show only branches that have unmerged heads'))],
2834 _('show only branches that have unmerged heads'))],
2840 _('hg branches [-a]')),
2835 _('hg branches [-a]')),
2841 "bundle":
2836 "bundle":
2842 (bundle,
2837 (bundle,
2843 [('f', 'force', None,
2838 [('f', 'force', None,
2844 _('run even when remote repository is unrelated')),
2839 _('run even when remote repository is unrelated')),
2845 ('r', 'rev', [],
2840 ('r', 'rev', [],
2846 _('a changeset you would like to bundle')),
2841 _('a changeset you would like to bundle')),
2847 ('', 'base', [],
2842 ('', 'base', [],
2848 _('a base changeset to specify instead of a destination')),
2843 _('a base changeset to specify instead of a destination')),
2849 ] + remoteopts,
2844 ] + remoteopts,
2850 _('hg bundle [-f] [-r REV]... [--base REV]... FILE [DEST]')),
2845 _('hg bundle [-f] [-r REV]... [--base REV]... FILE [DEST]')),
2851 "cat":
2846 "cat":
2852 (cat,
2847 (cat,
2853 [('o', 'output', '', _('print output to file with formatted name')),
2848 [('o', 'output', '', _('print output to file with formatted name')),
2854 ('r', 'rev', '', _('print the given revision')),
2849 ('r', 'rev', '', _('print the given revision')),
2855 ] + walkopts,
2850 ] + walkopts,
2856 _('hg cat [OPTION]... FILE...')),
2851 _('hg cat [OPTION]... FILE...')),
2857 "^clone":
2852 "^clone":
2858 (clone,
2853 (clone,
2859 [('U', 'noupdate', None, _('do not update the new working directory')),
2854 [('U', 'noupdate', None, _('do not update the new working directory')),
2860 ('r', 'rev', [],
2855 ('r', 'rev', [],
2861 _('a changeset you would like to have after cloning')),
2856 _('a changeset you would like to have after cloning')),
2862 ('', 'pull', None, _('use pull protocol to copy metadata')),
2857 ('', 'pull', None, _('use pull protocol to copy metadata')),
2863 ('', 'uncompressed', None,
2858 ('', 'uncompressed', None,
2864 _('use uncompressed transfer (fast over LAN)')),
2859 _('use uncompressed transfer (fast over LAN)')),
2865 ] + remoteopts,
2860 ] + remoteopts,
2866 _('hg clone [OPTION]... SOURCE [DEST]')),
2861 _('hg clone [OPTION]... SOURCE [DEST]')),
2867 "^commit|ci":
2862 "^commit|ci":
2868 (commit,
2863 (commit,
2869 [('A', 'addremove', None,
2864 [('A', 'addremove', None,
2870 _('mark new/missing files as added/removed before committing')),
2865 _('mark new/missing files as added/removed before committing')),
2871 ('d', 'date', '', _('record datecode as commit date')),
2866 ('d', 'date', '', _('record datecode as commit date')),
2872 ('u', 'user', '', _('record user as committer')),
2867 ('u', 'user', '', _('record user as committer')),
2873 ] + walkopts + commitopts,
2868 ] + walkopts + commitopts,
2874 _('hg commit [OPTION]... [FILE]...')),
2869 _('hg commit [OPTION]... [FILE]...')),
2875 "copy|cp":
2870 "copy|cp":
2876 (copy,
2871 (copy,
2877 [('A', 'after', None, _('record a copy that has already occurred')),
2872 [('A', 'after', None, _('record a copy that has already occurred')),
2878 ('f', 'force', None,
2873 ('f', 'force', None,
2879 _('forcibly copy over an existing managed file')),
2874 _('forcibly copy over an existing managed file')),
2880 ] + walkopts + dryrunopts,
2875 ] + walkopts + dryrunopts,
2881 _('hg copy [OPTION]... [SOURCE]... DEST')),
2876 _('hg copy [OPTION]... [SOURCE]... DEST')),
2882 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2877 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2883 "debugcomplete":
2878 "debugcomplete":
2884 (debugcomplete,
2879 (debugcomplete,
2885 [('o', 'options', None, _('show the command options'))],
2880 [('o', 'options', None, _('show the command options'))],
2886 _('debugcomplete [-o] CMD')),
2881 _('debugcomplete [-o] CMD')),
2887 "debuginstall": (debuginstall, [], _('debuginstall')),
2882 "debuginstall": (debuginstall, [], _('debuginstall')),
2888 "debugrebuildstate":
2883 "debugrebuildstate":
2889 (debugrebuildstate,
2884 (debugrebuildstate,
2890 [('r', 'rev', '', _('revision to rebuild to'))],
2885 [('r', 'rev', '', _('revision to rebuild to'))],
2891 _('debugrebuildstate [-r REV] [REV]')),
2886 _('debugrebuildstate [-r REV] [REV]')),
2892 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
2887 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
2893 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
2888 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
2894 "debugstate": (debugstate, [], _('debugstate')),
2889 "debugstate": (debugstate, [], _('debugstate')),
2895 "debugdate":
2890 "debugdate":
2896 (debugdate,
2891 (debugdate,
2897 [('e', 'extended', None, _('try extended date formats'))],
2892 [('e', 'extended', None, _('try extended date formats'))],
2898 _('debugdate [-e] DATE [RANGE]')),
2893 _('debugdate [-e] DATE [RANGE]')),
2899 "debugdata": (debugdata, [], _('debugdata FILE REV')),
2894 "debugdata": (debugdata, [], _('debugdata FILE REV')),
2900 "debugindex": (debugindex, [], _('debugindex FILE')),
2895 "debugindex": (debugindex, [], _('debugindex FILE')),
2901 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
2896 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
2902 "debugrename":
2897 "debugrename":
2903 (debugrename,
2898 (debugrename,
2904 [('r', 'rev', '', _('revision to debug'))],
2899 [('r', 'rev', '', _('revision to debug'))],
2905 _('debugrename [-r REV] FILE')),
2900 _('debugrename [-r REV] FILE')),
2906 "debugwalk": (debugwalk, walkopts, _('debugwalk [OPTION]... [FILE]...')),
2901 "debugwalk": (debugwalk, walkopts, _('debugwalk [OPTION]... [FILE]...')),
2907 "^diff":
2902 "^diff":
2908 (diff,
2903 (diff,
2909 [('r', 'rev', [], _('revision')),
2904 [('r', 'rev', [], _('revision')),
2910 ('a', 'text', None, _('treat all files as text')),
2905 ('a', 'text', None, _('treat all files as text')),
2911 ('p', 'show-function', None,
2906 ('p', 'show-function', None,
2912 _('show which function each change is in')),
2907 _('show which function each change is in')),
2913 ('g', 'git', None, _('use git extended diff format')),
2908 ('g', 'git', None, _('use git extended diff format')),
2914 ('', 'nodates', None, _("don't include dates in diff headers")),
2909 ('', 'nodates', None, _("don't include dates in diff headers")),
2915 ('w', 'ignore-all-space', None,
2910 ('w', 'ignore-all-space', None,
2916 _('ignore white space when comparing lines')),
2911 _('ignore white space when comparing lines')),
2917 ('b', 'ignore-space-change', None,
2912 ('b', 'ignore-space-change', None,
2918 _('ignore changes in the amount of white space')),
2913 _('ignore changes in the amount of white space')),
2919 ('B', 'ignore-blank-lines', None,
2914 ('B', 'ignore-blank-lines', None,
2920 _('ignore changes whose lines are all blank')),
2915 _('ignore changes whose lines are all blank')),
2921 ] + walkopts,
2916 ] + walkopts,
2922 _('hg diff [OPTION]... [-r REV1 [-r REV2]] [FILE]...')),
2917 _('hg diff [OPTION]... [-r REV1 [-r REV2]] [FILE]...')),
2923 "^export":
2918 "^export":
2924 (export,
2919 (export,
2925 [('o', 'output', '', _('print output to file with formatted name')),
2920 [('o', 'output', '', _('print output to file with formatted name')),
2926 ('a', 'text', None, _('treat all files as text')),
2921 ('a', 'text', None, _('treat all files as text')),
2927 ('g', 'git', None, _('use git extended diff format')),
2922 ('g', 'git', None, _('use git extended diff format')),
2928 ('', 'nodates', None, _("don't include dates in diff headers")),
2923 ('', 'nodates', None, _("don't include dates in diff headers")),
2929 ('', 'switch-parent', None, _('diff against the second parent'))],
2924 ('', 'switch-parent', None, _('diff against the second parent'))],
2930 _('hg export [OPTION]... [-o OUTFILESPEC] REV...')),
2925 _('hg export [OPTION]... [-o OUTFILESPEC] REV...')),
2931 "grep":
2926 "grep":
2932 (grep,
2927 (grep,
2933 [('0', 'print0', None, _('end fields with NUL')),
2928 [('0', 'print0', None, _('end fields with NUL')),
2934 ('', 'all', None, _('print all revisions that match')),
2929 ('', 'all', None, _('print all revisions that match')),
2935 ('f', 'follow', None,
2930 ('f', 'follow', None,
2936 _('follow changeset history, or file history across copies and renames')),
2931 _('follow changeset history, or file history across copies and renames')),
2937 ('i', 'ignore-case', None, _('ignore case when matching')),
2932 ('i', 'ignore-case', None, _('ignore case when matching')),
2938 ('l', 'files-with-matches', None,
2933 ('l', 'files-with-matches', None,
2939 _('print only filenames and revs that match')),
2934 _('print only filenames and revs that match')),
2940 ('n', 'line-number', None, _('print matching line numbers')),
2935 ('n', 'line-number', None, _('print matching line numbers')),
2941 ('r', 'rev', [], _('search in given revision range')),
2936 ('r', 'rev', [], _('search in given revision range')),
2942 ('u', 'user', None, _('print user who committed change')),
2937 ('u', 'user', None, _('print user who committed change')),
2943 ] + walkopts,
2938 ] + walkopts,
2944 _('hg grep [OPTION]... PATTERN [FILE]...')),
2939 _('hg grep [OPTION]... PATTERN [FILE]...')),
2945 "heads":
2940 "heads":
2946 (heads,
2941 (heads,
2947 [('', 'style', '', _('display using template map file')),
2942 [('', 'style', '', _('display using template map file')),
2948 ('r', 'rev', '', _('show only heads which are descendants of rev')),
2943 ('r', 'rev', '', _('show only heads which are descendants of rev')),
2949 ('', 'template', '', _('display with template'))],
2944 ('', 'template', '', _('display with template'))],
2950 _('hg heads [-r REV] [REV]...')),
2945 _('hg heads [-r REV] [REV]...')),
2951 "help": (help_, [], _('hg help [COMMAND]')),
2946 "help": (help_, [], _('hg help [COMMAND]')),
2952 "identify|id":
2947 "identify|id":
2953 (identify,
2948 (identify,
2954 [('r', 'rev', '', _('identify the specified rev')),
2949 [('r', 'rev', '', _('identify the specified rev')),
2955 ('n', 'num', None, _('show local revision number')),
2950 ('n', 'num', None, _('show local revision number')),
2956 ('i', 'id', None, _('show global revision id')),
2951 ('i', 'id', None, _('show global revision id')),
2957 ('b', 'branch', None, _('show branch')),
2952 ('b', 'branch', None, _('show branch')),
2958 ('t', 'tags', None, _('show tags'))],
2953 ('t', 'tags', None, _('show tags'))],
2959 _('hg identify [-nibt] [-r REV] [SOURCE]')),
2954 _('hg identify [-nibt] [-r REV] [SOURCE]')),
2960 "import|patch":
2955 "import|patch":
2961 (import_,
2956 (import_,
2962 [('p', 'strip', 1,
2957 [('p', 'strip', 1,
2963 _('directory strip option for patch. This has the same\n'
2958 _('directory strip option for patch. This has the same\n'
2964 'meaning as the corresponding patch option')),
2959 'meaning as the corresponding patch option')),
2965 ('b', 'base', '', _('base path')),
2960 ('b', 'base', '', _('base path')),
2966 ('f', 'force', None,
2961 ('f', 'force', None,
2967 _('skip check for outstanding uncommitted changes')),
2962 _('skip check for outstanding uncommitted changes')),
2968 ('', 'exact', None,
2963 ('', 'exact', None,
2969 _('apply patch to the nodes from which it was generated')),
2964 _('apply patch to the nodes from which it was generated')),
2970 ('', 'import-branch', None,
2965 ('', 'import-branch', None,
2971 _('use any branch information in patch (implied by --exact)'))] + commitopts,
2966 _('use any branch information in patch (implied by --exact)'))] + commitopts,
2972 _('hg import [-p NUM] [-m MESSAGE] [-f] PATCH...')),
2967 _('hg import [-p NUM] [-m MESSAGE] [-f] PATCH...')),
2973 "incoming|in": (incoming,
2968 "incoming|in": (incoming,
2974 [('M', 'no-merges', None, _('do not show merges')),
2969 [('M', 'no-merges', None, _('do not show merges')),
2975 ('f', 'force', None,
2970 ('f', 'force', None,
2976 _('run even when remote repository is unrelated')),
2971 _('run even when remote repository is unrelated')),
2977 ('', 'style', '', _('display using template map file')),
2972 ('', 'style', '', _('display using template map file')),
2978 ('n', 'newest-first', None, _('show newest record first')),
2973 ('n', 'newest-first', None, _('show newest record first')),
2979 ('', 'bundle', '', _('file to store the bundles into')),
2974 ('', 'bundle', '', _('file to store the bundles into')),
2980 ('p', 'patch', None, _('show patch')),
2975 ('p', 'patch', None, _('show patch')),
2981 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
2976 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
2982 ('', 'template', '', _('display with template')),
2977 ('', 'template', '', _('display with template')),
2983 ] + remoteopts,
2978 ] + remoteopts,
2984 _('hg incoming [-p] [-n] [-M] [-f] [-r REV]...'
2979 _('hg incoming [-p] [-n] [-M] [-f] [-r REV]...'
2985 ' [--bundle FILENAME] [SOURCE]')),
2980 ' [--bundle FILENAME] [SOURCE]')),
2986 "^init":
2981 "^init":
2987 (init,
2982 (init,
2988 remoteopts,
2983 remoteopts,
2989 _('hg init [-e CMD] [--remotecmd CMD] [DEST]')),
2984 _('hg init [-e CMD] [--remotecmd CMD] [DEST]')),
2990 "locate":
2985 "locate":
2991 (locate,
2986 (locate,
2992 [('r', 'rev', '', _('search the repository as it stood at rev')),
2987 [('r', 'rev', '', _('search the repository as it stood at rev')),
2993 ('0', 'print0', None,
2988 ('0', 'print0', None,
2994 _('end filenames with NUL, for use with xargs')),
2989 _('end filenames with NUL, for use with xargs')),
2995 ('f', 'fullpath', None,
2990 ('f', 'fullpath', None,
2996 _('print complete paths from the filesystem root')),
2991 _('print complete paths from the filesystem root')),
2997 ] + walkopts,
2992 ] + walkopts,
2998 _('hg locate [OPTION]... [PATTERN]...')),
2993 _('hg locate [OPTION]... [PATTERN]...')),
2999 "^log|history":
2994 "^log|history":
3000 (log,
2995 (log,
3001 [('f', 'follow', None,
2996 [('f', 'follow', None,
3002 _('follow changeset history, or file history across copies and renames')),
2997 _('follow changeset history, or file history across copies and renames')),
3003 ('', 'follow-first', None,
2998 ('', 'follow-first', None,
3004 _('only follow the first parent of merge changesets')),
2999 _('only follow the first parent of merge changesets')),
3005 ('d', 'date', '', _('show revs matching date spec')),
3000 ('d', 'date', '', _('show revs matching date spec')),
3006 ('C', 'copies', None, _('show copied files')),
3001 ('C', 'copies', None, _('show copied files')),
3007 ('k', 'keyword', [], _('do case-insensitive search for a keyword')),
3002 ('k', 'keyword', [], _('do case-insensitive search for a keyword')),
3008 ('l', 'limit', '', _('limit number of changes displayed')),
3003 ('l', 'limit', '', _('limit number of changes displayed')),
3009 ('r', 'rev', [], _('show the specified revision or range')),
3004 ('r', 'rev', [], _('show the specified revision or range')),
3010 ('', 'removed', None, _('include revs where files were removed')),
3005 ('', 'removed', None, _('include revs where files were removed')),
3011 ('M', 'no-merges', None, _('do not show merges')),
3006 ('M', 'no-merges', None, _('do not show merges')),
3012 ('', 'style', '', _('display using template map file')),
3007 ('', 'style', '', _('display using template map file')),
3013 ('m', 'only-merges', None, _('show only merges')),
3008 ('m', 'only-merges', None, _('show only merges')),
3014 ('p', 'patch', None, _('show patch')),
3009 ('p', 'patch', None, _('show patch')),
3015 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
3010 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
3016 ('', 'template', '', _('display with template')),
3011 ('', 'template', '', _('display with template')),
3017 ] + walkopts,
3012 ] + walkopts,
3018 _('hg log [OPTION]... [FILE]')),
3013 _('hg log [OPTION]... [FILE]')),
3019 "manifest": (manifest, [], _('hg manifest [REV]')),
3014 "manifest": (manifest, [], _('hg manifest [REV]')),
3020 "^merge":
3015 "^merge":
3021 (merge,
3016 (merge,
3022 [('f', 'force', None, _('force a merge with outstanding changes')),
3017 [('f', 'force', None, _('force a merge with outstanding changes')),
3023 ('r', 'rev', '', _('revision to merge')),
3018 ('r', 'rev', '', _('revision to merge')),
3024 ],
3019 ],
3025 _('hg merge [-f] [[-r] REV]')),
3020 _('hg merge [-f] [[-r] REV]')),
3026 "outgoing|out": (outgoing,
3021 "outgoing|out": (outgoing,
3027 [('M', 'no-merges', None, _('do not show merges')),
3022 [('M', 'no-merges', None, _('do not show merges')),
3028 ('f', 'force', None,
3023 ('f', 'force', None,
3029 _('run even when remote repository is unrelated')),
3024 _('run even when remote repository is unrelated')),
3030 ('p', 'patch', None, _('show patch')),
3025 ('p', 'patch', None, _('show patch')),
3031 ('', 'style', '', _('display using template map file')),
3026 ('', 'style', '', _('display using template map file')),
3032 ('r', 'rev', [], _('a specific revision you would like to push')),
3027 ('r', 'rev', [], _('a specific revision you would like to push')),
3033 ('n', 'newest-first', None, _('show newest record first')),
3028 ('n', 'newest-first', None, _('show newest record first')),
3034 ('', 'template', '', _('display with template')),
3029 ('', 'template', '', _('display with template')),
3035 ] + remoteopts,
3030 ] + remoteopts,
3036 _('hg outgoing [-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
3031 _('hg outgoing [-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
3037 "^parents":
3032 "^parents":
3038 (parents,
3033 (parents,
3039 [('r', 'rev', '', _('show parents from the specified rev')),
3034 [('r', 'rev', '', _('show parents from the specified rev')),
3040 ('', 'style', '', _('display using template map file')),
3035 ('', 'style', '', _('display using template map file')),
3041 ('', 'template', '', _('display with template'))],
3036 ('', 'template', '', _('display with template'))],
3042 _('hg parents [-r REV] [FILE]')),
3037 _('hg parents [-r REV] [FILE]')),
3043 "paths": (paths, [], _('hg paths [NAME]')),
3038 "paths": (paths, [], _('hg paths [NAME]')),
3044 "^pull":
3039 "^pull":
3045 (pull,
3040 (pull,
3046 [('u', 'update', None,
3041 [('u', 'update', None,
3047 _('update to new tip if changesets were pulled')),
3042 _('update to new tip if changesets were pulled')),
3048 ('f', 'force', None,
3043 ('f', 'force', None,
3049 _('run even when remote repository is unrelated')),
3044 _('run even when remote repository is unrelated')),
3050 ('r', 'rev', [],
3045 ('r', 'rev', [],
3051 _('a specific revision up to which you would like to pull')),
3046 _('a specific revision up to which you would like to pull')),
3052 ] + remoteopts,
3047 ] + remoteopts,
3053 _('hg pull [-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
3048 _('hg pull [-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
3054 "^push":
3049 "^push":
3055 (push,
3050 (push,
3056 [('f', 'force', None, _('force push')),
3051 [('f', 'force', None, _('force push')),
3057 ('r', 'rev', [], _('a specific revision you would like to push')),
3052 ('r', 'rev', [], _('a specific revision you would like to push')),
3058 ] + remoteopts,
3053 ] + remoteopts,
3059 _('hg push [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
3054 _('hg push [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
3060 "debugrawcommit|rawcommit":
3055 "debugrawcommit|rawcommit":
3061 (rawcommit,
3056 (rawcommit,
3062 [('p', 'parent', [], _('parent')),
3057 [('p', 'parent', [], _('parent')),
3063 ('d', 'date', '', _('date code')),
3058 ('d', 'date', '', _('date code')),
3064 ('u', 'user', '', _('user')),
3059 ('u', 'user', '', _('user')),
3065 ('F', 'files', '', _('file list'))
3060 ('F', 'files', '', _('file list'))
3066 ] + commitopts,
3061 ] + commitopts,
3067 _('hg debugrawcommit [OPTION]... [FILE]...')),
3062 _('hg debugrawcommit [OPTION]... [FILE]...')),
3068 "recover": (recover, [], _('hg recover')),
3063 "recover": (recover, [], _('hg recover')),
3069 "^remove|rm":
3064 "^remove|rm":
3070 (remove,
3065 (remove,
3071 [('A', 'after', None, _('record a remove that has already occurred')),
3066 [('A', 'after', None, _('record a remove that has already occurred')),
3072 ('f', 'force', None, _('remove file even if modified')),
3067 ('f', 'force', None, _('remove file even if modified')),
3073 ] + walkopts,
3068 ] + walkopts,
3074 _('hg remove [OPTION]... FILE...')),
3069 _('hg remove [OPTION]... FILE...')),
3075 "rename|mv":
3070 "rename|mv":
3076 (rename,
3071 (rename,
3077 [('A', 'after', None, _('record a rename that has already occurred')),
3072 [('A', 'after', None, _('record a rename that has already occurred')),
3078 ('f', 'force', None,
3073 ('f', 'force', None,
3079 _('forcibly copy over an existing managed file')),
3074 _('forcibly copy over an existing managed file')),
3080 ] + walkopts + dryrunopts,
3075 ] + walkopts + dryrunopts,
3081 _('hg rename [OPTION]... SOURCE... DEST')),
3076 _('hg rename [OPTION]... SOURCE... DEST')),
3082 "^revert":
3077 "^revert":
3083 (revert,
3078 (revert,
3084 [('a', 'all', None, _('revert all changes when no arguments given')),
3079 [('a', 'all', None, _('revert all changes when no arguments given')),
3085 ('d', 'date', '', _('tipmost revision matching date')),
3080 ('d', 'date', '', _('tipmost revision matching date')),
3086 ('r', 'rev', '', _('revision to revert to')),
3081 ('r', 'rev', '', _('revision to revert to')),
3087 ('', 'no-backup', None, _('do not save backup copies of files')),
3082 ('', 'no-backup', None, _('do not save backup copies of files')),
3088 ] + walkopts + dryrunopts,
3083 ] + walkopts + dryrunopts,
3089 _('hg revert [OPTION]... [-r REV] [NAME]...')),
3084 _('hg revert [OPTION]... [-r REV] [NAME]...')),
3090 "rollback": (rollback, [], _('hg rollback')),
3085 "rollback": (rollback, [], _('hg rollback')),
3091 "root": (root, [], _('hg root')),
3086 "root": (root, [], _('hg root')),
3092 "showconfig|debugconfig":
3087 "showconfig|debugconfig":
3093 (showconfig,
3088 (showconfig,
3094 [('u', 'untrusted', None, _('show untrusted configuration options'))],
3089 [('u', 'untrusted', None, _('show untrusted configuration options'))],
3095 _('showconfig [-u] [NAME]...')),
3090 _('showconfig [-u] [NAME]...')),
3096 "^serve":
3091 "^serve":
3097 (serve,
3092 (serve,
3098 [('A', 'accesslog', '', _('name of access log file to write to')),
3093 [('A', 'accesslog', '', _('name of access log file to write to')),
3099 ('d', 'daemon', None, _('run server in background')),
3094 ('d', 'daemon', None, _('run server in background')),
3100 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3095 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3101 ('E', 'errorlog', '', _('name of error log file to write to')),
3096 ('E', 'errorlog', '', _('name of error log file to write to')),
3102 ('p', 'port', 0, _('port to use (default: 8000)')),
3097 ('p', 'port', 0, _('port to use (default: 8000)')),
3103 ('a', 'address', '', _('address to use')),
3098 ('a', 'address', '', _('address to use')),
3104 ('n', 'name', '',
3099 ('n', 'name', '',
3105 _('name to show in web pages (default: working dir)')),
3100 _('name to show in web pages (default: working dir)')),
3106 ('', 'webdir-conf', '', _('name of the webdir config file'
3101 ('', 'webdir-conf', '', _('name of the webdir config file'
3107 ' (serve more than one repo)')),
3102 ' (serve more than one repo)')),
3108 ('', 'pid-file', '', _('name of file to write process ID to')),
3103 ('', 'pid-file', '', _('name of file to write process ID to')),
3109 ('', 'stdio', None, _('for remote clients')),
3104 ('', 'stdio', None, _('for remote clients')),
3110 ('t', 'templates', '', _('web templates to use')),
3105 ('t', 'templates', '', _('web templates to use')),
3111 ('', 'style', '', _('template style to use')),
3106 ('', 'style', '', _('template style to use')),
3112 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
3107 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
3113 ('', 'certificate', '', _('SSL certificate file'))],
3108 ('', 'certificate', '', _('SSL certificate file'))],
3114 _('hg serve [OPTION]...')),
3109 _('hg serve [OPTION]...')),
3115 "^status|st":
3110 "^status|st":
3116 (status,
3111 (status,
3117 [('A', 'all', None, _('show status of all files')),
3112 [('A', 'all', None, _('show status of all files')),
3118 ('m', 'modified', None, _('show only modified files')),
3113 ('m', 'modified', None, _('show only modified files')),
3119 ('a', 'added', None, _('show only added files')),
3114 ('a', 'added', None, _('show only added files')),
3120 ('r', 'removed', None, _('show only removed files')),
3115 ('r', 'removed', None, _('show only removed files')),
3121 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3116 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3122 ('c', 'clean', None, _('show only files without changes')),
3117 ('c', 'clean', None, _('show only files without changes')),
3123 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3118 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3124 ('i', 'ignored', None, _('show only ignored files')),
3119 ('i', 'ignored', None, _('show only ignored files')),
3125 ('n', 'no-status', None, _('hide status prefix')),
3120 ('n', 'no-status', None, _('hide status prefix')),
3126 ('C', 'copies', None, _('show source of copied files')),
3121 ('C', 'copies', None, _('show source of copied files')),
3127 ('0', 'print0', None,
3122 ('0', 'print0', None,
3128 _('end filenames with NUL, for use with xargs')),
3123 _('end filenames with NUL, for use with xargs')),
3129 ('', 'rev', [], _('show difference from revision')),
3124 ('', 'rev', [], _('show difference from revision')),
3130 ] + walkopts,
3125 ] + walkopts,
3131 _('hg status [OPTION]... [FILE]...')),
3126 _('hg status [OPTION]... [FILE]...')),
3132 "tag":
3127 "tag":
3133 (tag,
3128 (tag,
3134 [('f', 'force', None, _('replace existing tag')),
3129 [('f', 'force', None, _('replace existing tag')),
3135 ('l', 'local', None, _('make the tag local')),
3130 ('l', 'local', None, _('make the tag local')),
3136 ('m', 'message', '', _('message for tag commit log entry')),
3131 ('m', 'message', '', _('message for tag commit log entry')),
3137 ('d', 'date', '', _('record datecode as commit date')),
3132 ('d', 'date', '', _('record datecode as commit date')),
3138 ('u', 'user', '', _('record user as committer')),
3133 ('u', 'user', '', _('record user as committer')),
3139 ('r', 'rev', '', _('revision to tag')),
3134 ('r', 'rev', '', _('revision to tag')),
3140 ('', 'remove', None, _('remove a tag'))],
3135 ('', 'remove', None, _('remove a tag'))],
3141 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
3136 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
3142 "tags": (tags, [], _('hg tags')),
3137 "tags": (tags, [], _('hg tags')),
3143 "tip":
3138 "tip":
3144 (tip,
3139 (tip,
3145 [('', 'style', '', _('display using template map file')),
3140 [('', 'style', '', _('display using template map file')),
3146 ('p', 'patch', None, _('show patch')),
3141 ('p', 'patch', None, _('show patch')),
3147 ('', 'template', '', _('display with template'))],
3142 ('', 'template', '', _('display with template'))],
3148 _('hg tip [-p]')),
3143 _('hg tip [-p]')),
3149 "unbundle":
3144 "unbundle":
3150 (unbundle,
3145 (unbundle,
3151 [('u', 'update', None,
3146 [('u', 'update', None,
3152 _('update to new tip if changesets were unbundled'))],
3147 _('update to new tip if changesets were unbundled'))],
3153 _('hg unbundle [-u] FILE...')),
3148 _('hg unbundle [-u] FILE...')),
3154 "^update|up|checkout|co":
3149 "^update|up|checkout|co":
3155 (update,
3150 (update,
3156 [('C', 'clean', None, _('overwrite locally modified files')),
3151 [('C', 'clean', None, _('overwrite locally modified files')),
3157 ('d', 'date', '', _('tipmost revision matching date')),
3152 ('d', 'date', '', _('tipmost revision matching date')),
3158 ('r', 'rev', '', _('revision'))],
3153 ('r', 'rev', '', _('revision'))],
3159 _('hg update [-C] [-d DATE] [[-r] REV]')),
3154 _('hg update [-C] [-d DATE] [[-r] REV]')),
3160 "verify": (verify, [], _('hg verify')),
3155 "verify": (verify, [], _('hg verify')),
3161 "version": (version_, [], _('hg version')),
3156 "version": (version_, [], _('hg version')),
3162 }
3157 }
3163
3158
3164 extensions.commandtable = table
3159 extensions.commandtable = table
3165
3160
3166 norepo = ("clone init version help debugancestor debugcomplete debugdata"
3161 norepo = ("clone init version help debugancestor debugcomplete debugdata"
3167 " debugindex debugindexdot debugdate debuginstall")
3162 " debugindex debugindexdot debugdate debuginstall")
3168 optionalrepo = ("paths serve showconfig")
3163 optionalrepo = ("paths serve showconfig")
3169
3164
3170 def dispatch(args, argv0=None):
3165 def dispatch(args, argv0=None):
3171 try:
3166 try:
3172 u = ui.ui(traceback='--traceback' in args)
3167 u = ui.ui(traceback='--traceback' in args)
3173 except util.Abort, inst:
3168 except util.Abort, inst:
3174 sys.stderr.write(_("abort: %s\n") % inst)
3169 sys.stderr.write(_("abort: %s\n") % inst)
3175 return -1
3170 return -1
3176 return cmdutil.runcatch(u, args, argv0=argv0)
3171 return cmdutil.runcatch(u, args, argv0=argv0)
3177
3172
3178 def run():
3173 def run():
3179 sys.exit(dispatch(sys.argv[1:], argv0=sys.argv[0]))
3174 sys.exit(dispatch(sys.argv[1:], argv0=sys.argv[0]))
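# A minimal sketch of driving dispatch() from a wrapper script, assuming the
# command table above lives at mercurial.commands as in this era's layout
# (the module path is an assumption, not shown in this hunk).
import sys
from mercurial import commands

def main():
    # same effect as running "hg status -m"; dispatch() returns the exit
    # code, or -1 if ui construction aborts, mirroring run() above
    return commands.dispatch(['status', '-m'], argv0=sys.argv[0])

if __name__ == '__main__':
    sys.exit(main())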
@@ -1,500 +1,503 @@
1 """
1 """
2 dirstate.py - working directory tracking for mercurial
2 dirstate.py - working directory tracking for mercurial
3
3
4 Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5
5
6 This software may be used and distributed according to the terms
6 This software may be used and distributed according to the terms
7 of the GNU General Public License, incorporated herein by reference.
7 of the GNU General Public License, incorporated herein by reference.
8 """
8 """
9
9
10 from node import *
10 from node import *
11 from i18n import _
11 from i18n import _
12 import struct, os, time, bisect, stat, strutil, util, re, errno, ignore
12 import struct, os, time, bisect, stat, strutil, util, re, errno, ignore
13 import cStringIO
13 import cStringIO
14
14
15 _unknown = ('?', 0, 0, 0)
15 _unknown = ('?', 0, 0, 0)
16 _format = ">cllll"
16 _format = ">cllll"
17
17
18 class dirstate(object):
18 class dirstate(object):
19
19
20 def __init__(self, opener, ui, root):
20 def __init__(self, opener, ui, root):
21 self._opener = opener
21 self._opener = opener
22 self._root = root
22 self._root = root
23 self._dirty = False
23 self._dirty = False
24 self._dirtypl = False
24 self._ui = ui
25 self._ui = ui
25
26
26 def __getattr__(self, name):
27 def __getattr__(self, name):
27 if name == '_map':
28 if name == '_map':
28 self._read()
29 self._read()
29 return self._map
30 return self._map
30 elif name == '_copymap':
31 elif name == '_copymap':
31 self._read()
32 self._read()
32 return self._copymap
33 return self._copymap
33 elif name == '_branch':
34 elif name == '_branch':
34 try:
35 try:
35 self._branch = (self._opener("branch").read().strip()
36 self._branch = (self._opener("branch").read().strip()
36 or "default")
37 or "default")
37 except IOError:
38 except IOError:
38 self._branch = "default"
39 self._branch = "default"
39 return self._branch
40 return self._branch
40 elif name == '_pl':
41 elif name == '_pl':
41 self._pl = [nullid, nullid]
42 self._pl = [nullid, nullid]
42 try:
43 try:
43 st = self._opener("dirstate").read(40)
44 st = self._opener("dirstate").read(40)
44 if len(st) == 40:
45 if len(st) == 40:
45 self._pl = st[:20], st[20:40]
46 self._pl = st[:20], st[20:40]
46 except IOError, err:
47 except IOError, err:
47 if err.errno != errno.ENOENT: raise
48 if err.errno != errno.ENOENT: raise
48 return self._pl
49 return self._pl
49 elif name == '_dirs':
50 elif name == '_dirs':
50 self._dirs = {}
51 self._dirs = {}
51 for f in self._map:
52 for f in self._map:
52 self._incpath(f)
53 self._incpath(f)
53 return self._dirs
54 return self._dirs
54 elif name == '_ignore':
55 elif name == '_ignore':
55 files = [self._join('.hgignore')]
56 files = [self._join('.hgignore')]
56 for name, path in self._ui.configitems("ui"):
57 for name, path in self._ui.configitems("ui"):
57 if name == 'ignore' or name.startswith('ignore.'):
58 if name == 'ignore' or name.startswith('ignore.'):
58 files.append(os.path.expanduser(path))
59 files.append(os.path.expanduser(path))
59 self._ignore = ignore.ignore(self._root, files, self._ui.warn)
60 self._ignore = ignore.ignore(self._root, files, self._ui.warn)
60 return self._ignore
61 return self._ignore
61 elif name == '_slash':
62 elif name == '_slash':
62 self._slash = self._ui.configbool('ui', 'slash') and os.sep != '/'
63 self._slash = self._ui.configbool('ui', 'slash') and os.sep != '/'
63 return self._slash
64 return self._slash
64 else:
65 else:
65 raise AttributeError, name
66 raise AttributeError, name
66
67
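# The __getattr__ above gives dirstate lazy, cached attributes: the first
# access computes the value and stores it on the instance, so later lookups
# never reach __getattr__ again.  A stripped-down sketch of the same pattern
# (illustrative class only, not part of dirstate):
class lazydemo(object):
    def __getattr__(self, name):
        if name == '_expensive':
            self._expensive = sum(range(1000000))  # computed on first access
            return self._expensive
        raise AttributeError(name)

d = lazydemo()
print d._expensive   # triggers __getattr__ and caches the result
print d._expensive   # now served straight from d.__dict__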
67 def _join(self, f):
68 def _join(self, f):
68 return os.path.join(self._root, f)
69 return os.path.join(self._root, f)
69
70
70 def getcwd(self):
71 def getcwd(self):
71 cwd = os.getcwd()
72 cwd = os.getcwd()
72 if cwd == self._root: return ''
73 if cwd == self._root: return ''
73 # self._root ends with a path separator if self._root is '/' or 'C:\'
74 # self._root ends with a path separator if self._root is '/' or 'C:\'
74 rootsep = self._root
75 rootsep = self._root
75 if not rootsep.endswith(os.sep):
76 if not rootsep.endswith(os.sep):
76 rootsep += os.sep
77 rootsep += os.sep
77 if cwd.startswith(rootsep):
78 if cwd.startswith(rootsep):
78 return cwd[len(rootsep):]
79 return cwd[len(rootsep):]
79 else:
80 else:
80 # we're outside the repo. return an absolute path.
81 # we're outside the repo. return an absolute path.
81 return cwd
82 return cwd
82
83
83 def pathto(self, f, cwd=None):
84 def pathto(self, f, cwd=None):
84 if cwd is None:
85 if cwd is None:
85 cwd = self.getcwd()
86 cwd = self.getcwd()
86 path = util.pathto(self._root, cwd, f)
87 path = util.pathto(self._root, cwd, f)
87 if self._slash:
88 if self._slash:
88 return path.replace(os.sep, '/')
89 return path.replace(os.sep, '/')
89 return path
90 return path
90
91
91 def __getitem__(self, key):
92 def __getitem__(self, key):
92 ''' current states:
93 ''' current states:
93 n normal
94 n normal
94 m needs merging
95 m needs merging
95 r marked for removal
96 r marked for removal
96 a marked for addition
97 a marked for addition
97 ? not tracked'''
98 ? not tracked'''
98 return self._map.get(key, ("?",))[0]
99 return self._map.get(key, ("?",))[0]
99
100
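# A small usage sketch for the mapping-style interface above, assuming the
# repository object exposes this class as repo.dirstate (that attribute name
# and the example path are assumptions, not shown in this hunk):
from mercurial import hg, ui

repo = hg.repository(ui.ui(), '.')
ds = repo.dirstate
print ds['mercurial/commands.py']    # one of 'n', 'm', 'r', 'a' or '?'
print 'mercurial/commands.py' in ds  # True only for tracked files
for f in ds:                         # tracked files, in sorted order
    print f, ds[f]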
100 def __contains__(self, key):
101 def __contains__(self, key):
101 return key in self._map
102 return key in self._map
102
103
103 def __iter__(self):
104 def __iter__(self):
104 a = self._map.keys()
105 a = self._map.keys()
105 a.sort()
106 a.sort()
106 for x in a:
107 for x in a:
107 yield x
108 yield x
108
109
109 def parents(self):
110 def parents(self):
110 return self._pl
111 return self._pl
111
112
112 def branch(self):
113 def branch(self):
113 return self._branch
114 return self._branch
114
115
115 def setparents(self, p1, p2=nullid):
116 def setparents(self, p1, p2=nullid):
116 self._dirty = True
117 self._dirty = self._dirtypl = True
117 self._pl = p1, p2
118 self._pl = p1, p2
118
119
119 def setbranch(self, branch):
120 def setbranch(self, branch):
120 self._branch = branch
121 self._branch = branch
121 self._opener("branch", "w").write(branch + '\n')
122 self._opener("branch", "w").write(branch + '\n')
122
123
123 def _read(self):
124 def _read(self):
124 self._map = {}
125 self._map = {}
125 self._copymap = {}
126 self._copymap = {}
127 if not self._dirtypl:
126 self._pl = [nullid, nullid]
128 self._pl = [nullid, nullid]
127 try:
129 try:
128 st = self._opener("dirstate").read()
130 st = self._opener("dirstate").read()
129 except IOError, err:
131 except IOError, err:
130 if err.errno != errno.ENOENT: raise
132 if err.errno != errno.ENOENT: raise
131 return
133 return
132 if not st:
134 if not st:
133 return
135 return
134
136
137 if not self._dirtypl:
135 self._pl = [st[:20], st[20: 40]]
138 self._pl = [st[:20], st[20: 40]]
136
139
137 # deref fields so they will be local in loop
140 # deref fields so they will be local in loop
138 dmap = self._map
141 dmap = self._map
139 copymap = self._copymap
142 copymap = self._copymap
140 unpack = struct.unpack
143 unpack = struct.unpack
141
144
142 pos = 40
145 pos = 40
143 e_size = struct.calcsize(_format)
146 e_size = struct.calcsize(_format)
144
147
145 while pos < len(st):
148 while pos < len(st):
146 newpos = pos + e_size
149 newpos = pos + e_size
147 e = unpack(_format, st[pos:newpos])
150 e = unpack(_format, st[pos:newpos])
148 l = e[4]
151 l = e[4]
149 pos = newpos
152 pos = newpos
150 newpos = pos + l
153 newpos = pos + l
151 f = st[pos:newpos]
154 f = st[pos:newpos]
152 if '\0' in f:
155 if '\0' in f:
153 f, c = f.split('\0')
156 f, c = f.split('\0')
154 copymap[f] = c
157 copymap[f] = c
155 dmap[f] = e[:4]
158 dmap[f] = e[:4]
156 pos = newpos
159 pos = newpos
157
160
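# A sketch of one on-disk record as _read() above parses it: after the two
# 20-byte parent hashes comes, per file, a ">cllll" header (state, mode,
# size, mtime, name length) followed by the name itself, with an optional
# "\0<copy source>" glued onto the name.  The values below are made up.
import struct

fmt = ">cllll"

def packentry(state, mode, size, mtime, fname, copied=None):
    if copied:
        fname = fname + "\0" + copied
    return struct.pack(fmt, state, mode, size, mtime, len(fname)) + fname

rec = packentry('n', 0644, 12, 1178000000, 'mercurial/demo.py')
hsize = struct.calcsize(fmt)
print struct.unpack(fmt, rec[:hsize])   # ('n', 420, 12, 1178000000, 17)
print rec[hsize:]                       # 'mercurial/demo.py'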
158 def invalidate(self):
161 def invalidate(self):
159 for a in "_map _copymap _branch _pl _dirs _ignore".split():
162 for a in "_map _copymap _branch _pl _dirs _ignore".split():
160 if hasattr(self, a):
163 if a in self.__dict__:
161 self.__delattr__(a)
164 delattr(self, a)
162 self._dirty = False
165 self._dirty = False
163
166
164 def copy(self, source, dest):
167 def copy(self, source, dest):
165 self._dirty = True
168 self._dirty = True
166 self._copymap[dest] = source
169 self._copymap[dest] = source
167
170
168 def copied(self, file):
171 def copied(self, file):
169 return self._copymap.get(file, None)
172 return self._copymap.get(file, None)
170
173
171 def copies(self):
174 def copies(self):
172 return self._copymap
175 return self._copymap
173
176
174 def _incpath(self, path):
177 def _incpath(self, path):
175 for c in strutil.findall(path, '/'):
178 for c in strutil.findall(path, '/'):
176 pc = path[:c]
179 pc = path[:c]
177 self._dirs.setdefault(pc, 0)
180 self._dirs.setdefault(pc, 0)
178 self._dirs[pc] += 1
181 self._dirs[pc] += 1
179
182
180 def _decpath(self, path):
183 def _decpath(self, path):
181 for c in strutil.findall(path, '/'):
184 for c in strutil.findall(path, '/'):
182 pc = path[:c]
185 pc = path[:c]
183 self._dirs.setdefault(pc, 0)
186 self._dirs.setdefault(pc, 0)
184 self._dirs[pc] -= 1
187 self._dirs[pc] -= 1
185
188
186 def _incpathcheck(self, f):
189 def _incpathcheck(self, f):
187 if '\r' in f or '\n' in f:
190 if '\r' in f or '\n' in f:
188 raise util.Abort(_("'\\n' and '\\r' disallowed in filenames"))
191 raise util.Abort(_("'\\n' and '\\r' disallowed in filenames"))
189 # shadows
192 # shadows
190 if f in self._dirs:
193 if f in self._dirs:
191 raise util.Abort(_('directory named %r already in dirstate') % f)
194 raise util.Abort(_('directory named %r already in dirstate') % f)
192 for c in strutil.rfindall(f, '/'):
195 for c in strutil.rfindall(f, '/'):
193 d = f[:c]
196 d = f[:c]
194 if d in self._dirs:
197 if d in self._dirs:
195 break
198 break
196 if d in self._map:
199 if d in self._map:
197 raise util.Abort(_('file named %r already in dirstate') % d)
200 raise util.Abort(_('file named %r already in dirstate') % d)
198 self._incpath(f)
201 self._incpath(f)
199
202
200 def normal(self, f):
203 def normal(self, f):
201 'mark a file normal'
204 'mark a file normal'
202 self._dirty = True
205 self._dirty = True
203 s = os.lstat(self._join(f))
206 s = os.lstat(self._join(f))
204 self._map[f] = ('n', s.st_mode, s.st_size, s.st_mtime)
207 self._map[f] = ('n', s.st_mode, s.st_size, s.st_mtime)
205 if self._copymap.has_key(f):
208 if self._copymap.has_key(f):
206 del self._copymap[f]
209 del self._copymap[f]
207
210
208 def normaldirty(self, f):
211 def normaldirty(self, f):
209 'mark a file normal, but possibly dirty'
212 'mark a file normal, but possibly dirty'
210 self._dirty = True
213 self._dirty = True
211 s = os.lstat(self._join(f))
214 s = os.lstat(self._join(f))
212 self._map[f] = ('n', s.st_mode, -1, -1)
215 self._map[f] = ('n', s.st_mode, -1, -1)
213 if f in self._copymap:
216 if f in self._copymap:
214 del self._copymap[f]
217 del self._copymap[f]
215
218
216 def add(self, f):
219 def add(self, f):
217 'mark a file added'
220 'mark a file added'
218 self._dirty = True
221 self._dirty = True
219 self._incpathcheck(f)
222 self._incpathcheck(f)
220 self._map[f] = ('a', 0, -1, -1)
223 self._map[f] = ('a', 0, -1, -1)
221 if f in self._copymap:
224 if f in self._copymap:
222 del self._copymap[f]
225 del self._copymap[f]
223
226
224 def remove(self, f):
227 def remove(self, f):
225 'mark a file removed'
228 'mark a file removed'
226 self._dirty = True
229 self._dirty = True
227 self._map[f] = ('r', 0, 0, 0)
230 self._map[f] = ('r', 0, 0, 0)
228 self._decpath(f)
231 self._decpath(f)
229 if f in self._copymap:
232 if f in self._copymap:
230 del self._copymap[f]
233 del self._copymap[f]
231
234
232 def merge(self, f):
235 def merge(self, f):
233 'mark a file merged'
236 'mark a file merged'
234 self._dirty = True
237 self._dirty = True
235 s = os.lstat(self._join(f))
238 s = os.lstat(self._join(f))
236 self._map[f] = ('m', s.st_mode, s.st_size, s.st_mtime)
239 self._map[f] = ('m', s.st_mode, s.st_size, s.st_mtime)
237 if f in self._copymap:
240 if f in self._copymap:
238 del self._copymap[f]
241 del self._copymap[f]
239
242
240 def forget(self, f):
243 def forget(self, f):
241 'forget a file'
244 'forget a file'
242 self._dirty = True
245 self._dirty = True
243 try:
246 try:
244 del self._map[f]
247 del self._map[f]
245 self._decpath(f)
248 self._decpath(f)
246 except KeyError:
249 except KeyError:
247 self._ui.warn(_("not in dirstate: %s!\n") % f)
250 self._ui.warn(_("not in dirstate: %s!\n") % f)
248
251
249 def rebuild(self, parent, files):
252 def rebuild(self, parent, files):
250 self.invalidate()
253 self.invalidate()
251 for f in files:
254 for f in files:
252 if files.execf(f):
255 if files.execf(f):
253 self._map[f] = ('n', 0777, -1, 0)
256 self._map[f] = ('n', 0777, -1, 0)
254 else:
257 else:
255 self._map[f] = ('n', 0666, -1, 0)
258 self._map[f] = ('n', 0666, -1, 0)
256 self._pl = (parent, nullid)
259 self._pl = (parent, nullid)
257 self._dirty = True
260 self._dirty = True
258
261
259 def write(self):
262 def write(self):
260 if not self._dirty:
263 if not self._dirty:
261 return
264 return
262 cs = cStringIO.StringIO()
265 cs = cStringIO.StringIO()
263 cs.write("".join(self._pl))
266 cs.write("".join(self._pl))
264 for f, e in self._map.iteritems():
267 for f, e in self._map.iteritems():
265 c = self.copied(f)
268 c = self.copied(f)
266 if c:
269 if c:
267 f = f + "\0" + c
270 f = f + "\0" + c
268 e = struct.pack(_format, e[0], e[1], e[2], e[3], len(f))
271 e = struct.pack(_format, e[0], e[1], e[2], e[3], len(f))
269 cs.write(e)
272 cs.write(e)
270 cs.write(f)
273 cs.write(f)
271 st = self._opener("dirstate", "w", atomictemp=True)
274 st = self._opener("dirstate", "w", atomictemp=True)
272 st.write(cs.getvalue())
275 st.write(cs.getvalue())
273 st.rename()
276 st.rename()
274 self._dirty = False
277 self._dirty = self._dirtypl = False
275
278
276 def _filter(self, files):
279 def _filter(self, files):
277 ret = {}
280 ret = {}
278 unknown = []
281 unknown = []
279
282
280 for x in files:
283 for x in files:
281 if x == '.':
284 if x == '.':
282 return self._map.copy()
285 return self._map.copy()
283 if x not in self._map:
286 if x not in self._map:
284 unknown.append(x)
287 unknown.append(x)
285 else:
288 else:
286 ret[x] = self._map[x]
289 ret[x] = self._map[x]
287
290
288 if not unknown:
291 if not unknown:
289 return ret
292 return ret
290
293
291 b = self._map.keys()
294 b = self._map.keys()
292 b.sort()
295 b.sort()
293 blen = len(b)
296 blen = len(b)
294
297
295 for x in unknown:
298 for x in unknown:
296 bs = bisect.bisect(b, "%s%s" % (x, '/'))
299 bs = bisect.bisect(b, "%s%s" % (x, '/'))
297 while bs < blen:
300 while bs < blen:
298 s = b[bs]
301 s = b[bs]
299 if len(s) > len(x) and s.startswith(x):
302 if len(s) > len(x) and s.startswith(x):
300 ret[s] = self._map[s]
303 ret[s] = self._map[s]
301 else:
304 else:
302 break
305 break
303 bs += 1
306 bs += 1
304 return ret
307 return ret
305
308
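# The bisect loop above pulls every tracked file under a directory out of
# the sorted key list without a full scan.  A self-contained sketch of the
# same trick on a made-up file list:
import bisect

tracked = ['README', 'doc/hg.1.txt', 'mercurial/commands.py',
           'mercurial/dirstate.py', 'tests/test-lock']
tracked.sort()

def under(prefix):
    found = []
    i = bisect.bisect(tracked, prefix + '/')
    while i < len(tracked) and tracked[i].startswith(prefix + '/'):
        found.append(tracked[i])
        i += 1
    return found

print under('mercurial')   # the two files below mercurial/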
306 def _supported(self, f, st, verbose=False):
309 def _supported(self, f, st, verbose=False):
307 if stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode):
310 if stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode):
308 return True
311 return True
309 if verbose:
312 if verbose:
310 kind = 'unknown'
313 kind = 'unknown'
311 if stat.S_ISCHR(st.st_mode): kind = _('character device')
314 if stat.S_ISCHR(st.st_mode): kind = _('character device')
312 elif stat.S_ISBLK(st.st_mode): kind = _('block device')
315 elif stat.S_ISBLK(st.st_mode): kind = _('block device')
313 elif stat.S_ISFIFO(st.st_mode): kind = _('fifo')
316 elif stat.S_ISFIFO(st.st_mode): kind = _('fifo')
314 elif stat.S_ISSOCK(st.st_mode): kind = _('socket')
317 elif stat.S_ISSOCK(st.st_mode): kind = _('socket')
315 elif stat.S_ISDIR(st.st_mode): kind = _('directory')
318 elif stat.S_ISDIR(st.st_mode): kind = _('directory')
316 self._ui.warn(_('%s: unsupported file type (type is %s)\n')
319 self._ui.warn(_('%s: unsupported file type (type is %s)\n')
317 % (self.pathto(f), kind))
320 % (self.pathto(f), kind))
318 return False
321 return False
319
322
320 def walk(self, files=None, match=util.always, badmatch=None):
323 def walk(self, files=None, match=util.always, badmatch=None):
321 # filter out the stat
324 # filter out the stat
322 for src, f, st in self.statwalk(files, match, badmatch=badmatch):
325 for src, f, st in self.statwalk(files, match, badmatch=badmatch):
323 yield src, f
326 yield src, f
324
327
325 def statwalk(self, files=None, match=util.always, ignored=False,
328 def statwalk(self, files=None, match=util.always, ignored=False,
326 badmatch=None, directories=False):
329 badmatch=None, directories=False):
327 '''
330 '''
328 walk recursively through the directory tree, finding all files
331 walk recursively through the directory tree, finding all files
329 matched by the match function
332 matched by the match function
330
333
331 results are yielded in a tuple (src, filename, st), where src
334 results are yielded in a tuple (src, filename, st), where src
332 is one of:
335 is one of:
333 'f' the file was found in the directory tree
336 'f' the file was found in the directory tree
334 'd' the file is a directory of the tree
337 'd' the file is a directory of the tree
335 'm' the file was only in the dirstate and not in the tree
338 'm' the file was only in the dirstate and not in the tree
336 'b' file was not found and matched badmatch
339 'b' file was not found and matched badmatch
337
340
338 and st is the stat result if the file was found in the directory.
341 and st is the stat result if the file was found in the directory.
339 '''
342 '''
340
343
341 # walk all files by default
344 # walk all files by default
342 if not files:
345 if not files:
343 files = ['.']
346 files = ['.']
344 dc = self._map.copy()
347 dc = self._map.copy()
345 else:
348 else:
346 files = util.unique(files)
349 files = util.unique(files)
347 dc = self._filter(files)
350 dc = self._filter(files)
348
351
349 def imatch(file_):
352 def imatch(file_):
350 if file_ not in dc and self._ignore(file_):
353 if file_ not in dc and self._ignore(file_):
351 return False
354 return False
352 return match(file_)
355 return match(file_)
353
356
354 ignore = self._ignore
357 ignore = self._ignore
355 if ignored:
358 if ignored:
356 imatch = match
359 imatch = match
357 ignore = util.never
360 ignore = util.never
358
361
359 # self._root may end with a path separator when self._root == '/'
362 # self._root may end with a path separator when self._root == '/'
360 common_prefix_len = len(self._root)
363 common_prefix_len = len(self._root)
361 if not self._root.endswith(os.sep):
364 if not self._root.endswith(os.sep):
362 common_prefix_len += 1
365 common_prefix_len += 1
363 # recursion free walker, faster than os.walk.
366 # recursion free walker, faster than os.walk.
364 def findfiles(s):
367 def findfiles(s):
365 work = [s]
368 work = [s]
366 if directories:
369 if directories:
367 yield 'd', util.normpath(s[common_prefix_len:]), os.lstat(s)
370 yield 'd', util.normpath(s[common_prefix_len:]), os.lstat(s)
368 while work:
371 while work:
369 top = work.pop()
372 top = work.pop()
370 names = os.listdir(top)
373 names = os.listdir(top)
371 names.sort()
374 names.sort()
372 # nd is the top of the repository dir tree
375 # nd is the top of the repository dir tree
373 nd = util.normpath(top[common_prefix_len:])
376 nd = util.normpath(top[common_prefix_len:])
374 if nd == '.':
377 if nd == '.':
375 nd = ''
378 nd = ''
376 else:
379 else:
377 # do not recurse into a repo contained in this
380 # do not recurse into a repo contained in this
378 # one. use bisect to find .hg directory so speed
381 # one. use bisect to find .hg directory so speed
379 # is good on big directory.
382 # is good on big directory.
380 hg = bisect.bisect_left(names, '.hg')
383 hg = bisect.bisect_left(names, '.hg')
381 if hg < len(names) and names[hg] == '.hg':
384 if hg < len(names) and names[hg] == '.hg':
382 if os.path.isdir(os.path.join(top, '.hg')):
385 if os.path.isdir(os.path.join(top, '.hg')):
383 continue
386 continue
384 for f in names:
387 for f in names:
385 np = util.pconvert(os.path.join(nd, f))
388 np = util.pconvert(os.path.join(nd, f))
386 if seen(np):
389 if seen(np):
387 continue
390 continue
388 p = os.path.join(top, f)
391 p = os.path.join(top, f)
389 # don't trip over symlinks
392 # don't trip over symlinks
390 st = os.lstat(p)
393 st = os.lstat(p)
391 if stat.S_ISDIR(st.st_mode):
394 if stat.S_ISDIR(st.st_mode):
392 if not ignore(np):
395 if not ignore(np):
393 work.append(p)
396 work.append(p)
394 if directories:
397 if directories:
395 yield 'd', np, st
398 yield 'd', np, st
396 if imatch(np) and np in dc:
399 if imatch(np) and np in dc:
397 yield 'm', np, st
400 yield 'm', np, st
398 elif imatch(np):
401 elif imatch(np):
399 if self._supported(np, st):
402 if self._supported(np, st):
400 yield 'f', np, st
403 yield 'f', np, st
401 elif np in dc:
404 elif np in dc:
402 yield 'm', np, st
405 yield 'm', np, st
403
406
404 known = {'.hg': 1}
407 known = {'.hg': 1}
405 def seen(fn):
408 def seen(fn):
406 if fn in known: return True
409 if fn in known: return True
407 known[fn] = 1
410 known[fn] = 1
408
411
409 # step one, find all files that match our criteria
412 # step one, find all files that match our criteria
410 files.sort()
413 files.sort()
411 for ff in files:
414 for ff in files:
412 nf = util.normpath(ff)
415 nf = util.normpath(ff)
413 f = self._join(ff)
416 f = self._join(ff)
414 try:
417 try:
415 st = os.lstat(f)
418 st = os.lstat(f)
416 except OSError, inst:
419 except OSError, inst:
417 found = False
420 found = False
418 for fn in dc:
421 for fn in dc:
419 if nf == fn or (fn.startswith(nf) and fn[len(nf)] == '/'):
422 if nf == fn or (fn.startswith(nf) and fn[len(nf)] == '/'):
420 found = True
423 found = True
421 break
424 break
422 if not found:
425 if not found:
423 if inst.errno != errno.ENOENT or not badmatch:
426 if inst.errno != errno.ENOENT or not badmatch:
424 self._ui.warn('%s: %s\n' %
427 self._ui.warn('%s: %s\n' %
425 (self.pathto(ff), inst.strerror))
428 (self.pathto(ff), inst.strerror))
426 elif badmatch and badmatch(ff) and imatch(nf):
429 elif badmatch and badmatch(ff) and imatch(nf):
427 yield 'b', ff, None
430 yield 'b', ff, None
428 continue
431 continue
429 if stat.S_ISDIR(st.st_mode):
432 if stat.S_ISDIR(st.st_mode):
430 cmp1 = (lambda x, y: cmp(x[1], y[1]))
433 cmp1 = (lambda x, y: cmp(x[1], y[1]))
431 sorted_ = [ x for x in findfiles(f) ]
434 sorted_ = [ x for x in findfiles(f) ]
432 sorted_.sort(cmp1)
435 sorted_.sort(cmp1)
433 for e in sorted_:
436 for e in sorted_:
434 yield e
437 yield e
435 else:
438 else:
436 if not seen(nf) and match(nf):
439 if not seen(nf) and match(nf):
437 if self._supported(ff, st, verbose=True):
440 if self._supported(ff, st, verbose=True):
438 yield 'f', nf, st
441 yield 'f', nf, st
439 elif ff in dc:
442 elif ff in dc:
440 yield 'm', nf, st
443 yield 'm', nf, st
441
444
442 # step two run through anything left in the dc hash and yield
445 # step two run through anything left in the dc hash and yield
443 # if we haven't already seen it
446 # if we haven't already seen it
444 ks = dc.keys()
447 ks = dc.keys()
445 ks.sort()
448 ks.sort()
446 for k in ks:
449 for k in ks:
447 if not seen(k) and imatch(k):
450 if not seen(k) and imatch(k):
448 yield 'm', k, None
451 yield 'm', k, None
449
452
450 def status(self, files, match, list_ignored, list_clean):
453 def status(self, files, match, list_ignored, list_clean):
451 lookup, modified, added, unknown, ignored = [], [], [], [], []
454 lookup, modified, added, unknown, ignored = [], [], [], [], []
452 removed, deleted, clean = [], [], []
455 removed, deleted, clean = [], [], []
453
456
454 for src, fn, st in self.statwalk(files, match, ignored=list_ignored):
457 for src, fn, st in self.statwalk(files, match, ignored=list_ignored):
455 try:
458 try:
456 type_, mode, size, time = self._map[fn]
459 type_, mode, size, time = self._map[fn]
457 except KeyError:
460 except KeyError:
458 if list_ignored and self._ignore(fn):
461 if list_ignored and self._ignore(fn):
459 ignored.append(fn)
462 ignored.append(fn)
460 else:
463 else:
461 unknown.append(fn)
464 unknown.append(fn)
462 continue
465 continue
463 if src == 'm':
466 if src == 'm':
464 nonexistent = True
467 nonexistent = True
465 if not st:
468 if not st:
466 try:
469 try:
467 st = os.lstat(self._join(fn))
470 st = os.lstat(self._join(fn))
468 except OSError, inst:
471 except OSError, inst:
469 if inst.errno != errno.ENOENT:
472 if inst.errno != errno.ENOENT:
470 raise
473 raise
471 st = None
474 st = None
472 # We need to re-check that it is a valid file
475 # We need to re-check that it is a valid file
473 if st and self._supported(fn, st):
476 if st and self._supported(fn, st):
474 nonexistent = False
477 nonexistent = False
475 # XXX: what to do with files no longer present in the fs
478 # XXX: what to do with files no longer present in the fs
476 # that are not removed in the dirstate?
479 # that are not removed in the dirstate?
477 if nonexistent and type_ in "nm":
480 if nonexistent and type_ in "nm":
478 deleted.append(fn)
481 deleted.append(fn)
479 continue
482 continue
480 # check the common case first
483 # check the common case first
481 if type_ == 'n':
484 if type_ == 'n':
482 if not st:
485 if not st:
483 st = os.lstat(self._join(fn))
486 st = os.lstat(self._join(fn))
484 if (size >= 0 and (size != st.st_size
487 if (size >= 0 and (size != st.st_size
485 or (mode ^ st.st_mode) & 0100)
488 or (mode ^ st.st_mode) & 0100)
486 or fn in self._copymap):
489 or fn in self._copymap):
487 modified.append(fn)
490 modified.append(fn)
488 elif time != int(st.st_mtime):
491 elif time != int(st.st_mtime):
489 lookup.append(fn)
492 lookup.append(fn)
490 elif list_clean:
493 elif list_clean:
491 clean.append(fn)
494 clean.append(fn)
492 elif type_ == 'm':
495 elif type_ == 'm':
493 modified.append(fn)
496 modified.append(fn)
494 elif type_ == 'a':
497 elif type_ == 'a':
495 added.append(fn)
498 added.append(fn)
496 elif type_ == 'r':
499 elif type_ == 'r':
497 removed.append(fn)
500 removed.append(fn)
498
501
499 return (lookup, modified, added, removed, deleted, unknown, ignored,
502 return (lookup, modified, added, removed, deleted, unknown, ignored,
500 clean)
503 clean)
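# A usage sketch for status() above, assuming the repository object of this
# era exposes the dirstate as repo.dirstate (an assumption, not shown here).
# The eight lists come back in the order of the return statement.
from mercurial import hg, ui, util

repo = hg.repository(ui.ui(), '.')
res = repo.dirstate.status(None, util.always, False, True)
lookup, modified, added, removed, deleted, unknown, ignored, clean = res
print 'modified:', modified
print 'unknown: ', unknown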
@@ -1,122 +1,120 @@
1 # lock.py - simple locking scheme for mercurial
1 # lock.py - simple locking scheme for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import errno, os, socket, time, util
8 import errno, os, socket, time, util
9
9
10 class LockException(IOError):
10 class LockException(IOError):
11 def __init__(self, errno, strerror, filename, desc):
11 def __init__(self, errno, strerror, filename, desc):
12 IOError.__init__(self, errno, strerror, filename)
12 IOError.__init__(self, errno, strerror, filename)
13 self.desc = desc
13 self.desc = desc
14
14
15 class LockHeld(LockException):
15 class LockHeld(LockException):
16 def __init__(self, errno, filename, desc, locker):
16 def __init__(self, errno, filename, desc, locker):
17 LockException.__init__(self, errno, 'Lock held', filename, desc)
17 LockException.__init__(self, errno, 'Lock held', filename, desc)
18 self.locker = locker
18 self.locker = locker
19
19
20 class LockUnavailable(LockException):
20 class LockUnavailable(LockException):
21 pass
21 pass
22
22
23 class lock(object):
23 class lock(object):
24 # lock is symlink on platforms that support it, file on others.
24 # lock is symlink on platforms that support it, file on others.
25
25
26 # a symlink is used because creating the directory entry and its contents
26 # a symlink is used because creating the directory entry and its contents
27 # is atomic, even over NFS.
27 # is atomic, even over NFS.
28
28
29 # old-style lock: symlink to pid
29 # old-style lock: symlink to pid
30 # new-style lock: symlink to hostname:pid
30 # new-style lock: symlink to hostname:pid
31
31
32 _host = None
33
32 def __init__(self, file, timeout=-1, releasefn=None, desc=None):
34 def __init__(self, file, timeout=-1, releasefn=None, desc=None):
33 self.f = file
35 self.f = file
34 self.held = 0
36 self.held = 0
35 self.timeout = timeout
37 self.timeout = timeout
36 self.releasefn = releasefn
38 self.releasefn = releasefn
37 self.id = None
38 self.host = None
39 self.pid = None
40 self.desc = desc
39 self.desc = desc
41 self.lock()
40 self.lock()
42
41
43 def __del__(self):
42 def __del__(self):
44 self.release()
43 self.release()
45
44
46 def lock(self):
45 def lock(self):
47 timeout = self.timeout
46 timeout = self.timeout
48 while 1:
47 while 1:
49 try:
48 try:
50 self.trylock()
49 self.trylock()
51 return 1
50 return 1
52 except LockHeld, inst:
51 except LockHeld, inst:
53 if timeout != 0:
52 if timeout != 0:
54 time.sleep(1)
53 time.sleep(1)
55 if timeout > 0:
54 if timeout > 0:
56 timeout -= 1
55 timeout -= 1
57 continue
56 continue
58 raise LockHeld(errno.ETIMEDOUT, inst.filename, self.desc,
57 raise LockHeld(errno.ETIMEDOUT, inst.filename, self.desc,
59 inst.locker)
58 inst.locker)
60
59
61 def trylock(self):
60 def trylock(self):
62 if self.id is None:
61 if lock._host is None:
63 self.host = socket.gethostname()
62 lock._host = socket.gethostname()
64 self.pid = os.getpid()
63 lockname = '%s:%s' % (lock._host, os.getpid())
65 self.id = '%s:%s' % (self.host, self.pid)
66 while not self.held:
64 while not self.held:
67 try:
65 try:
68 util.makelock(self.id, self.f)
66 util.makelock(lockname, self.f)
69 self.held = 1
67 self.held = 1
70 except (OSError, IOError), why:
68 except (OSError, IOError), why:
71 if why.errno == errno.EEXIST:
69 if why.errno == errno.EEXIST:
72 locker = self.testlock()
70 locker = self.testlock()
73 if locker is not None:
71 if locker is not None:
74 raise LockHeld(errno.EAGAIN, self.f, self.desc,
72 raise LockHeld(errno.EAGAIN, self.f, self.desc,
75 locker)
73 locker)
76 else:
74 else:
77 raise LockUnavailable(why.errno, why.strerror,
75 raise LockUnavailable(why.errno, why.strerror,
78 why.filename, self.desc)
76 why.filename, self.desc)
79
77
80 def testlock(self):
78 def testlock(self):
81 """return id of locker if lock is valid, else None.
79 """return id of locker if lock is valid, else None.
82
80
83 If old-style lock, we cannot tell what machine locker is on.
81 If old-style lock, we cannot tell what machine locker is on.
84 with new-style lock, if locker is on this machine, we can
82 with new-style lock, if locker is on this machine, we can
85 see if locker is alive. If locker is on this machine but
83 see if locker is alive. If locker is on this machine but
86 not alive, we can safely break lock.
84 not alive, we can safely break lock.
87
85
88 The lock file is only deleted when None is returned.
86 The lock file is only deleted when None is returned.
89
87
90 """
88 """
91 locker = util.readlock(self.f)
89 locker = util.readlock(self.f)
92 try:
90 try:
93 host, pid = locker.split(":", 1)
91 host, pid = locker.split(":", 1)
94 except ValueError:
92 except ValueError:
95 return locker
93 return locker
96 if host != self.host:
94 if host != lock._host:
97 return locker
95 return locker
98 try:
96 try:
99 pid = int(pid)
97 pid = int(pid)
100 except:
98 except:
101 return locker
99 return locker
102 if util.testpid(pid):
100 if util.testpid(pid):
103 return locker
101 return locker
104 # if locker dead, break lock. must do this with another lock
102 # if locker dead, break lock. must do this with another lock
105 # held, or can race and break valid lock.
103 # held, or can race and break valid lock.
106 try:
104 try:
107 l = lock(self.f + '.break')
105 l = lock(self.f + '.break')
108 l.trylock()
106 l.trylock()
109 os.unlink(self.f)
107 os.unlink(self.f)
110 l.release()
108 l.release()
111 except (LockHeld, LockUnavailable):
109 except (LockHeld, LockUnavailable):
112 return locker
110 return locker
113
111
114 def release(self):
112 def release(self):
115 if self.held:
113 if self.held:
116 self.held = 0
114 self.held = 0
117 if self.releasefn:
115 if self.releasefn:
118 self.releasefn()
116 self.releasefn()
119 try:
117 try:
120 os.unlink(self.f)
118 os.unlink(self.f)
121 except: pass
119 except: pass
122
120
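# A minimal sketch of taking and releasing a lock with the class above,
# assuming it is importable as mercurial.lock (the module name matches the
# header comment but is not shown in this hunk; the lock path is made up).
from mercurial import lock as lockmod

try:
    l = lockmod.lock('/tmp/demo.lock', timeout=5, desc='demo lock')
except lockmod.LockHeld, inst:
    print 'already locked by', inst.locker
else:
    try:
        pass   # critical section: the lock file exists until release()
    finally:
        l.release()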
@@ -1,1319 +1,1319 @@
1 # patch.py - patch file parsing routines
1 # patch.py - patch file parsing routines
2 #
2 #
3 # Copyright 2006 Brendan Cully <brendan@kublai.com>
3 # Copyright 2006 Brendan Cully <brendan@kublai.com>
4 # Copyright 2007 Chris Mason <chris.mason@oracle.com>
4 # Copyright 2007 Chris Mason <chris.mason@oracle.com>
5 #
5 #
6 # This software may be used and distributed according to the terms
6 # This software may be used and distributed according to the terms
7 # of the GNU General Public License, incorporated herein by reference.
7 # of the GNU General Public License, incorporated herein by reference.
8
8
9 from i18n import _
9 from i18n import _
10 from node import *
10 from node import *
11 import base85, cmdutil, mdiff, util, context, revlog, diffhelpers
11 import base85, cmdutil, mdiff, util, context, revlog, diffhelpers
12 import cStringIO, email.Parser, os, popen2, re, sha
12 import cStringIO, email.Parser, os, popen2, re, sha
13 import sys, tempfile, zlib
13 import sys, tempfile, zlib
14
14
15 class PatchError(Exception):
15 class PatchError(Exception):
16 pass
16 pass
17
17
18 class NoHunks(PatchError):
18 class NoHunks(PatchError):
19 pass
19 pass
20
20
21 # helper functions
21 # helper functions
22
22
23 def copyfile(src, dst, basedir=None):
23 def copyfile(src, dst, basedir=None):
24 if not basedir:
24 if not basedir:
25 basedir = os.getcwd()
25 basedir = os.getcwd()
26
26
27 abssrc, absdst = [os.path.join(basedir, n) for n in (src, dst)]
27 abssrc, absdst = [os.path.join(basedir, n) for n in (src, dst)]
28 if os.path.exists(absdst):
28 if os.path.exists(absdst):
29 raise util.Abort(_("cannot create %s: destination already exists") %
29 raise util.Abort(_("cannot create %s: destination already exists") %
30 dst)
30 dst)
31
31
32 targetdir = os.path.dirname(absdst)
32 targetdir = os.path.dirname(absdst)
33 if not os.path.isdir(targetdir):
33 if not os.path.isdir(targetdir):
34 os.makedirs(targetdir)
34 os.makedirs(targetdir)
35
35
36 util.copyfile(abssrc, absdst)
36 util.copyfile(abssrc, absdst)
37
37
38 # public functions
38 # public functions
39
39
40 def extract(ui, fileobj):
40 def extract(ui, fileobj):
41 '''extract patch from data read from fileobj.
41 '''extract patch from data read from fileobj.
42
42
43 patch can be a normal patch or contained in an email message.
43 patch can be a normal patch or contained in an email message.
44
44
45 return tuple (filename, message, user, date, branch, node, p1, p2).
45 return tuple (filename, message, user, date, branch, node, p1, p2).
46 Any item in the returned tuple can be None. If filename is None,
46 Any item in the returned tuple can be None. If filename is None,
47 fileobj did not contain a patch. Caller must unlink filename when done.'''
47 fileobj did not contain a patch. Caller must unlink filename when done.'''
48
48
49 # attempt to detect the start of a patch
49 # attempt to detect the start of a patch
50 # (this heuristic is borrowed from quilt)
50 # (this heuristic is borrowed from quilt)
51 diffre = re.compile(r'^(?:Index:[ \t]|diff[ \t]|RCS file: |' +
51 diffre = re.compile(r'^(?:Index:[ \t]|diff[ \t]|RCS file: |' +
52 'retrieving revision [0-9]+(\.[0-9]+)*$|' +
52 'retrieving revision [0-9]+(\.[0-9]+)*$|' +
53 '(---|\*\*\*)[ \t])', re.MULTILINE)
53 '(---|\*\*\*)[ \t])', re.MULTILINE)
54
54
55 fd, tmpname = tempfile.mkstemp(prefix='hg-patch-')
55 fd, tmpname = tempfile.mkstemp(prefix='hg-patch-')
56 tmpfp = os.fdopen(fd, 'w')
56 tmpfp = os.fdopen(fd, 'w')
57 try:
57 try:
58 msg = email.Parser.Parser().parse(fileobj)
58 msg = email.Parser.Parser().parse(fileobj)
59
59
60 subject = msg['Subject']
60 subject = msg['Subject']
61 user = msg['From']
61 user = msg['From']
62 # should try to parse msg['Date']
62 # should try to parse msg['Date']
63 date = None
63 date = None
64 nodeid = None
64 nodeid = None
65 branch = None
65 branch = None
66 parents = []
66 parents = []
67
67
68 if subject:
68 if subject:
69 if subject.startswith('[PATCH'):
69 if subject.startswith('[PATCH'):
70 pend = subject.find(']')
70 pend = subject.find(']')
71 if pend >= 0:
71 if pend >= 0:
72 subject = subject[pend+1:].lstrip()
72 subject = subject[pend+1:].lstrip()
73 subject = subject.replace('\n\t', ' ')
73 subject = subject.replace('\n\t', ' ')
74 ui.debug('Subject: %s\n' % subject)
74 ui.debug('Subject: %s\n' % subject)
75 if user:
75 if user:
76 ui.debug('From: %s\n' % user)
76 ui.debug('From: %s\n' % user)
77 diffs_seen = 0
77 diffs_seen = 0
78 ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
78 ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
79 message = ''
79 message = ''
80 for part in msg.walk():
80 for part in msg.walk():
81 content_type = part.get_content_type()
81 content_type = part.get_content_type()
82 ui.debug('Content-Type: %s\n' % content_type)
82 ui.debug('Content-Type: %s\n' % content_type)
83 if content_type not in ok_types:
83 if content_type not in ok_types:
84 continue
84 continue
85 payload = part.get_payload(decode=True)
85 payload = part.get_payload(decode=True)
86 m = diffre.search(payload)
86 m = diffre.search(payload)
87 if m:
87 if m:
88 hgpatch = False
88 hgpatch = False
89 ignoretext = False
89 ignoretext = False
90
90
91 ui.debug(_('found patch at byte %d\n') % m.start(0))
91 ui.debug(_('found patch at byte %d\n') % m.start(0))
92 diffs_seen += 1
92 diffs_seen += 1
93 cfp = cStringIO.StringIO()
93 cfp = cStringIO.StringIO()
94 for line in payload[:m.start(0)].splitlines():
94 for line in payload[:m.start(0)].splitlines():
95 if line.startswith('# HG changeset patch'):
95 if line.startswith('# HG changeset patch'):
96 ui.debug(_('patch generated by hg export\n'))
96 ui.debug(_('patch generated by hg export\n'))
97 hgpatch = True
97 hgpatch = True
98 # drop earlier commit message content
98 # drop earlier commit message content
99 cfp.seek(0)
99 cfp.seek(0)
100 cfp.truncate()
100 cfp.truncate()
101 subject = None
101 subject = None
102 elif hgpatch:
102 elif hgpatch:
103 if line.startswith('# User '):
103 if line.startswith('# User '):
104 user = line[7:]
104 user = line[7:]
105 ui.debug('From: %s\n' % user)
105 ui.debug('From: %s\n' % user)
106 elif line.startswith("# Date "):
106 elif line.startswith("# Date "):
107 date = line[7:]
107 date = line[7:]
108 elif line.startswith("# Branch "):
108 elif line.startswith("# Branch "):
109 branch = line[9:]
109 branch = line[9:]
110 elif line.startswith("# Node ID "):
110 elif line.startswith("# Node ID "):
111 nodeid = line[10:]
111 nodeid = line[10:]
112 elif line.startswith("# Parent "):
112 elif line.startswith("# Parent "):
113 parents.append(line[10:])
113 parents.append(line[10:])
114 elif line == '---' and 'git-send-email' in msg['X-Mailer']:
114 elif line == '---' and 'git-send-email' in msg['X-Mailer']:
115 ignoretext = True
115 ignoretext = True
116 if not line.startswith('# ') and not ignoretext:
116 if not line.startswith('# ') and not ignoretext:
117 cfp.write(line)
117 cfp.write(line)
118 cfp.write('\n')
118 cfp.write('\n')
119 message = cfp.getvalue()
119 message = cfp.getvalue()
120 if tmpfp:
120 if tmpfp:
121 tmpfp.write(payload)
121 tmpfp.write(payload)
122 if not payload.endswith('\n'):
122 if not payload.endswith('\n'):
123 tmpfp.write('\n')
123 tmpfp.write('\n')
124 elif not diffs_seen and message and content_type == 'text/plain':
124 elif not diffs_seen and message and content_type == 'text/plain':
125 message += '\n' + payload
125 message += '\n' + payload
126 except:
126 except:
127 tmpfp.close()
127 tmpfp.close()
128 os.unlink(tmpname)
128 os.unlink(tmpname)
129 raise
129 raise
130
130
131 if subject and not message.startswith(subject):
131 if subject and not message.startswith(subject):
132 message = '%s\n%s' % (subject, message)
132 message = '%s\n%s' % (subject, message)
133 tmpfp.close()
133 tmpfp.close()
134 if not diffs_seen:
134 if not diffs_seen:
135 os.unlink(tmpname)
135 os.unlink(tmpname)
136 return None, message, user, date, branch, None, None, None
136 return None, message, user, date, branch, None, None, None
137 p1 = parents and parents.pop(0) or None
137 p1 = parents and parents.pop(0) or None
138 p2 = parents and parents.pop(0) or None
138 p2 = parents and parents.pop(0) or None
139 return tmpname, message, user, date, branch, nodeid, p1, p2
139 return tmpname, message, user, date, branch, nodeid, p1, p2
140
140
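# A usage sketch for extract() above; the temporary file it returns must be
# unlinked by the caller, as the docstring says (mercurial.patch as the
# module path and the mailbox file name are assumptions).
import os
from mercurial import ui, patch

fp = open('incoming-patch.eml')
tmpname, message, user, date, branch, nodeid, p1, p2 = \
    patch.extract(ui.ui(), fp)
fp.close()
if tmpname:
    try:
        print 'patch from %s, %d byte message' % (user, len(message))
    finally:
        os.unlink(tmpname)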
141 GP_PATCH = 1 << 0 # we have to run patch
141 GP_PATCH = 1 << 0 # we have to run patch
142 GP_FILTER = 1 << 1 # there's some copy/rename operation
142 GP_FILTER = 1 << 1 # there's some copy/rename operation
143 GP_BINARY = 1 << 2 # there's a binary patch
143 GP_BINARY = 1 << 2 # there's a binary patch
144
144
145 def readgitpatch(fp, firstline):
145 def readgitpatch(fp, firstline):
146 """extract git-style metadata about patches from <patchname>"""
146 """extract git-style metadata about patches from <patchname>"""
147 class gitpatch:
147 class gitpatch:
148 "op is one of ADD, DELETE, RENAME, MODIFY or COPY"
148 "op is one of ADD, DELETE, RENAME, MODIFY or COPY"
149 def __init__(self, path):
149 def __init__(self, path):
150 self.path = path
150 self.path = path
151 self.oldpath = None
151 self.oldpath = None
152 self.mode = None
152 self.mode = None
153 self.op = 'MODIFY'
153 self.op = 'MODIFY'
154 self.copymod = False
154 self.copymod = False
155 self.lineno = 0
155 self.lineno = 0
156 self.binary = False
156 self.binary = False
157
157
158 def reader(fp, firstline):
158 def reader(fp, firstline):
159 yield firstline
159 yield firstline
160 for line in fp:
160 for line in fp:
161 yield line
161 yield line
162
162
163 # Filter patch for git information
163 # Filter patch for git information
164 gitre = re.compile('diff --git a/(.*) b/(.*)')
164 gitre = re.compile('diff --git a/(.*) b/(.*)')
165 gp = None
165 gp = None
166 gitpatches = []
166 gitpatches = []
167 # Can have a git patch with only metadata, causing patch to complain
167 # Can have a git patch with only metadata, causing patch to complain
168 dopatch = 0
168 dopatch = 0
169
169
170 lineno = 0
170 lineno = 0
171 for line in reader(fp, firstline):
171 for line in reader(fp, firstline):
172 lineno += 1
172 lineno += 1
173 if line.startswith('diff --git'):
173 if line.startswith('diff --git'):
174 m = gitre.match(line)
174 m = gitre.match(line)
175 if m:
175 if m:
176 if gp:
176 if gp:
177 gitpatches.append(gp)
177 gitpatches.append(gp)
178 src, dst = m.group(1, 2)
178 src, dst = m.group(1, 2)
179 gp = gitpatch(dst)
179 gp = gitpatch(dst)
180 gp.lineno = lineno
180 gp.lineno = lineno
181 elif gp:
181 elif gp:
182 if line.startswith('--- '):
182 if line.startswith('--- '):
183 if gp.op in ('COPY', 'RENAME'):
183 if gp.op in ('COPY', 'RENAME'):
184 gp.copymod = True
184 gp.copymod = True
185 dopatch |= GP_FILTER
185 dopatch |= GP_FILTER
186 gitpatches.append(gp)
186 gitpatches.append(gp)
187 gp = None
187 gp = None
188 dopatch |= GP_PATCH
188 dopatch |= GP_PATCH
189 continue
189 continue
190 if line.startswith('rename from '):
190 if line.startswith('rename from '):
191 gp.op = 'RENAME'
191 gp.op = 'RENAME'
192 gp.oldpath = line[12:].rstrip()
192 gp.oldpath = line[12:].rstrip()
193 elif line.startswith('rename to '):
193 elif line.startswith('rename to '):
194 gp.path = line[10:].rstrip()
194 gp.path = line[10:].rstrip()
195 elif line.startswith('copy from '):
195 elif line.startswith('copy from '):
196 gp.op = 'COPY'
196 gp.op = 'COPY'
197 gp.oldpath = line[10:].rstrip()
197 gp.oldpath = line[10:].rstrip()
198 elif line.startswith('copy to '):
198 elif line.startswith('copy to '):
199 gp.path = line[8:].rstrip()
199 gp.path = line[8:].rstrip()
200 elif line.startswith('deleted file'):
200 elif line.startswith('deleted file'):
201 gp.op = 'DELETE'
201 gp.op = 'DELETE'
202 elif line.startswith('new file mode '):
202 elif line.startswith('new file mode '):
203 gp.op = 'ADD'
203 gp.op = 'ADD'
204 gp.mode = int(line.rstrip()[-3:], 8)
204 gp.mode = int(line.rstrip()[-3:], 8)
205 elif line.startswith('new mode '):
205 elif line.startswith('new mode '):
206 gp.mode = int(line.rstrip()[-3:], 8)
206 gp.mode = int(line.rstrip()[-3:], 8)
207 elif line.startswith('GIT binary patch'):
207 elif line.startswith('GIT binary patch'):
208 dopatch |= GP_BINARY
208 dopatch |= GP_BINARY
209 gp.binary = True
209 gp.binary = True
210 if gp:
210 if gp:
211 gitpatches.append(gp)
211 gitpatches.append(gp)
212
212
213 if not gitpatches:
213 if not gitpatches:
214 dopatch = GP_PATCH
214 dopatch = GP_PATCH
215
215
216 return (dopatch, gitpatches)
216 return (dopatch, gitpatches)
217
217
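# A sketch of consuming readgitpatch() above, using the module-level names
# defined in this file; dopatch is a bitmask of the GP_* flags and
# gitpatches holds one metadata object per "diff --git" block (the diff
# file name is made up).
fp = open('some-git-style.diff')
firstline = fp.readline()
dopatch, gitpatches = readgitpatch(fp, firstline)
fp.close()
if dopatch & GP_BINARY:
    print 'contains binary hunks'
for gp in gitpatches:
    print gp.op, gp.oldpath, '->', gp.path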
218 def patch(patchname, ui, strip=1, cwd=None, files={}):
218 def patch(patchname, ui, strip=1, cwd=None, files={}):
219 """apply <patchname> to the working directory.
219 """apply <patchname> to the working directory.
220 returns whether patch was applied with fuzz factor."""
220 returns whether patch was applied with fuzz factor."""
221 patcher = ui.config('ui', 'patch')
221 patcher = ui.config('ui', 'patch')
222 args = []
222 args = []
223 try:
223 try:
224 if patcher:
224 if patcher:
225 return externalpatch(patcher, args, patchname, ui, strip, cwd,
225 return externalpatch(patcher, args, patchname, ui, strip, cwd,
226 files)
226 files)
227 else:
227 else:
228 try:
228 try:
229 return internalpatch(patchname, ui, strip, cwd, files)
229 return internalpatch(patchname, ui, strip, cwd, files)
230 except NoHunks:
230 except NoHunks:
231 patcher = util.find_exe('gpatch') or util.find_exe('patch')
231 patcher = util.find_exe('gpatch') or util.find_exe('patch')
232 ui.debug('no valid hunks found; trying with %r instead\n' %
232 ui.debug('no valid hunks found; trying with %r instead\n' %
233 patcher)
233 patcher)
234 if util.needbinarypatch():
234 if util.needbinarypatch():
235 args.append('--binary')
235 args.append('--binary')
236 return externalpatch(patcher, args, patchname, ui, strip, cwd,
236 return externalpatch(patcher, args, patchname, ui, strip, cwd,
237 files)
237 files)
238 except PatchError, err:
238 except PatchError, err:
239 s = str(err)
239 s = str(err)
240 if s:
240 if s:
241 raise util.Abort(s)
241 raise util.Abort(s)
242 else:
242 else:
243 raise util.Abort(_('patch failed to apply'))
243 raise util.Abort(_('patch failed to apply'))
244
244
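# A sketch of applying a patch file through patch() above (mercurial.patch
# as the module path and the patch file name are assumptions); the files
# dict collects the touched paths and the return value reports whether any
# hunk applied with fuzz.
from mercurial import ui, patch

files = {}
fuzz = patch.patch('fix-typo.diff', ui.ui(), strip=1, files=files)
print 'applied with fuzz:', fuzz
print 'touched files:', files.keys()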
def externalpatch(patcher, args, patchname, ui, strip, cwd, files):
    """use <patcher> to apply <patchname> to the working directory.
    returns whether patch was applied with fuzz factor."""

    fuzz = False
    if cwd:
        args.append('-d %s' % util.shellquote(cwd))
    fp = os.popen('%s %s -p%d < %s' % (patcher, ' '.join(args), strip,
                                       util.shellquote(patchname)))

    for line in fp:
        line = line.rstrip()
        ui.note(line + '\n')
        if line.startswith('patching file '):
            pf = util.parse_patch_output(line)
            printed_file = False
            files.setdefault(pf, (None, None))
        elif line.find('with fuzz') >= 0:
            fuzz = True
            if not printed_file:
                ui.warn(pf + '\n')
                printed_file = True
            ui.warn(line + '\n')
        elif line.find('saving rejects to file') >= 0:
            ui.warn(line + '\n')
        elif line.find('FAILED') >= 0:
            if not printed_file:
                ui.warn(pf + '\n')
                printed_file = True
            ui.warn(line + '\n')
    code = fp.close()
    if code:
        raise PatchError(_("patch command failed: %s") %
                         util.explain_exit(code)[0])
    return fuzz

def internalpatch(patchname, ui, strip, cwd, files):
    """use builtin patch to apply <patchname> to the working directory.
    returns whether patch was applied with fuzz factor."""
    fp = file(patchname, 'rb')
    if cwd:
        curdir = os.getcwd()
        os.chdir(cwd)
    try:
        ret = applydiff(ui, fp, files, strip=strip)
    finally:
        if cwd:
            os.chdir(curdir)
    if ret < 0:
        raise PatchError
    return ret > 0

# @@ -start,len +start,len @@ or @@ -start +start @@ if len is 1
unidesc = re.compile('@@ -(\d+)(,(\d+))? \+(\d+)(,(\d+))? @@')
contextdesc = re.compile('(---|\*\*\*) (\d+)(,(\d+))? (---|\*\*\*)')
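# Examples of what these match: unidesc recognizes unified hunk headers such
# as "@@ -12,8 +12,10 @@" (the ",len" part may be missing when it is 1);
# contextdesc recognizes the range lines of old-style context diffs such as
# "*** 10,15 ****" and "--- 10,17 ----".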
class patchfile:
    def __init__(self, ui, fname):
        self.fname = fname
        self.ui = ui
        try:
            fp = file(fname, 'rb')
            self.lines = fp.readlines()
            self.exists = True
        except IOError:
            dirname = os.path.dirname(fname)
            if dirname and not os.path.isdir(dirname):
                dirs = dirname.split(os.path.sep)
                d = ""
                for x in dirs:
                    d = os.path.join(d, x)
                    if not os.path.isdir(d):
                        os.mkdir(d)
            self.lines = []
            self.exists = False

        self.hash = {}
        self.dirty = 0
        self.offset = 0
        self.rej = []
        self.fileprinted = False
        self.printfile(False)
        self.hunks = 0

    def printfile(self, warn):
        if self.fileprinted:
            return
        if warn or self.ui.verbose:
            self.fileprinted = True
        s = _("patching file %s\n") % self.fname
        if warn:
            self.ui.warn(s)
        else:
            self.ui.note(s)


    def findlines(self, l, linenum):
        # looks through the hash and finds candidate lines.  The
        # result is a list of line numbers sorted based on distance
        # from linenum
        def sorter(a, b):
            vala = abs(a - linenum)
            valb = abs(b - linenum)
            return cmp(vala, valb)

        try:
            cand = self.hash[l]
        except:
            return []

        if len(cand) > 1:
            # resort our list of potentials forward then back.
            cand.sort(cmp=sorter)
        return cand

    def hashlines(self):
        self.hash = {}
        for x in xrange(len(self.lines)):
            s = self.lines[x]
            self.hash.setdefault(s, []).append(x)

    def write_rej(self):
        # our rejects are a little different from patch(1).  This always
        # creates rejects in the same form as the original patch.  A file
        # header is inserted so that you can run the reject through patch again
        # without having to type the filename.

        if not self.rej:
            return
        if self.hunks != 1:
            hunkstr = "s"
        else:
            hunkstr = ""

        fname = self.fname + ".rej"
        self.ui.warn(
            _("%d out of %d hunk%s FAILED -- saving rejects to file %s\n") %
            (len(self.rej), self.hunks, hunkstr, fname))
        try: os.unlink(fname)
        except:
            pass
        fp = file(fname, 'wb')
        base = os.path.basename(self.fname)
        fp.write("--- %s\n+++ %s\n" % (base, base))
        for x in self.rej:
            for l in x.hunk:
                fp.write(l)
                if l[-1] != '\n':
                    fp.write("\n\ No newline at end of file\n")

    def write(self, dest=None):
        if self.dirty:
            if not dest:
                dest = self.fname
            st = None
            try:
                st = os.lstat(dest)
                if st.st_nlink > 1:
                    os.unlink(dest)
            except: pass
            fp = file(dest, 'wb')
            if st:
                os.chmod(dest, st.st_mode)
            fp.writelines(self.lines)
            fp.close()

    def close(self):
        self.write()
        self.write_rej()

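    # apply() returns -1 if the hunk was rejected, 0 if it applied cleanly,
    # and the fuzz amount (1 or 2) if context lines had to be ignored.  It
    # first tries the hunk at the expected offset and only then searches
    # nearby candidate lines (findlines) with increasing fuzz.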
    def apply(self, h, reverse):
        if not h.complete():
            raise PatchError(_("bad hunk #%d %s (%d %d %d %d)") %
                             (h.number, h.desc, len(h.a), h.lena, len(h.b),
                              h.lenb))

        self.hunks += 1
        if reverse:
            h.reverse()

        if self.exists and h.createfile():
            self.ui.warn(_("file %s already exists\n") % self.fname)
            self.rej.append(h)
            return -1

        if isinstance(h, binhunk):
            if h.rmfile():
                os.unlink(self.fname)
            else:
                self.lines[:] = h.new()
                self.offset += len(h.new())
                self.dirty = 1
            return 0

        # fast case first, no offsets, no fuzz
        old = h.old()
        # patch starts counting at 1 unless we are adding the file
        if h.starta == 0:
            start = 0
        else:
            start = h.starta + self.offset - 1
        orig_start = start
        if diffhelpers.testhunk(old, self.lines, start) == 0:
            if h.rmfile():
                os.unlink(self.fname)
            else:
                self.lines[start : start + h.lena] = h.new()
                self.offset += h.lenb - h.lena
                self.dirty = 1
            return 0

        # ok, we couldn't match the hunk.  Let's look for offsets and fuzz it
        self.hashlines()
        if h.hunk[-1][0] != ' ':
            # if the hunk tried to put something at the bottom of the file
            # override the start line and use eof here
            search_start = len(self.lines)
        else:
            search_start = orig_start

        for fuzzlen in xrange(3):
            for toponly in [ True, False ]:
                old = h.old(fuzzlen, toponly)

                cand = self.findlines(old[0][1:], search_start)
                for l in cand:
                    if diffhelpers.testhunk(old, self.lines, l) == 0:
                        newlines = h.new(fuzzlen, toponly)
                        self.lines[l : l + len(old)] = newlines
                        self.offset += len(newlines) - len(old)
                        self.dirty = 1
                        if fuzzlen:
                            fuzzstr = "with fuzz %d " % fuzzlen
                            f = self.ui.warn
                            self.printfile(True)
                        else:
                            fuzzstr = ""
                            f = self.ui.note
                        offset = l - orig_start - fuzzlen
                        if offset == 1:
                            linestr = "line"
                        else:
                            linestr = "lines"
                        f(_("Hunk #%d succeeded at %d %s(offset %d %s).\n") %
                          (h.number, l+1, fuzzstr, offset, linestr))
                        return fuzzlen
        self.printfile(True)
        self.ui.warn(_("Hunk #%d FAILED at %d\n") % (h.number, orig_start))
        self.rej.append(h)
        return -1

class hunk:
    def __init__(self, desc, num, lr, context):
        self.number = num
        self.desc = desc
        self.hunk = [ desc ]
        self.a = []
        self.b = []
        if context:
            self.read_context_hunk(lr)
        else:
            self.read_unified_hunk(lr)

    def read_unified_hunk(self, lr):
        m = unidesc.match(self.desc)
        if not m:
            raise PatchError(_("bad hunk #%d") % self.number)
        self.starta, foo, self.lena, self.startb, foo2, self.lenb = m.groups()
        if self.lena == None:
            self.lena = 1
        else:
            self.lena = int(self.lena)
        if self.lenb == None:
            self.lenb = 1
        else:
            self.lenb = int(self.lenb)
        self.starta = int(self.starta)
        self.startb = int(self.startb)
        diffhelpers.addlines(lr.fp, self.hunk, self.lena, self.lenb, self.a, self.b)
        # if we hit eof before finishing out the hunk, the last line will
        # be zero length.  Let's try to fix it up.
        while len(self.hunk[-1]) == 0:
            del self.hunk[-1]
            del self.a[-1]
            del self.b[-1]
            self.lena -= 1
            self.lenb -= 1

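    # read_context_hunk parses an old-style context diff hunk (the
    # "*** 10,15 ****" / "--- 10,17 ----" form) and normalizes it into the
    # same a/b line lists and "@@ -a,l +b,l @@" description that unified
    # hunks use, so the rest of the code only ever sees unified hunks.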
    def read_context_hunk(self, lr):
        self.desc = lr.readline()
        m = contextdesc.match(self.desc)
        if not m:
            raise PatchError(_("bad hunk #%d") % self.number)
        foo, self.starta, foo2, aend, foo3 = m.groups()
        self.starta = int(self.starta)
        if aend == None:
            aend = self.starta
        self.lena = int(aend) - self.starta
        if self.starta:
            self.lena += 1
        for x in xrange(self.lena):
            l = lr.readline()
            if l.startswith('---'):
                lr.push(l)
                break
            s = l[2:]
            if l.startswith('- ') or l.startswith('! '):
                u = '-' + s
            elif l.startswith('  '):
                u = ' ' + s
            else:
                raise PatchError(_("bad hunk #%d old text line %d") %
                                 (self.number, x))
            self.a.append(u)
            self.hunk.append(u)

        l = lr.readline()
        if l.startswith('\ '):
            s = self.a[-1][:-1]
            self.a[-1] = s
            self.hunk[-1] = s
            l = lr.readline()
        m = contextdesc.match(l)
        if not m:
            raise PatchError(_("bad hunk #%d") % self.number)
        foo, self.startb, foo2, bend, foo3 = m.groups()
        self.startb = int(self.startb)
        if bend == None:
            bend = self.startb
        self.lenb = int(bend) - self.startb
        if self.startb:
            self.lenb += 1
        hunki = 1
        for x in xrange(self.lenb):
            l = lr.readline()
            if l.startswith('\ '):
                s = self.b[-1][:-1]
                self.b[-1] = s
                self.hunk[hunki-1] = s
                continue
            if not l:
                lr.push(l)
                break
            s = l[2:]
            if l.startswith('+ ') or l.startswith('! '):
                u = '+' + s
            elif l.startswith('  '):
                u = ' ' + s
            elif len(self.b) == 0:
                # this can happen when the hunk does not add any lines
                lr.push(l)
                break
            else:
                raise PatchError(_("bad hunk #%d old text line %d") %
                                 (self.number, x))
            self.b.append(s)
            while True:
                if hunki >= len(self.hunk):
                    h = ""
                else:
                    h = self.hunk[hunki]
                hunki += 1
                if h == u:
                    break
                elif h.startswith('-'):
                    continue
                else:
                    self.hunk.insert(hunki-1, u)
                    break

        if not self.a:
            # this happens when lines were only added to the hunk
            for x in self.hunk:
                if x.startswith('-') or x.startswith(' '):
                    self.a.append(x)
        if not self.b:
            # this happens when lines were only deleted from the hunk
            for x in self.hunk:
                if x.startswith('+') or x.startswith(' '):
                    self.b.append(x[1:])
        # @@ -start,len +start,len @@
        self.desc = "@@ -%d,%d +%d,%d @@\n" % (self.starta, self.lena,
                                               self.startb, self.lenb)
        self.hunk[0] = self.desc

    def reverse(self):
        origlena = self.lena
        origstarta = self.starta
        self.lena = self.lenb
        self.starta = self.startb
        self.lenb = origlena
        self.startb = origstarta
        self.a = []
        self.b = []
        # self.hunk[0] is the @@ description
        for x in xrange(1, len(self.hunk)):
            o = self.hunk[x]
            if o.startswith('-'):
                n = '+' + o[1:]
                self.b.append(o[1:])
            elif o.startswith('+'):
                n = '-' + o[1:]
                self.a.append(n)
            else:
                n = o
                self.b.append(o[1:])
                self.a.append(o)
            self.hunk[x] = n

    def fix_newline(self):
        diffhelpers.fix_newline(self.hunk, self.a, self.b)

    def complete(self):
        return len(self.a) == self.lena and len(self.b) == self.lenb

    def createfile(self):
        return self.starta == 0 and self.lena == 0

    def rmfile(self):
        return self.startb == 0 and self.lenb == 0

    def fuzzit(self, l, fuzz, toponly):
        # this removes context lines from the top and bottom of list 'l'.  It
        # checks the hunk to make sure only context lines are removed, and then
        # returns a new shortened list of lines.
        fuzz = min(fuzz, len(l)-1)
        if fuzz:
            top = 0
            bot = 0
            hlen = len(self.hunk)
            for x in xrange(hlen-1):
                # the hunk starts with the @@ line, so use x+1
                if self.hunk[x+1][0] == ' ':
                    top += 1
                else:
                    break
            if not toponly:
                for x in xrange(hlen-1):
                    if self.hunk[hlen-bot-1][0] == ' ':
                        bot += 1
                    else:
                        break

            # top and bot now count context in the hunk
            # adjust them if either one is short
            context = max(top, bot, 3)
            if bot < context:
                bot = max(0, fuzz - (context - bot))
            else:
                bot = min(fuzz, bot)
            if top < context:
                top = max(0, fuzz - (context - top))
            else:
                top = min(fuzz, top)

            return l[top:len(l)-bot]
        return l

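    # old() and new() return the hunk's "before" and "after" text with up to
    # `fuzz` leading/trailing context lines trimmed (via fuzzit above), which
    # is how apply() retries a hunk that does not match exactly -- the same
    # idea as the fuzz factor in patch(1).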
    def old(self, fuzz=0, toponly=False):
        return self.fuzzit(self.a, fuzz, toponly)

    def newctrl(self):
        res = []
        for x in self.hunk:
            c = x[0]
            if c == ' ' or c == '+':
                res.append(x)
        return res

    def new(self, fuzz=0, toponly=False):
        return self.fuzzit(self.b, fuzz, toponly)

class binhunk:
    'A binary patch file. Only understands literals so far.'
    def __init__(self, gitpatch):
        self.gitpatch = gitpatch
        self.text = None
        self.hunk = ['GIT binary patch\n']

    def createfile(self):
        return self.gitpatch.op in ('ADD', 'RENAME', 'COPY')

    def rmfile(self):
        return self.gitpatch.op == 'DELETE'

    def complete(self):
        return self.text is not None

    def new(self):
        return [self.text]

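    # extract() reads a "literal <size>" section of a git binary patch: each
    # following line starts with one character giving the decoded chunk
    # length (A-Z for 1-26 bytes, a-z for 27-52), followed by base85 data.
    # The concatenated chunks are zlib-decompressed and must match <size>.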
    def extract(self, fp):
        line = fp.readline()
        self.hunk.append(line)
        while line and not line.startswith('literal '):
            line = fp.readline()
            self.hunk.append(line)
        if not line:
            raise PatchError(_('could not extract binary patch'))
        size = int(line[8:].rstrip())
        dec = []
        line = fp.readline()
        self.hunk.append(line)
        while len(line) > 1:
            l = line[0]
            if l <= 'Z' and l >= 'A':
                l = ord(l) - ord('A') + 1
            else:
                l = ord(l) - ord('a') + 27
            dec.append(base85.b85decode(line[1:-1])[:l])
            line = fp.readline()
            self.hunk.append(line)
        text = zlib.decompress(''.join(dec))
        if len(text) != size:
            raise PatchError(_('binary patch is %d bytes, not %d') %
                             (len(text), size))
        self.text = text

def parsefilename(str):
    # --- filename \t|space stuff
    s = str[4:]
    i = s.find('\t')
    if i < 0:
        i = s.find(' ')
        if i < 0:
            return s
    return s[:i]

def selectfile(afile_orig, bfile_orig, hunk, strip, reverse):
    def pathstrip(path, count=1):
        pathlen = len(path)
        i = 0
        if count == 0:
            return path.rstrip()
        while count > 0:
            i = path.find('/', i)
            if i == -1:
                raise PatchError(_("unable to strip away %d dirs from %s") %
                                 (count, path))
            i += 1
            # consume '//' in the path
            while i < pathlen - 1 and path[i] == '/':
                i += 1
            count -= 1
        return path[i:].rstrip()

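    # pathstrip() behaves like the -p option of patch(1): with the default
    # strip=1, 'a/dir/file.c' becomes 'dir/file.c'.  The code below then
    # chooses which of the stripped names actually exists in the working
    # directory (or is about to be created by the hunk).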
    nulla = afile_orig == "/dev/null"
    nullb = bfile_orig == "/dev/null"
    afile = pathstrip(afile_orig, strip)
    gooda = os.path.exists(afile) and not nulla
    bfile = pathstrip(bfile_orig, strip)
    if afile == bfile:
        goodb = gooda
    else:
        goodb = os.path.exists(bfile) and not nullb
    createfunc = hunk.createfile
    if reverse:
        createfunc = hunk.rmfile
    if not goodb and not gooda and not createfunc():
        raise PatchError(_("unable to find %s or %s for patching") %
                         (afile, bfile))
    if gooda and goodb:
        fname = bfile
        if afile in bfile:
            fname = afile
    elif gooda:
        fname = afile
    elif not nullb:
        fname = bfile
        if afile in bfile:
            fname = afile
    elif not nulla:
        fname = afile
    return fname

class linereader:
    # simple class to allow pushing lines back into the input stream
    def __init__(self, fp):
        self.fp = fp
        self.buf = []

    def push(self, line):
        self.buf.append(line)

    def readline(self):
        if self.buf:
            l = self.buf[0]
            del self.buf[0]
            return l
        return self.fp.readline()

def applydiff(ui, fp, changed, strip=1, sourcefile=None, reverse=False,
              rejmerge=None, updatedir=None):
    """reads a patch from fp and tries to apply it.  The dict 'changed' is
    filled in with all of the filenames changed by the patch.  Returns 0
    for a clean patch, -1 if any rejects were found and 1 if there was
    any fuzz."""

    def scangitpatch(fp, firstline, cwd=None):
        '''git patches can modify a file, then copy that file to
        a new file, but expect the source to be the unmodified form.
        So we scan the patch looking for that case so we can do
        the copies ahead of time.'''

        pos = 0
        try:
            pos = fp.tell()
        except IOError:
            fp = cStringIO.StringIO(fp.read())

        (dopatch, gitpatches) = readgitpatch(fp, firstline)
        for gp in gitpatches:
            if gp.copymod:
                copyfile(gp.oldpath, gp.path, basedir=cwd)

        fp.seek(pos)

        return fp, dopatch, gitpatches

    current_hunk = None
    current_file = None
    afile = ""
    bfile = ""
    state = None
    hunknum = 0
    rejects = 0

    git = False
    gitre = re.compile('diff --git (a/.*) (b/.*)')

    # our states
    BFILE = 1
    err = 0
    context = None
    lr = linereader(fp)
    dopatch = True
    gitworkdone = False

    while True:
        newfile = False
        x = lr.readline()
        if not x:
            break
        if current_hunk:
            if x.startswith('\ '):
                current_hunk.fix_newline()
            ret = current_file.apply(current_hunk, reverse)
            if ret >= 0:
                changed.setdefault(current_file.fname, (None, None))
                if ret > 0:
                    err = 1
            current_hunk = None
            gitworkdone = False
        if ((sourcefile or state == BFILE) and ((not context and x[0] == '@') or
            ((context or context == None) and x.startswith('***************')))):
            try:
                if context == None and x.startswith('***************'):
                    context = True
                current_hunk = hunk(x, hunknum + 1, lr, context)
            except PatchError, err:
                ui.debug(err)
                current_hunk = None
                continue
            hunknum += 1
            if not current_file:
                if sourcefile:
                    current_file = patchfile(ui, sourcefile)
                else:
                    current_file = selectfile(afile, bfile, current_hunk,
                                              strip, reverse)
                    current_file = patchfile(ui, current_file)
        elif state == BFILE and x.startswith('GIT binary patch'):
            current_hunk = binhunk(changed[bfile[2:]][1])
            if not current_file:
                if sourcefile:
                    current_file = patchfile(ui, sourcefile)
                else:
                    current_file = selectfile(afile, bfile, current_hunk,
                                              strip, reverse)
                    current_file = patchfile(ui, current_file)
            hunknum += 1
            current_hunk.extract(fp)
        elif x.startswith('diff --git'):
            # check for git diff, scanning the whole patch file if needed
            m = gitre.match(x)
            if m:
                afile, bfile = m.group(1, 2)
                if not git:
                    git = True
                    fp, dopatch, gitpatches = scangitpatch(fp, x)
                    for gp in gitpatches:
                        changed[gp.path] = (gp.op, gp)
                # else error?
                # copy/rename + modify should modify target, not source
                if changed.get(bfile[2:], (None, None))[0] in ('COPY',
                                                               'RENAME'):
                    afile = bfile
                    gitworkdone = True
            newfile = True
        elif x.startswith('---'):
            # check for a unified diff
            l2 = lr.readline()
            if not l2.startswith('+++'):
                lr.push(l2)
                continue
            newfile = True
            context = False
            afile = parsefilename(x)
            bfile = parsefilename(l2)
        elif x.startswith('***'):
            # check for a context diff
            l2 = lr.readline()
            if not l2.startswith('---'):
                lr.push(l2)
                continue
            l3 = lr.readline()
            lr.push(l3)
            if not l3.startswith("***************"):
                lr.push(l2)
                continue
            newfile = True
            context = True
            afile = parsefilename(x)
            bfile = parsefilename(l2)

        if newfile:
            if current_file:
                current_file.close()
                if rejmerge:
                    rejmerge(current_file)
                rejects += len(current_file.rej)
            state = BFILE
            current_file = None
            hunknum = 0
    if current_hunk:
        if current_hunk.complete():
            ret = current_file.apply(current_hunk, reverse)
            if ret >= 0:
                changed.setdefault(current_file.fname, (None, None))
                if ret > 0:
                    err = 1
        else:
            fname = current_file and current_file.fname or None
            raise PatchError(_("malformed patch %s %s") % (fname,
                             current_hunk.desc))
    if current_file:
        current_file.close()
        if rejmerge:
            rejmerge(current_file)
        rejects += len(current_file.rej)
    if updatedir and git:
        updatedir(gitpatches)
    if rejects:
        return -1
    if hunknum == 0 and dopatch and not gitworkdone:
        raise NoHunks
    return err

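# diffopts() merges command-line options with the [diff] section of the
# user's hgrc; for example "git = True" or "showfunc = True" under [diff]
# have the same effect as passing --git or --show-function on the command
# line.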
def diffopts(ui, opts={}, untrusted=False):
    def get(key, name=None):
        return (opts.get(key) or
                ui.configbool('diff', name or key, None, untrusted=untrusted))
    return mdiff.diffopts(
        text=opts.get('text'),
        git=get('git'),
        nodates=get('nodates'),
        showfunc=get('show_function', 'showfunc'),
        ignorews=get('ignore_all_space', 'ignorews'),
        ignorewsamount=get('ignore_space_change', 'ignorewsamount'),
        ignoreblanklines=get('ignore_blank_lines', 'ignoreblanklines'))

def updatedir(ui, repo, patches):
    '''Update dirstate after patch application according to metadata'''
    if not patches:
        return
    copies = []
    removes = {}
    cfiles = patches.keys()
    cwd = repo.getcwd()
    if cwd:
        cfiles = [util.pathto(repo.root, cwd, f) for f in patches.keys()]
    for f in patches:
        ctype, gp = patches[f]
        if ctype == 'RENAME':
            copies.append((gp.oldpath, gp.path, gp.copymod))
            removes[gp.oldpath] = 1
        elif ctype == 'COPY':
            copies.append((gp.oldpath, gp.path, gp.copymod))
        elif ctype == 'DELETE':
            removes[gp.path] = 1
    for src, dst, after in copies:
        if not after:
            copyfile(src, dst, repo.root)
        repo.copy(src, dst)
    removes = removes.keys()
    if removes:
        removes.sort()
        repo.remove(removes, True)
    for f in patches:
        ctype, gp = patches[f]
        if gp and gp.mode:
            x = gp.mode & 0100 != 0
            dst = os.path.join(repo.root, gp.path)
            # patch won't create empty files
            if ctype == 'ADD' and not os.path.exists(dst):
                repo.wwrite(gp.path, '', x and 'x' or '')
            else:
                util.set_exec(dst, x)
    cmdutil.addremove(repo, cfiles)
    files = patches.keys()
    files.extend([r for r in removes if r not in files])
    files.sort()

    return files

def b85diff(fp, to, tn):
    '''print base85-encoded binary diff'''
    def gitindex(text):
        if not text:
            return '0' * 40
        l = len(text)
        s = sha.new('blob %d\0' % l)
        s.update(text)
        return s.hexdigest()

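    # gitindex() reproduces git's blob object id (sha1 of "blob <len>\0" plus
    # the file contents) so the "index" line matches what git would print.
    # fmtline()/chunk() below emit the payload in the same framing that
    # binhunk.extract() parses: a length character followed by base85 data.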
    def fmtline(line):
        l = len(line)
        if l <= 26:
            l = chr(ord('A') + l - 1)
        else:
            l = chr(l - 26 + ord('a') - 1)
        return '%c%s\n' % (l, base85.b85encode(line, True))

    def chunk(text, csize=52):
        l = len(text)
        i = 0
        while i < l:
            yield text[i:i+csize]
            i += csize

    tohash = gitindex(to)
    tnhash = gitindex(tn)
    if tohash == tnhash:
        return ""

    # TODO: deltas
    ret = ['index %s..%s\nGIT binary patch\nliteral %s\n' %
           (tohash, tnhash, len(tn))]
    for l in chunk(zlib.compress(tn)):
        ret.append(fmtline(l))
    ret.append('\n')
    return ''.join(ret)

def diff(repo, node1=None, node2=None, files=None, match=util.always,
         fp=None, changes=None, opts=None):
    '''print diff of changes to files between two nodes, or node and
    working directory.

    if node1 is None, use first dirstate parent instead.
    if node2 is None, compare node1 with working directory.'''

    if opts is None:
        opts = mdiff.defaultopts
    if fp is None:
        fp = repo.ui

    if not node1:
        node1 = repo.dirstate.parents()[0]

    ccache = {}
    def getctx(r):
        if r not in ccache:
            ccache[r] = context.changectx(repo, r)
        return ccache[r]

    flcache = {}
    def getfilectx(f, ctx):
        flctx = ctx.filectx(f, filelog=flcache.get(f))
        if f not in flcache:
            flcache[f] = flctx._filelog
        return flctx

    # reading the data for node1 early allows it to play nicely
    # with repo.status and the revlog cache.
    ctx1 = context.changectx(repo, node1)
    # force manifest reading
    man1 = ctx1.manifest()
    date1 = util.datestr(ctx1.date())

    if not changes:
        changes = repo.status(node1, node2, files, match=match)[:5]
    modified, added, removed, deleted, unknown = changes

    if not modified and not added and not removed:
        return

    if node2:
        ctx2 = context.changectx(repo, node2)
        execf2 = ctx2.manifest().execf
    else:
        ctx2 = context.workingctx(repo)
        execf2 = util.execfunc(repo.root, None)
        if execf2 is None:
            execf2 = ctx2.parents()[0].manifest().copy().execf

    # returns False if there was no rename between ctx1 and ctx2
    # returns None if the file was created between ctx1 and ctx2
    # returns the (file, node) present in ctx1 that was renamed to f in ctx2
    def renamed(f):
        startrev = ctx1.rev()
        c = ctx2
        crev = c.rev()
        if crev is None:
            crev = repo.changelog.count()
        orig = f
        while crev > startrev:
            if f in c.files():
                try:
                    src = getfilectx(f, c).renamed()
                except revlog.LookupError:
                    return None
                if src:
                    f = src[0]
            crev = c.parents()[0].rev()
            # try to reuse
            c = getctx(crev)
        if f not in man1:
            return None
        if f == orig:
            return False
        return f

    if repo.ui.quiet:
        r = None
    else:
        hexfunc = repo.ui.debugflag and hex or short
        r = [hexfunc(node) for node in [node1, node2] if node]

    if opts.git:
        copied = {}
        for f in added:
            src = renamed(f)
            if src:
                copied[f] = src
        srcs = [x[1] for x in copied.items()]

    all = modified + added + removed
    all.sort()
    gone = {}

    for f in all:
        to = None
        tn = None
        dodiff = True
        header = []
        if f in man1:
            to = getfilectx(f, ctx1).data()
        if f not in removed:
            tn = getfilectx(f, ctx2).data()
        if opts.git:
            def gitmode(x):
                return x and '100755' or '100644'
            def addmodehdr(header, omode, nmode):
                if omode != nmode:
                    header.append('old mode %s\n' % omode)
                    header.append('new mode %s\n' % nmode)

            a, b = f, f
            if f in added:
                mode = gitmode(execf2(f))
                if f in copied:
                    a = copied[f]
                    omode = gitmode(man1.execf(a))
                    addmodehdr(header, omode, mode)
                    if a in removed and a not in gone:
                        op = 'rename'
                        gone[a] = 1
                    else:
                        op = 'copy'
                    header.append('%s from %s\n' % (op, a))
                    header.append('%s to %s\n' % (op, f))
                    to = getfilectx(a, ctx1).data()
                else:
                    header.append('new file mode %s\n' % mode)
                if util.binary(tn):
                    dodiff = 'binary'
            elif f in removed:
                if f in srcs:
                    dodiff = False
                else:
                    mode = gitmode(man1.execf(f))
                    header.append('deleted file mode %s\n' % mode)
            else:
                omode = gitmode(man1.execf(f))
                nmode = gitmode(execf2(f))
                addmodehdr(header, omode, nmode)
                if util.binary(to) or util.binary(tn):
                    dodiff = 'binary'
            r = None
            header.insert(0, 'diff --git a/%s b/%s\n' % (a, b))
        if dodiff:
            if dodiff == 'binary':
                text = b85diff(fp, to, tn)
            else:
                text = mdiff.unidiff(to, date1,
                                     # ctx2 date may be dynamic
                                     tn, util.datestr(ctx2.date()),
                                     f, r, opts=opts)
            if text or len(header) > 1:
                fp.write(''.join(header))
                fp.write(text)

def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
           opts=None):
    '''export changesets as hg patches.'''

    total = len(revs)
    revwidth = max([len(str(rev)) for rev in revs])

    def single(rev, seqno, fp):
        ctx = repo.changectx(rev)
        node = ctx.node()
        parents = [p.node() for p in ctx.parents() if p]
        branch = ctx.branch()
        if switch_parent:
            parents.reverse()
        prev = (parents and parents[0]) or nullid

        if not fp:
            fp = cmdutil.make_file(repo, template, node, total=total,
                                   seqno=seqno, revwidth=revwidth)
        if fp != sys.stdout and hasattr(fp, 'name'):
            repo.ui.note("%s\n" % fp.name)

        fp.write("# HG changeset patch\n")
        fp.write("# User %s\n" % ctx.user())
        fp.write("# Date %d %d\n" % ctx.date())
        if branch and (branch != 'default'):
            fp.write("# Branch %s\n" % branch)
        fp.write("# Node ID %s\n" % hex(node))
        fp.write("# Parent %s\n" % hex(prev))
        if len(parents) > 1:
            fp.write("# Parent %s\n" % hex(parents[1]))
        fp.write(ctx.description().rstrip())
        fp.write("\n\n")

        diff(repo, prev, node, fp=fp, opts=opts)
        if fp not in (sys.stdout, repo.ui):
            fp.close()

    for seqno, rev in enumerate(revs):
        single(rev, seqno+1, fp)

1298 def diffstat(patchlines):
1298 def diffstat(patchlines):
1299 if not util.find_exe('diffstat'):
1299 if not util.find_exe('diffstat'):
1300 return
1300 return
1301 fd, name = tempfile.mkstemp(prefix="hg-patchbomb-", suffix=".txt")
1301 fd, name = tempfile.mkstemp(prefix="hg-patchbomb-", suffix=".txt")
1302 try:
1302 try:
1303 p = popen2.Popen3('diffstat -p1 -w79 2>/dev/null > ' + name)
1303 p = popen2.Popen3('diffstat -p1 -w79 2>/dev/null > ' + name)
1304 try:
1304 try:
1305 for line in patchlines: print >> p.tochild, line
1305 for line in patchlines: print >> p.tochild, line
1306 p.tochild.close()
1306 p.tochild.close()
1307 if p.wait(): return
1307 if p.wait(): return
1308 fp = os.fdopen(fd, 'r')
1308 fp = os.fdopen(fd, 'r')
1309 stat = []
1309 stat = []
1310 for line in fp: stat.append(line.lstrip())
1310 for line in fp: stat.append(line.lstrip())
1311 last = stat.pop()
1311 last = stat.pop()
1312 stat.insert(0, last)
1312 stat.insert(0, last)
1313 stat = ''.join(stat)
1313 stat = ''.join(stat)
1314 if stat.startswith('0 files'): raise ValueError
1314 if stat.startswith('0 files'): raise ValueError
1315 return stat
1315 return stat
1316 except: raise
1316 except: raise
1317 finally:
1317 finally:
1318 try: os.unlink(name)
1318 try: os.unlink(name)
1319 except: pass
1319 except: pass
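# A minimal usage sketch for diffstat() above.  It requires the external
# 'diffstat' program (it returns None when that is not on PATH); the patch
# text is made up.
patch_text = ("--- a/foo.c\n"
              "+++ b/foo.c\n"
              "@@ -1,1 +1,2 @@\n"
              " int x;\n"
              "+int y;\n")
summary = diffstat(patch_text.splitlines())
if summary:
    print(summary)    # per-file histogram lines, with the totals line moved to the front
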
@@ -1,1614 +1,1618 b''
1 """
1 """
2 util.py - Mercurial utility functions and platform specific implementations
2 util.py - Mercurial utility functions and platform specific implementations
3
3
4 Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 Copyright 2005 K. Thananchayan <thananck@yahoo.com>
5 Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
6 Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
6 Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
7
7
8 This software may be used and distributed according to the terms
8 This software may be used and distributed according to the terms
9 of the GNU General Public License, incorporated herein by reference.
9 of the GNU General Public License, incorporated herein by reference.
10
10
11 This contains helper routines that are independent of the SCM core and hide
11 This contains helper routines that are independent of the SCM core and hide
12 platform-specific details from the core.
12 platform-specific details from the core.
13 """
13 """
14
14
15 from i18n import _
15 from i18n import _
16 import cStringIO, errno, getpass, popen2, re, shutil, sys, tempfile
16 import cStringIO, errno, getpass, popen2, re, shutil, sys, tempfile
17 import os, threading, time, calendar, ConfigParser, locale, glob
17 import os, threading, time, calendar, ConfigParser, locale, glob
18
18
19 try:
19 try:
20 set = set
20 set = set
21 frozenset = frozenset
21 frozenset = frozenset
22 except NameError:
22 except NameError:
23 from sets import Set as set, ImmutableSet as frozenset
23 from sets import Set as set, ImmutableSet as frozenset
24
24
25 try:
25 try:
26 _encoding = os.environ.get("HGENCODING")
26 _encoding = os.environ.get("HGENCODING")
27 if sys.platform == 'darwin' and not _encoding:
27 if sys.platform == 'darwin' and not _encoding:
28 # On darwin, getpreferredencoding ignores the locale environment and
28 # On darwin, getpreferredencoding ignores the locale environment and
29 # always returns mac-roman. We override this if the environment is
29 # always returns mac-roman. We override this if the environment is
30 # not C (has been customized by the user).
30 # not C (has been customized by the user).
31 locale.setlocale(locale.LC_CTYPE, '')
31 locale.setlocale(locale.LC_CTYPE, '')
32 _encoding = locale.getlocale()[1]
32 _encoding = locale.getlocale()[1]
33 if not _encoding:
33 if not _encoding:
34 _encoding = locale.getpreferredencoding() or 'ascii'
34 _encoding = locale.getpreferredencoding() or 'ascii'
35 except locale.Error:
35 except locale.Error:
36 _encoding = 'ascii'
36 _encoding = 'ascii'
37 _encodingmode = os.environ.get("HGENCODINGMODE", "strict")
37 _encodingmode = os.environ.get("HGENCODINGMODE", "strict")
38 _fallbackencoding = 'ISO-8859-1'
38 _fallbackencoding = 'ISO-8859-1'
39
39
40 def tolocal(s):
40 def tolocal(s):
41 """
41 """
42 Convert a string from internal UTF-8 to local encoding
42 Convert a string from internal UTF-8 to local encoding
43
43
44 All internal strings should be UTF-8 but some repos before the
44 All internal strings should be UTF-8 but some repos before the
45 implementation of locale support may contain latin1 or possibly
45 implementation of locale support may contain latin1 or possibly
46 other character sets. We attempt to decode everything strictly
46 other character sets. We attempt to decode everything strictly
47 using UTF-8, then Latin-1, and failing that, we use UTF-8 and
47 using UTF-8, then Latin-1, and failing that, we use UTF-8 and
48 replace unknown characters.
48 replace unknown characters.
49 """
49 """
50 for e in ('UTF-8', _fallbackencoding):
50 for e in ('UTF-8', _fallbackencoding):
51 try:
51 try:
52 u = s.decode(e) # attempt strict decoding
52 u = s.decode(e) # attempt strict decoding
53 return u.encode(_encoding, "replace")
53 return u.encode(_encoding, "replace")
54 except LookupError, k:
54 except LookupError, k:
55 raise Abort(_("%s, please check your locale settings") % k)
55 raise Abort(_("%s, please check your locale settings") % k)
56 except UnicodeDecodeError:
56 except UnicodeDecodeError:
57 pass
57 pass
58 u = s.decode("utf-8", "replace") # last ditch
58 u = s.decode("utf-8", "replace") # last ditch
59 return u.encode(_encoding, "replace")
59 return u.encode(_encoding, "replace")
60
60
61 def fromlocal(s):
61 def fromlocal(s):
62 """
62 """
63 Convert a string from the local character encoding to UTF-8
63 Convert a string from the local character encoding to UTF-8
64
64
65 We attempt to decode strings using the encoding mode set by
65 We attempt to decode strings using the encoding mode set by
66 HGENCODINGMODE, which defaults to 'strict'. In this mode, unknown
66 HGENCODINGMODE, which defaults to 'strict'. In this mode, unknown
67 characters will cause an error message. Other modes include
67 characters will cause an error message. Other modes include
68 'replace', which replaces unknown characters with a special
68 'replace', which replaces unknown characters with a special
69 Unicode character, and 'ignore', which drops the character.
69 Unicode character, and 'ignore', which drops the character.
70 """
70 """
71 try:
71 try:
72 return s.decode(_encoding, _encodingmode).encode("utf-8")
72 return s.decode(_encoding, _encodingmode).encode("utf-8")
73 except UnicodeDecodeError, inst:
73 except UnicodeDecodeError, inst:
74 sub = s[max(0, inst.start-10):inst.start+10]
74 sub = s[max(0, inst.start-10):inst.start+10]
75 raise Abort("decoding near '%s': %s!" % (sub, inst))
75 raise Abort("decoding near '%s': %s!" % (sub, inst))
76 except LookupError, k:
76 except LookupError, k:
77 raise Abort(_("%s, please check your locale settings") % k)
77 raise Abort(_("%s, please check your locale settings") % k)
78
78
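# A minimal round-trip sketch for fromlocal()/tolocal() above.  Plain ASCII is
# used so it works under any HGENCODING; non-ASCII input can raise Abort when
# HGENCODINGMODE is 'strict'.
internal = fromlocal('hello')        # local bytes -> internal UTF-8
assert tolocal(internal) == 'hello'  # internal UTF-8 -> local bytes for display
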
79 def locallen(s):
79 def locallen(s):
80 """Find the length in characters of a local string"""
80 """Find the length in characters of a local string"""
81 return len(s.decode(_encoding, "replace"))
81 return len(s.decode(_encoding, "replace"))
82
82
83 def localsub(s, a, b=None):
83 def localsub(s, a, b=None):
84 try:
84 try:
85 u = s.decode(_encoding, _encodingmode)
85 u = s.decode(_encoding, _encodingmode)
86 if b is not None:
86 if b is not None:
87 u = u[a:b]
87 u = u[a:b]
88 else:
88 else:
89 u = u[:a]
89 u = u[:a]
90 return u.encode(_encoding, _encodingmode)
90 return u.encode(_encoding, _encodingmode)
91 except UnicodeDecodeError, inst:
91 except UnicodeDecodeError, inst:
92 sub = s[max(0, inst.start-10):inst.start+10]
92 sub = s[max(0, inst.start-10):inst.start+10]
93 raise Abort(_("decoding near '%s': %s!") % (sub, inst))
93 raise Abort(_("decoding near '%s': %s!") % (sub, inst))
94
94
95 # used by parsedate
95 # used by parsedate
96 defaultdateformats = (
96 defaultdateformats = (
97 '%Y-%m-%d %H:%M:%S',
97 '%Y-%m-%d %H:%M:%S',
98 '%Y-%m-%d %I:%M:%S%p',
98 '%Y-%m-%d %I:%M:%S%p',
99 '%Y-%m-%d %H:%M',
99 '%Y-%m-%d %H:%M',
100 '%Y-%m-%d %I:%M%p',
100 '%Y-%m-%d %I:%M%p',
101 '%Y-%m-%d',
101 '%Y-%m-%d',
102 '%m-%d',
102 '%m-%d',
103 '%m/%d',
103 '%m/%d',
104 '%m/%d/%y',
104 '%m/%d/%y',
105 '%m/%d/%Y',
105 '%m/%d/%Y',
106 '%a %b %d %H:%M:%S %Y',
106 '%a %b %d %H:%M:%S %Y',
107 '%a %b %d %I:%M:%S%p %Y',
107 '%a %b %d %I:%M:%S%p %Y',
108 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
108 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
109 '%b %d %H:%M:%S %Y',
109 '%b %d %H:%M:%S %Y',
110 '%b %d %I:%M:%S%p %Y',
110 '%b %d %I:%M:%S%p %Y',
111 '%b %d %H:%M:%S',
111 '%b %d %H:%M:%S',
112 '%b %d %I:%M:%S%p',
112 '%b %d %I:%M:%S%p',
113 '%b %d %H:%M',
113 '%b %d %H:%M',
114 '%b %d %I:%M%p',
114 '%b %d %I:%M%p',
115 '%b %d %Y',
115 '%b %d %Y',
116 '%b %d',
116 '%b %d',
117 '%H:%M:%S',
117 '%H:%M:%S',
118 '%I:%M:%S%p',
118 '%I:%M:%S%p',
119 '%H:%M',
119 '%H:%M',
120 '%I:%M%p',
120 '%I:%M%p',
121 )
121 )
122
122
123 extendeddateformats = defaultdateformats + (
123 extendeddateformats = defaultdateformats + (
124 "%Y",
124 "%Y",
125 "%Y-%m",
125 "%Y-%m",
126 "%b",
126 "%b",
127 "%b %Y",
127 "%b %Y",
128 )
128 )
129
129
130 class SignalInterrupt(Exception):
130 class SignalInterrupt(Exception):
131 """Exception raised on SIGTERM and SIGHUP."""
131 """Exception raised on SIGTERM and SIGHUP."""
132
132
133 # differences from SafeConfigParser:
133 # differences from SafeConfigParser:
134 # - case-sensitive keys
134 # - case-sensitive keys
135 # - allows values that are not strings (this means that you may not
135 # - allows values that are not strings (this means that you may not
136 # be able to save the configuration to a file)
136 # be able to save the configuration to a file)
137 class configparser(ConfigParser.SafeConfigParser):
137 class configparser(ConfigParser.SafeConfigParser):
138 def optionxform(self, optionstr):
138 def optionxform(self, optionstr):
139 return optionstr
139 return optionstr
140
140
141 def set(self, section, option, value):
141 def set(self, section, option, value):
142 return ConfigParser.ConfigParser.set(self, section, option, value)
142 return ConfigParser.ConfigParser.set(self, section, option, value)
143
143
144 def _interpolate(self, section, option, rawval, vars):
144 def _interpolate(self, section, option, rawval, vars):
145 if not isinstance(rawval, basestring):
145 if not isinstance(rawval, basestring):
146 return rawval
146 return rawval
147 return ConfigParser.SafeConfigParser._interpolate(self, section,
147 return ConfigParser.SafeConfigParser._interpolate(self, section,
148 option, rawval, vars)
148 option, rawval, vars)
149
149
150 def cachefunc(func):
150 def cachefunc(func):
151 '''cache the result of function calls'''
151 '''cache the result of function calls'''
152 # XXX doesn't handle keyword args
152 # XXX doesn't handle keyword args
153 cache = {}
153 cache = {}
154 if func.func_code.co_argcount == 1:
154 if func.func_code.co_argcount == 1:
155 # we gain a small amount of time because
155 # we gain a small amount of time because
156 # we don't need to pack/unpack the list
156 # we don't need to pack/unpack the list
157 def f(arg):
157 def f(arg):
158 if arg not in cache:
158 if arg not in cache:
159 cache[arg] = func(arg)
159 cache[arg] = func(arg)
160 return cache[arg]
160 return cache[arg]
161 else:
161 else:
162 def f(*args):
162 def f(*args):
163 if args not in cache:
163 if args not in cache:
164 cache[args] = func(*args)
164 cache[args] = func(*args)
165 return cache[args]
165 return cache[args]
166
166
167 return f
167 return f
168
168
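# A minimal usage sketch for cachefunc() above; square() is a made-up
# one-argument function, so the cheaper single-argument code path is taken.
calls = []
def square(x):
    calls.append(x)
    return x * x
square = cachefunc(square)
assert square(4) == 16 and square(4) == 16
assert calls == [4]                  # the wrapped function ran only once
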
169 def pipefilter(s, cmd):
169 def pipefilter(s, cmd):
170 '''filter string S through command CMD, returning its output'''
170 '''filter string S through command CMD, returning its output'''
171 (pin, pout) = os.popen2(cmd, 'b')
171 (pin, pout) = os.popen2(cmd, 'b')
172 def writer():
172 def writer():
173 try:
173 try:
174 pin.write(s)
174 pin.write(s)
175 pin.close()
175 pin.close()
176 except IOError, inst:
176 except IOError, inst:
177 if inst.errno != errno.EPIPE:
177 if inst.errno != errno.EPIPE:
178 raise
178 raise
179
179
180 # we should use select instead on UNIX, but this will work on most
180 # we should use select instead on UNIX, but this will work on most
181 # systems, including Windows
181 # systems, including Windows
182 w = threading.Thread(target=writer)
182 w = threading.Thread(target=writer)
183 w.start()
183 w.start()
184 f = pout.read()
184 f = pout.read()
185 pout.close()
185 pout.close()
186 w.join()
186 w.join()
187 return f
187 return f
188
188
189 def tempfilter(s, cmd):
189 def tempfilter(s, cmd):
190 '''filter string S through a pair of temporary files with CMD.
190 '''filter string S through a pair of temporary files with CMD.
191 CMD is used as a template to create the real command to be run,
191 CMD is used as a template to create the real command to be run,
192 with the strings INFILE and OUTFILE replaced by the real names of
192 with the strings INFILE and OUTFILE replaced by the real names of
193 the temporary files generated.'''
193 the temporary files generated.'''
194 inname, outname = None, None
194 inname, outname = None, None
195 try:
195 try:
196 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
196 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
197 fp = os.fdopen(infd, 'wb')
197 fp = os.fdopen(infd, 'wb')
198 fp.write(s)
198 fp.write(s)
199 fp.close()
199 fp.close()
200 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
200 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
201 os.close(outfd)
201 os.close(outfd)
202 cmd = cmd.replace('INFILE', inname)
202 cmd = cmd.replace('INFILE', inname)
203 cmd = cmd.replace('OUTFILE', outname)
203 cmd = cmd.replace('OUTFILE', outname)
204 code = os.system(cmd)
204 code = os.system(cmd)
205 if sys.platform == 'OpenVMS' and code & 1:
205 if sys.platform == 'OpenVMS' and code & 1:
206 code = 0
206 code = 0
207 if code: raise Abort(_("command '%s' failed: %s") %
207 if code: raise Abort(_("command '%s' failed: %s") %
208 (cmd, explain_exit(code)))
208 (cmd, explain_exit(code)))
209 return open(outname, 'rb').read()
209 return open(outname, 'rb').read()
210 finally:
210 finally:
211 try:
211 try:
212 if inname: os.unlink(inname)
212 if inname: os.unlink(inname)
213 except: pass
213 except: pass
214 try:
214 try:
215 if outname: os.unlink(outname)
215 if outname: os.unlink(outname)
216 except: pass
216 except: pass
217
217
218 filtertable = {
218 filtertable = {
219 'tempfile:': tempfilter,
219 'tempfile:': tempfilter,
220 'pipe:': pipefilter,
220 'pipe:': pipefilter,
221 }
221 }
222
222
223 def filter(s, cmd):
223 def filter(s, cmd):
224 "filter a string through a command that transforms its input to its output"
224 "filter a string through a command that transforms its input to its output"
225 for name, fn in filtertable.iteritems():
225 for name, fn in filtertable.iteritems():
226 if cmd.startswith(name):
226 if cmd.startswith(name):
227 return fn(s, cmd[len(name):].lstrip())
227 return fn(s, cmd[len(name):].lstrip())
228 return pipefilter(s, cmd)
228 return pipefilter(s, cmd)
229
229
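# A minimal usage sketch for filter() above (this module's filter, not the
# builtin); both commands are standard POSIX tools assumed to be on PATH.
assert filter('abc\n', 'pipe: tr a-z A-Z') == 'ABC\n'
assert filter('b\na\n', 'tempfile: sort INFILE > OUTFILE') == 'a\nb\n'
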
230 def binary(s):
230 def binary(s):
231 """return true if a string is binary data using diff's heuristic"""
231 """return true if a string is binary data using diff's heuristic"""
232 if s and '\0' in s[:4096]:
232 if s and '\0' in s[:4096]:
233 return True
233 return True
234 return False
234 return False
235
235
236 def unique(g):
236 def unique(g):
237 """return the uniq elements of iterable g"""
237 """return the uniq elements of iterable g"""
238 seen = {}
238 seen = {}
239 l = []
239 l = []
240 for f in g:
240 for f in g:
241 if f not in seen:
241 if f not in seen:
242 seen[f] = 1
242 seen[f] = 1
243 l.append(f)
243 l.append(f)
244 return l
244 return l
245
245
246 class Abort(Exception):
246 class Abort(Exception):
247 """Raised if a command needs to print an error and exit."""
247 """Raised if a command needs to print an error and exit."""
248
248
249 class UnexpectedOutput(Abort):
249 class UnexpectedOutput(Abort):
250 """Raised to print an error with part of output and exit."""
250 """Raised to print an error with part of output and exit."""
251
251
252 def always(fn): return True
252 def always(fn): return True
253 def never(fn): return False
253 def never(fn): return False
254
254
255 def expand_glob(pats):
255 def expand_glob(pats):
256 '''On Windows, expand the implicit globs in a list of patterns'''
256 '''On Windows, expand the implicit globs in a list of patterns'''
257 if os.name != 'nt':
257 if os.name != 'nt':
258 return list(pats)
258 return list(pats)
259 ret = []
259 ret = []
260 for p in pats:
260 for p in pats:
261 kind, name = patkind(p, None)
261 kind, name = patkind(p, None)
262 if kind is None:
262 if kind is None:
263 globbed = glob.glob(name)
263 globbed = glob.glob(name)
264 if globbed:
264 if globbed:
265 ret.extend(globbed)
265 ret.extend(globbed)
266 continue
266 continue
267 # if we couldn't expand the glob, just keep it around
267 # if we couldn't expand the glob, just keep it around
268 ret.append(p)
268 ret.append(p)
269 return ret
269 return ret
270
270
271 def patkind(name, dflt_pat='glob'):
271 def patkind(name, dflt_pat='glob'):
272 """Split a string into an optional pattern kind prefix and the
272 """Split a string into an optional pattern kind prefix and the
273 actual pattern."""
273 actual pattern."""
274 for prefix in 're', 'glob', 'path', 'relglob', 'relpath', 'relre':
274 for prefix in 're', 'glob', 'path', 'relglob', 'relpath', 'relre':
275 if name.startswith(prefix + ':'): return name.split(':', 1)
275 if name.startswith(prefix + ':'): return name.split(':', 1)
276 return dflt_pat, name
276 return dflt_pat, name
277
277
278 def globre(pat, head='^', tail='$'):
278 def globre(pat, head='^', tail='$'):
279 "convert a glob pattern into a regexp"
279 "convert a glob pattern into a regexp"
280 i, n = 0, len(pat)
280 i, n = 0, len(pat)
281 res = ''
281 res = ''
282 group = False
282 group = False
283 def peek(): return i < n and pat[i]
283 def peek(): return i < n and pat[i]
284 while i < n:
284 while i < n:
285 c = pat[i]
285 c = pat[i]
286 i = i+1
286 i = i+1
287 if c == '*':
287 if c == '*':
288 if peek() == '*':
288 if peek() == '*':
289 i += 1
289 i += 1
290 res += '.*'
290 res += '.*'
291 else:
291 else:
292 res += '[^/]*'
292 res += '[^/]*'
293 elif c == '?':
293 elif c == '?':
294 res += '.'
294 res += '.'
295 elif c == '[':
295 elif c == '[':
296 j = i
296 j = i
297 if j < n and pat[j] in '!]':
297 if j < n and pat[j] in '!]':
298 j += 1
298 j += 1
299 while j < n and pat[j] != ']':
299 while j < n and pat[j] != ']':
300 j += 1
300 j += 1
301 if j >= n:
301 if j >= n:
302 res += '\\['
302 res += '\\['
303 else:
303 else:
304 stuff = pat[i:j].replace('\\','\\\\')
304 stuff = pat[i:j].replace('\\','\\\\')
305 i = j + 1
305 i = j + 1
306 if stuff[0] == '!':
306 if stuff[0] == '!':
307 stuff = '^' + stuff[1:]
307 stuff = '^' + stuff[1:]
308 elif stuff[0] == '^':
308 elif stuff[0] == '^':
309 stuff = '\\' + stuff
309 stuff = '\\' + stuff
310 res = '%s[%s]' % (res, stuff)
310 res = '%s[%s]' % (res, stuff)
311 elif c == '{':
311 elif c == '{':
312 group = True
312 group = True
313 res += '(?:'
313 res += '(?:'
314 elif c == '}' and group:
314 elif c == '}' and group:
315 res += ')'
315 res += ')'
316 group = False
316 group = False
317 elif c == ',' and group:
317 elif c == ',' and group:
318 res += '|'
318 res += '|'
319 elif c == '\\':
319 elif c == '\\':
320 p = peek()
320 p = peek()
321 if p:
321 if p:
322 i += 1
322 i += 1
323 res += re.escape(p)
323 res += re.escape(p)
324 else:
324 else:
325 res += re.escape(c)
325 res += re.escape(c)
326 else:
326 else:
327 res += re.escape(c)
327 res += re.escape(c)
328 return head + res + tail
328 return head + res + tail
329
329
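# A minimal sketch of the regexps globre() above produces: '**' crosses
# directory separators while '*' stays within one path component.  The
# file names are made up.
import re
assert re.compile(globre('src/**/*.py')).match('src/a/b/mod.py')
assert not re.compile(globre('src/*.py')).match('src/a/mod.py')
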
330 _globchars = {'[': 1, '{': 1, '*': 1, '?': 1}
330 _globchars = {'[': 1, '{': 1, '*': 1, '?': 1}
331
331
332 def pathto(root, n1, n2):
332 def pathto(root, n1, n2):
333 '''return the relative path from one place to another.
333 '''return the relative path from one place to another.
334 root should use os.sep to separate directories
334 root should use os.sep to separate directories
335 n1 should use os.sep to separate directories
335 n1 should use os.sep to separate directories
336 n2 should use "/" to separate directories
336 n2 should use "/" to separate directories
337 returns an os.sep-separated path.
337 returns an os.sep-separated path.
338
338
339 If n1 is a relative path, it's assumed it's
339 If n1 is a relative path, it's assumed it's
340 relative to root.
340 relative to root.
341 n2 should always be relative to root.
341 n2 should always be relative to root.
342 '''
342 '''
343 if not n1: return localpath(n2)
343 if not n1: return localpath(n2)
344 if os.path.isabs(n1):
344 if os.path.isabs(n1):
345 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
345 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
346 return os.path.join(root, localpath(n2))
346 return os.path.join(root, localpath(n2))
347 n2 = '/'.join((pconvert(root), n2))
347 n2 = '/'.join((pconvert(root), n2))
348 a, b = n1.split(os.sep), n2.split('/')
348 a, b = n1.split(os.sep), n2.split('/')
349 a.reverse()
349 a.reverse()
350 b.reverse()
350 b.reverse()
351 while a and b and a[-1] == b[-1]:
351 while a and b and a[-1] == b[-1]:
352 a.pop()
352 a.pop()
353 b.pop()
353 b.pop()
354 b.reverse()
354 b.reverse()
355 return os.sep.join((['..'] * len(a)) + b)
355 return os.sep.join((['..'] * len(a)) + b)
356
356
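# A minimal sketch for pathto() above with made-up POSIX paths: n1 is taken
# relative to root, and the result climbs out of it with '..'.
assert pathto('/repo', 'subdir', 'docs/readme') == '../docs/readme'
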
357 def canonpath(root, cwd, myname):
357 def canonpath(root, cwd, myname):
358 """return the canonical path of myname, given cwd and root"""
358 """return the canonical path of myname, given cwd and root"""
359 if root == os.sep:
359 if root == os.sep:
360 rootsep = os.sep
360 rootsep = os.sep
361 elif root.endswith(os.sep):
361 elif root.endswith(os.sep):
362 rootsep = root
362 rootsep = root
363 else:
363 else:
364 rootsep = root + os.sep
364 rootsep = root + os.sep
365 name = myname
365 name = myname
366 if not os.path.isabs(name):
366 if not os.path.isabs(name):
367 name = os.path.join(root, cwd, name)
367 name = os.path.join(root, cwd, name)
368 name = os.path.normpath(name)
368 name = os.path.normpath(name)
369 if name != rootsep and name.startswith(rootsep):
369 if name != rootsep and name.startswith(rootsep):
370 name = name[len(rootsep):]
370 name = name[len(rootsep):]
371 audit_path(name)
371 audit_path(name)
372 return pconvert(name)
372 return pconvert(name)
373 elif name == root:
373 elif name == root:
374 return ''
374 return ''
375 else:
375 else:
376 # Determine whether `name' is in the hierarchy at or beneath `root',
376 # Determine whether `name' is in the hierarchy at or beneath `root',
377 # by iterating name=dirname(name) until that causes no change (can't
377 # by iterating name=dirname(name) until that causes no change (can't
378 # check name == '/', because that doesn't work on windows). For each
378 # check name == '/', because that doesn't work on windows). For each
379 # `name', compare dev/inode numbers. If they match, the list `rel'
379 # `name', compare dev/inode numbers. If they match, the list `rel'
380 # holds the reversed list of components making up the relative file
380 # holds the reversed list of components making up the relative file
381 # name we want.
381 # name we want.
382 root_st = os.stat(root)
382 root_st = os.stat(root)
383 rel = []
383 rel = []
384 while True:
384 while True:
385 try:
385 try:
386 name_st = os.stat(name)
386 name_st = os.stat(name)
387 except OSError:
387 except OSError:
388 break
388 break
389 if samestat(name_st, root_st):
389 if samestat(name_st, root_st):
390 if not rel:
390 if not rel:
391 # name was actually the same as root (maybe a symlink)
391 # name was actually the same as root (maybe a symlink)
392 return ''
392 return ''
393 rel.reverse()
393 rel.reverse()
394 name = os.path.join(*rel)
394 name = os.path.join(*rel)
395 audit_path(name)
395 audit_path(name)
396 return pconvert(name)
396 return pconvert(name)
397 dirname, basename = os.path.split(name)
397 dirname, basename = os.path.split(name)
398 rel.append(basename)
398 rel.append(basename)
399 if dirname == name:
399 if dirname == name:
400 break
400 break
401 name = dirname
401 name = dirname
402
402
403 raise Abort('%s not under root' % myname)
403 raise Abort('%s not under root' % myname)
404
404
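# A minimal sketch for canonpath() above (made-up POSIX paths): an absolute
# name below the root comes back relative to it, normalized and audited.
assert canonpath('/repo', '', '/repo/src/./main.c') == 'src/main.c'
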
405 def matcher(canonroot, cwd='', names=[], inc=[], exc=[], src=None):
405 def matcher(canonroot, cwd='', names=[], inc=[], exc=[], src=None):
406 return _matcher(canonroot, cwd, names, inc, exc, 'glob', src)
406 return _matcher(canonroot, cwd, names, inc, exc, 'glob', src)
407
407
408 def cmdmatcher(canonroot, cwd='', names=[], inc=[], exc=[], src=None,
408 def cmdmatcher(canonroot, cwd='', names=[], inc=[], exc=[], src=None,
409 globbed=False, default=None):
409 globbed=False, default=None):
410 default = default or 'relpath'
410 default = default or 'relpath'
411 if default == 'relpath' and not globbed:
411 if default == 'relpath' and not globbed:
412 names = expand_glob(names)
412 names = expand_glob(names)
413 return _matcher(canonroot, cwd, names, inc, exc, default, src)
413 return _matcher(canonroot, cwd, names, inc, exc, default, src)
414
414
415 def _matcher(canonroot, cwd, names, inc, exc, dflt_pat, src):
415 def _matcher(canonroot, cwd, names, inc, exc, dflt_pat, src):
416 """build a function to match a set of file patterns
416 """build a function to match a set of file patterns
417
417
418 arguments:
418 arguments:
419 canonroot - the canonical root of the tree you're matching against
419 canonroot - the canonical root of the tree you're matching against
420 cwd - the current working directory, if relevant
420 cwd - the current working directory, if relevant
421 names - patterns to find
421 names - patterns to find
422 inc - patterns to include
422 inc - patterns to include
423 exc - patterns to exclude
423 exc - patterns to exclude
424 dflt_pat - if a pattern in names has no explicit type, assume this one
424 dflt_pat - if a pattern in names has no explicit type, assume this one
425 src - where these patterns came from (e.g. .hgignore)
425 src - where these patterns came from (e.g. .hgignore)
426
426
427 a pattern is one of:
427 a pattern is one of:
428 'glob:<glob>' - a glob relative to cwd
428 'glob:<glob>' - a glob relative to cwd
429 're:<regexp>' - a regular expression
429 're:<regexp>' - a regular expression
430 'path:<path>' - a path relative to canonroot
430 'path:<path>' - a path relative to canonroot
431 'relglob:<glob>' - an unrooted glob (*.c matches C files in all dirs)
431 'relglob:<glob>' - an unrooted glob (*.c matches C files in all dirs)
432 'relpath:<path>' - a path relative to cwd
432 'relpath:<path>' - a path relative to cwd
433 'relre:<regexp>' - a regexp that doesn't have to match the start of a name
433 'relre:<regexp>' - a regexp that doesn't have to match the start of a name
434 '<something>' - one of the cases above, selected by the dflt_pat argument
434 '<something>' - one of the cases above, selected by the dflt_pat argument
435
435
436 returns:
436 returns:
437 a 3-tuple containing
437 a 3-tuple containing
438 - list of roots (places where one should start a recursive walk of the fs);
438 - list of roots (places where one should start a recursive walk of the fs);
439 this often matches the explicit non-pattern names passed in, but also
439 this often matches the explicit non-pattern names passed in, but also
440 includes the initial part of glob: patterns that has no glob characters
440 includes the initial part of glob: patterns that has no glob characters
441 - a bool match(filename) function
441 - a bool match(filename) function
442 - a bool indicating if any patterns were passed in
442 - a bool indicating if any patterns were passed in
443 """
443 """
444
444
445 # a common case: no patterns at all
445 # a common case: no patterns at all
446 if not names and not inc and not exc:
446 if not names and not inc and not exc:
447 return [], always, False
447 return [], always, False
448
448
449 def contains_glob(name):
449 def contains_glob(name):
450 for c in name:
450 for c in name:
451 if c in _globchars: return True
451 if c in _globchars: return True
452 return False
452 return False
453
453
454 def regex(kind, name, tail):
454 def regex(kind, name, tail):
455 '''convert a pattern into a regular expression'''
455 '''convert a pattern into a regular expression'''
456 if not name:
456 if not name:
457 return ''
457 return ''
458 if kind == 're':
458 if kind == 're':
459 return name
459 return name
460 elif kind == 'path':
460 elif kind == 'path':
461 return '^' + re.escape(name) + '(?:/|$)'
461 return '^' + re.escape(name) + '(?:/|$)'
462 elif kind == 'relglob':
462 elif kind == 'relglob':
463 return globre(name, '(?:|.*/)', tail)
463 return globre(name, '(?:|.*/)', tail)
464 elif kind == 'relpath':
464 elif kind == 'relpath':
465 return re.escape(name) + '(?:/|$)'
465 return re.escape(name) + '(?:/|$)'
466 elif kind == 'relre':
466 elif kind == 'relre':
467 if name.startswith('^'):
467 if name.startswith('^'):
468 return name
468 return name
469 return '.*' + name
469 return '.*' + name
470 return globre(name, '', tail)
470 return globre(name, '', tail)
471
471
472 def matchfn(pats, tail):
472 def matchfn(pats, tail):
473 """build a matching function from a set of patterns"""
473 """build a matching function from a set of patterns"""
474 if not pats:
474 if not pats:
475 return
475 return
476 try:
476 try:
477 pat = '(?:%s)' % '|'.join([regex(k, p, tail) for (k, p) in pats])
477 pat = '(?:%s)' % '|'.join([regex(k, p, tail) for (k, p) in pats])
478 return re.compile(pat).match
478 return re.compile(pat).match
479 except re.error:
479 except re.error:
480 for k, p in pats:
480 for k, p in pats:
481 try:
481 try:
482 re.compile('(?:%s)' % regex(k, p, tail))
482 re.compile('(?:%s)' % regex(k, p, tail))
483 except re.error:
483 except re.error:
484 if src:
484 if src:
485 raise Abort("%s: invalid pattern (%s): %s" %
485 raise Abort("%s: invalid pattern (%s): %s" %
486 (src, k, p))
486 (src, k, p))
487 else:
487 else:
488 raise Abort("invalid pattern (%s): %s" % (k, p))
488 raise Abort("invalid pattern (%s): %s" % (k, p))
489 raise Abort("invalid pattern")
489 raise Abort("invalid pattern")
490
490
491 def globprefix(pat):
491 def globprefix(pat):
492 '''return the non-glob prefix of a path, e.g. foo/* -> foo'''
492 '''return the non-glob prefix of a path, e.g. foo/* -> foo'''
493 root = []
493 root = []
494 for p in pat.split('/'):
494 for p in pat.split('/'):
495 if contains_glob(p): break
495 if contains_glob(p): break
496 root.append(p)
496 root.append(p)
497 return '/'.join(root) or '.'
497 return '/'.join(root) or '.'
498
498
499 def normalizepats(names, default):
499 def normalizepats(names, default):
500 pats = []
500 pats = []
501 roots = []
501 roots = []
502 anypats = False
502 anypats = False
503 for kind, name in [patkind(p, default) for p in names]:
503 for kind, name in [patkind(p, default) for p in names]:
504 if kind in ('glob', 'relpath'):
504 if kind in ('glob', 'relpath'):
505 name = canonpath(canonroot, cwd, name)
505 name = canonpath(canonroot, cwd, name)
506 elif kind in ('relglob', 'path'):
506 elif kind in ('relglob', 'path'):
507 name = normpath(name)
507 name = normpath(name)
508
508
509 pats.append((kind, name))
509 pats.append((kind, name))
510
510
511 if kind in ('glob', 're', 'relglob', 'relre'):
511 if kind in ('glob', 're', 'relglob', 'relre'):
512 anypats = True
512 anypats = True
513
513
514 if kind == 'glob':
514 if kind == 'glob':
515 root = globprefix(name)
515 root = globprefix(name)
516 roots.append(root)
516 roots.append(root)
517 elif kind in ('relpath', 'path'):
517 elif kind in ('relpath', 'path'):
518 roots.append(name or '.')
518 roots.append(name or '.')
519 elif kind == 'relglob':
519 elif kind == 'relglob':
520 roots.append('.')
520 roots.append('.')
521 return roots, pats, anypats
521 return roots, pats, anypats
522
522
523 roots, pats, anypats = normalizepats(names, dflt_pat)
523 roots, pats, anypats = normalizepats(names, dflt_pat)
524
524
525 patmatch = matchfn(pats, '$') or always
525 patmatch = matchfn(pats, '$') or always
526 incmatch = always
526 incmatch = always
527 if inc:
527 if inc:
528 dummy, inckinds, dummy = normalizepats(inc, 'glob')
528 dummy, inckinds, dummy = normalizepats(inc, 'glob')
529 incmatch = matchfn(inckinds, '(?:/|$)')
529 incmatch = matchfn(inckinds, '(?:/|$)')
530 excmatch = lambda fn: False
530 excmatch = lambda fn: False
531 if exc:
531 if exc:
532 dummy, exckinds, dummy = normalizepats(exc, 'glob')
532 dummy, exckinds, dummy = normalizepats(exc, 'glob')
533 excmatch = matchfn(exckinds, '(?:/|$)')
533 excmatch = matchfn(exckinds, '(?:/|$)')
534
534
535 if not names and inc and not exc:
535 if not names and inc and not exc:
536 # common case: hgignore patterns
536 # common case: hgignore patterns
537 match = incmatch
537 match = incmatch
538 else:
538 else:
539 match = lambda fn: incmatch(fn) and not excmatch(fn) and patmatch(fn)
539 match = lambda fn: incmatch(fn) and not excmatch(fn) and patmatch(fn)
540
540
541 return (roots, match, (inc or exc or anypats) and True)
541 return (roots, match, (inc or exc or anypats) and True)
542
542
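# A minimal usage sketch for matcher()/_matcher() above with a made-up root;
# the 'glob' pattern is canonicalized purely as a string, so no repository
# or filesystem access is needed here.
roots, match, anypats = matcher('/repo', '', ['glob:src/*.c'])
assert roots == ['src'] and anypats
assert match('src/foo.c') and not match('src/sub/foo.c')
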
543 _hgexecutable = 'hg'
543 _hgexecutable = 'hg'
544
544
545 def set_hgexecutable(path):
545 def set_hgexecutable(path):
546 """remember location of the 'hg' executable if easily possible
546 """remember location of the 'hg' executable if easily possible
547
547
548 path might be None or empty if hg was loaded as a module;
548 path might be None or empty if hg was loaded as a module;
549 fall back to 'hg' in this case.
549 fall back to 'hg' in this case.
550 """
550 """
551 global _hgexecutable
551 global _hgexecutable
552 if path:
552 if path:
553 _hgexecutable = os.path.abspath(path)
553 _hgexecutable = os.path.abspath(path)
554
554
555 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None):
555 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None):
556 '''enhanced shell command execution.
556 '''enhanced shell command execution.
557 run with environment maybe modified, maybe in different dir.
557 run with environment maybe modified, maybe in different dir.
558
558
559 if command fails and onerr is None, return status. if ui object,
559 if command fails and onerr is None, return status. if ui object,
560 print error message and return status, else raise onerr object as
560 print error message and return status, else raise onerr object as
561 exception.'''
561 exception.'''
562 def py2shell(val):
562 def py2shell(val):
563 'convert python object into string that is useful to shell'
563 'convert python object into string that is useful to shell'
564 if val in (None, False):
564 if val in (None, False):
565 return '0'
565 return '0'
566 if val == True:
566 if val == True:
567 return '1'
567 return '1'
568 return str(val)
568 return str(val)
569 oldenv = {}
569 oldenv = {}
570 for k in environ:
570 for k in environ:
571 oldenv[k] = os.environ.get(k)
571 oldenv[k] = os.environ.get(k)
572 if cwd is not None:
572 if cwd is not None:
573 oldcwd = os.getcwd()
573 oldcwd = os.getcwd()
574 origcmd = cmd
574 origcmd = cmd
575 if os.name == 'nt':
575 if os.name == 'nt':
576 cmd = '"%s"' % cmd
576 cmd = '"%s"' % cmd
577 try:
577 try:
578 for k, v in environ.iteritems():
578 for k, v in environ.iteritems():
579 os.environ[k] = py2shell(v)
579 os.environ[k] = py2shell(v)
580 if 'HG' not in os.environ:
580 if 'HG' not in os.environ:
581 os.environ['HG'] = _hgexecutable
581 os.environ['HG'] = _hgexecutable
582 if cwd is not None and oldcwd != cwd:
582 if cwd is not None and oldcwd != cwd:
583 os.chdir(cwd)
583 os.chdir(cwd)
584 rc = os.system(cmd)
584 rc = os.system(cmd)
585 if sys.platform == 'OpenVMS' and rc & 1:
585 if sys.platform == 'OpenVMS' and rc & 1:
586 rc = 0
586 rc = 0
587 if rc and onerr:
587 if rc and onerr:
588 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
588 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
589 explain_exit(rc)[0])
589 explain_exit(rc)[0])
590 if errprefix:
590 if errprefix:
591 errmsg = '%s: %s' % (errprefix, errmsg)
591 errmsg = '%s: %s' % (errprefix, errmsg)
592 try:
592 try:
593 onerr.warn(errmsg + '\n')
593 onerr.warn(errmsg + '\n')
594 except AttributeError:
594 except AttributeError:
595 raise onerr(errmsg)
595 raise onerr(errmsg)
596 return rc
596 return rc
597 finally:
597 finally:
598 for k, v in oldenv.iteritems():
598 for k, v in oldenv.iteritems():
599 if v is None:
599 if v is None:
600 del os.environ[k]
600 del os.environ[k]
601 else:
601 else:
602 os.environ[k] = v
602 os.environ[k] = v
603 if cwd is not None and oldcwd != cwd:
603 if cwd is not None and oldcwd != cwd:
604 os.chdir(oldcwd)
604 os.chdir(oldcwd)
605
605
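# A minimal usage sketch for system() above (POSIX shell; GREETING is a
# made-up variable and is restored afterwards).  With onerr set to an
# exception class, a failing command raises instead of returning its status.
rc = system('echo "$GREETING" > /dev/null', environ={'GREETING': 'hi'})
assert rc == 0
# system('false', onerr=Abort, errprefix='example')   # would raise Abort
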
606 # os.path.lexists is not available on python2.3
606 # os.path.lexists is not available on python2.3
607 def lexists(filename):
607 def lexists(filename):
608 "test whether a file with this name exists. does not follow symlinks"
608 "test whether a file with this name exists. does not follow symlinks"
609 try:
609 try:
610 os.lstat(filename)
610 os.lstat(filename)
611 except:
611 except:
612 return False
612 return False
613 return True
613 return True
614
614
615 def rename(src, dst):
615 def rename(src, dst):
616 """forcibly rename a file"""
616 """forcibly rename a file"""
617 try:
617 try:
618 os.rename(src, dst)
618 os.rename(src, dst)
619 except OSError, err:
619 except OSError, err: # FIXME: check err (EEXIST ?)
620 # on windows, rename to existing file is not allowed, so we
620 # on windows, rename to existing file is not allowed, so we
621 # must delete destination first. but if file is open, unlink
621 # must delete destination first. but if file is open, unlink
622 # schedules it for delete but does not delete it. rename
622 # schedules it for delete but does not delete it. rename
623 # happens immediately even for open files, so we create
623 # happens immediately even for open files, so we create
624 # temporary file, delete it, rename destination to that name,
624 # temporary file, delete it, rename destination to that name,
625 # then delete that. then rename is safe to do.
625 # then delete that. then rename is safe to do.
626 fd, temp = tempfile.mkstemp(dir=os.path.dirname(dst) or '.')
626 fd, temp = tempfile.mkstemp(dir=os.path.dirname(dst) or '.')
627 os.close(fd)
627 os.close(fd)
628 os.unlink(temp)
628 os.unlink(temp)
629 os.rename(dst, temp)
629 os.rename(dst, temp)
630 os.unlink(temp)
630 os.unlink(temp)
631 os.rename(src, dst)
631 os.rename(src, dst)
632
632
633 def unlink(f):
633 def unlink(f):
634 """unlink and remove the directory if it is empty"""
634 """unlink and remove the directory if it is empty"""
635 os.unlink(f)
635 os.unlink(f)
636 # try removing directories that might now be empty
636 # try removing directories that might now be empty
637 try:
637 try:
638 os.removedirs(os.path.dirname(f))
638 os.removedirs(os.path.dirname(f))
639 except OSError:
639 except OSError:
640 pass
640 pass
641
641
642 def copyfile(src, dest):
642 def copyfile(src, dest):
643 "copy a file, preserving mode"
643 "copy a file, preserving mode"
644 if os.path.islink(src):
644 if os.path.islink(src):
645 try:
645 try:
646 os.unlink(dest)
646 os.unlink(dest)
647 except:
647 except:
648 pass
648 pass
649 os.symlink(os.readlink(src), dest)
649 os.symlink(os.readlink(src), dest)
650 else:
650 else:
651 try:
651 try:
652 shutil.copyfile(src, dest)
652 shutil.copyfile(src, dest)
653 shutil.copymode(src, dest)
653 shutil.copymode(src, dest)
654 except shutil.Error, inst:
654 except shutil.Error, inst:
655 raise Abort(str(inst))
655 raise Abort(str(inst))
656
656
657 def copyfiles(src, dst, hardlink=None):
657 def copyfiles(src, dst, hardlink=None):
658 """Copy a directory tree using hardlinks if possible"""
658 """Copy a directory tree using hardlinks if possible"""
659
659
660 if hardlink is None:
660 if hardlink is None:
661 hardlink = (os.stat(src).st_dev ==
661 hardlink = (os.stat(src).st_dev ==
662 os.stat(os.path.dirname(dst)).st_dev)
662 os.stat(os.path.dirname(dst)).st_dev)
663
663
664 if os.path.isdir(src):
664 if os.path.isdir(src):
665 os.mkdir(dst)
665 os.mkdir(dst)
666 for name in os.listdir(src):
666 for name in os.listdir(src):
667 srcname = os.path.join(src, name)
667 srcname = os.path.join(src, name)
668 dstname = os.path.join(dst, name)
668 dstname = os.path.join(dst, name)
669 copyfiles(srcname, dstname, hardlink)
669 copyfiles(srcname, dstname, hardlink)
670 else:
670 else:
671 if hardlink:
671 if hardlink:
672 try:
672 try:
673 os_link(src, dst)
673 os_link(src, dst)
674 except (IOError, OSError):
674 except (IOError, OSError):
675 hardlink = False
675 hardlink = False
676 shutil.copy(src, dst)
676 shutil.copy(src, dst)
677 else:
677 else:
678 shutil.copy(src, dst)
678 shutil.copy(src, dst)
679
679
680 def audit_path(path):
680 def audit_path(path):
681 """Abort if path contains dangerous components"""
681 """Abort if path contains dangerous components"""
682 parts = os.path.normcase(path).split(os.sep)
682 parts = os.path.normcase(path).split(os.sep)
683 if (os.path.splitdrive(path)[0] or parts[0] in ('.hg', '')
683 if (os.path.splitdrive(path)[0] or parts[0] in ('.hg', '')
684 or os.pardir in parts):
684 or os.pardir in parts):
685 raise Abort(_("path contains illegal component: %s") % path)
685 raise Abort(_("path contains illegal component: %s") % path)
686
686
687 def _makelock_file(info, pathname):
687 def _makelock_file(info, pathname):
688 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
688 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
689 os.write(ld, info)
689 os.write(ld, info)
690 os.close(ld)
690 os.close(ld)
691
691
692 def _readlock_file(pathname):
692 def _readlock_file(pathname):
693 return posixfile(pathname).read()
693 return posixfile(pathname).read()
694
694
695 def nlinks(pathname):
695 def nlinks(pathname):
696 """Return number of hardlinks for the given file."""
696 """Return number of hardlinks for the given file."""
697 return os.lstat(pathname).st_nlink
697 return os.lstat(pathname).st_nlink
698
698
699 if hasattr(os, 'link'):
699 if hasattr(os, 'link'):
700 os_link = os.link
700 os_link = os.link
701 else:
701 else:
702 def os_link(src, dst):
702 def os_link(src, dst):
703 raise OSError(0, _("Hardlinks not supported"))
703 raise OSError(0, _("Hardlinks not supported"))
704
704
705 def fstat(fp):
705 def fstat(fp):
706 '''stat file object that may not have fileno method.'''
706 '''stat file object that may not have fileno method.'''
707 try:
707 try:
708 return os.fstat(fp.fileno())
708 return os.fstat(fp.fileno())
709 except AttributeError:
709 except AttributeError:
710 return os.stat(fp.name)
710 return os.stat(fp.name)
711
711
712 posixfile = file
712 posixfile = file
713
713
714 def is_win_9x():
714 def is_win_9x():
715 '''return true if run on windows 95, 98 or me.'''
715 '''return true if run on windows 95, 98 or me.'''
716 try:
716 try:
717 return sys.getwindowsversion()[3] == 1
717 return sys.getwindowsversion()[3] == 1
718 except AttributeError:
718 except AttributeError:
719 return os.name == 'nt' and 'command' in os.environ.get('comspec', '')
719 return os.name == 'nt' and 'command' in os.environ.get('comspec', '')
720
720
721 getuser_fallback = None
721 getuser_fallback = None
722
722
723 def getuser():
723 def getuser():
724 '''return name of current user'''
724 '''return name of current user'''
725 try:
725 try:
726 return getpass.getuser()
726 return getpass.getuser()
727 except ImportError:
727 except ImportError:
728 # import of pwd will fail on windows - try fallback
728 # import of pwd will fail on windows - try fallback
729 if getuser_fallback:
729 if getuser_fallback:
730 return getuser_fallback()
730 return getuser_fallback()
731 # raised if win32api not available
731 # raised if win32api not available
732 raise Abort(_('user name not available - set USERNAME '
732 raise Abort(_('user name not available - set USERNAME '
733 'environment variable'))
733 'environment variable'))
734
734
735 def username(uid=None):
735 def username(uid=None):
736 """Return the name of the user with the given uid.
736 """Return the name of the user with the given uid.
737
737
738 If uid is None, return the name of the current user."""
738 If uid is None, return the name of the current user."""
739 try:
739 try:
740 import pwd
740 import pwd
741 if uid is None:
741 if uid is None:
742 uid = os.getuid()
742 uid = os.getuid()
743 try:
743 try:
744 return pwd.getpwuid(uid)[0]
744 return pwd.getpwuid(uid)[0]
745 except KeyError:
745 except KeyError:
746 return str(uid)
746 return str(uid)
747 except ImportError:
747 except ImportError:
748 return None
748 return None
749
749
750 def groupname(gid=None):
750 def groupname(gid=None):
751 """Return the name of the group with the given gid.
751 """Return the name of the group with the given gid.
752
752
753 If gid is None, return the name of the current group."""
753 If gid is None, return the name of the current group."""
754 try:
754 try:
755 import grp
755 import grp
756 if gid is None:
756 if gid is None:
757 gid = os.getgid()
757 gid = os.getgid()
758 try:
758 try:
759 return grp.getgrgid(gid)[0]
759 return grp.getgrgid(gid)[0]
760 except KeyError:
760 except KeyError:
761 return str(gid)
761 return str(gid)
762 except ImportError:
762 except ImportError:
763 return None
763 return None
764
764
765 # File system features
765 # File system features
766
766
767 def checkfolding(path):
767 def checkfolding(path):
768 """
768 """
769 Check whether the given path is on a case-sensitive filesystem
769 Check whether the given path is on a case-sensitive filesystem
770
770
771 Requires a path (like /foo/.hg) ending with a foldable final
771 Requires a path (like /foo/.hg) ending with a foldable final
772 directory component.
772 directory component.
773 """
773 """
774 s1 = os.stat(path)
774 s1 = os.stat(path)
775 d, b = os.path.split(path)
775 d, b = os.path.split(path)
776 p2 = os.path.join(d, b.upper())
776 p2 = os.path.join(d, b.upper())
777 if path == p2:
777 if path == p2:
778 p2 = os.path.join(d, b.lower())
778 p2 = os.path.join(d, b.lower())
779 try:
779 try:
780 s2 = os.stat(p2)
780 s2 = os.stat(p2)
781 if s2 == s1:
781 if s2 == s1:
782 return False
782 return False
783 return True
783 return True
784 except:
784 except:
785 return True
785 return True
786
786
787 def checkexec(path):
787 def checkexec(path):
788 """
788 """
789 Check whether the given path is on a filesystem with UNIX-like exec flags
789 Check whether the given path is on a filesystem with UNIX-like exec flags
790
790
791 Requires a directory (like /foo/.hg)
791 Requires a directory (like /foo/.hg)
792 """
792 """
793 fh, fn = tempfile.mkstemp("", "", path)
793 fh, fn = tempfile.mkstemp("", "", path)
794 os.close(fh)
794 os.close(fh)
795 m = os.stat(fn).st_mode
795 m = os.stat(fn).st_mode
796 os.chmod(fn, m ^ 0111)
796 os.chmod(fn, m ^ 0111)
797 r = (os.stat(fn).st_mode != m)
797 r = (os.stat(fn).st_mode != m)
798 os.unlink(fn)
798 os.unlink(fn)
799 return r
799 return r
800
800
801 def execfunc(path, fallback):
801 def execfunc(path, fallback):
802 '''return an is_exec() function with default to fallback'''
802 '''return an is_exec() function with default to fallback'''
803 if checkexec(path):
803 if checkexec(path):
804 return lambda x: is_exec(os.path.join(path, x))
804 return lambda x: is_exec(os.path.join(path, x))
805 return fallback
805 return fallback
806
806
807 def checklink(path):
807 def checklink(path):
808 """check whether the given path is on a symlink-capable filesystem"""
808 """check whether the given path is on a symlink-capable filesystem"""
809 # mktemp is not racy because symlink creation will fail if the
809 # mktemp is not racy because symlink creation will fail if the
810 # file already exists
810 # file already exists
811 name = tempfile.mktemp(dir=path)
811 name = tempfile.mktemp(dir=path)
812 try:
812 try:
813 os.symlink(".", name)
813 os.symlink(".", name)
814 os.unlink(name)
814 os.unlink(name)
815 return True
815 return True
816 except (OSError, AttributeError):
816 except (OSError, AttributeError):
817 return False
817 return False
818
818
819 def linkfunc(path, fallback):
819 def linkfunc(path, fallback):
820 '''return an is_link() function with default to fallback'''
820 '''return an is_link() function with default to fallback'''
821 if checklink(path):
821 if checklink(path):
822 return lambda x: os.path.islink(os.path.join(path, x))
822 return lambda x: os.path.islink(os.path.join(path, x))
823 return fallback
823 return fallback
824
824
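# A minimal usage sketch for execfunc()/linkfunc() above; '/repo/.hg' is a
# made-up writable directory (both capability probes create a temporary
# entry inside it).
probe_dir = '/repo/.hg'
is_exec_fn = execfunc(probe_dir, lambda f: False)   # fallback when exec bits are unsupported
is_link_fn = linkfunc(probe_dir, lambda f: False)   # fallback when symlinks are unsupported
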
825 _umask = os.umask(0)
825 _umask = os.umask(0)
826 os.umask(_umask)
826 os.umask(_umask)
827
827
828 def needbinarypatch():
828 def needbinarypatch():
829 """return True if patches should be applied in binary mode by default."""
829 """return True if patches should be applied in binary mode by default."""
830 return os.name == 'nt'
830 return os.name == 'nt'
831
831
832 # Platform specific variants
832 # Platform specific variants
833 if os.name == 'nt':
833 if os.name == 'nt':
834 import msvcrt
834 import msvcrt
835 nulldev = 'NUL:'
835 nulldev = 'NUL:'
836
836
837 class winstdout:
837 class winstdout:
838 '''stdout on windows misbehaves if sent through a pipe'''
838 '''stdout on windows misbehaves if sent through a pipe'''
839
839
840 def __init__(self, fp):
840 def __init__(self, fp):
841 self.fp = fp
841 self.fp = fp
842
842
843 def __getattr__(self, key):
843 def __getattr__(self, key):
844 return getattr(self.fp, key)
844 return getattr(self.fp, key)
845
845
846 def close(self):
846 def close(self):
847 try:
847 try:
848 self.fp.close()
848 self.fp.close()
849 except: pass
849 except: pass
850
850
851 def write(self, s):
851 def write(self, s):
852 try:
852 try:
853 return self.fp.write(s)
853 return self.fp.write(s)
854 except IOError, inst:
854 except IOError, inst:
855 if inst.errno != 0: raise
855 if inst.errno != 0: raise
856 self.close()
856 self.close()
857 raise IOError(errno.EPIPE, 'Broken pipe')
857 raise IOError(errno.EPIPE, 'Broken pipe')
858
858
859 def flush(self):
859 def flush(self):
860 try:
860 try:
861 return self.fp.flush()
861 return self.fp.flush()
862 except IOError, inst:
862 except IOError, inst:
863 if inst.errno != errno.EINVAL: raise
863 if inst.errno != errno.EINVAL: raise
864 self.close()
864 self.close()
865 raise IOError(errno.EPIPE, 'Broken pipe')
865 raise IOError(errno.EPIPE, 'Broken pipe')
866
866
867 sys.stdout = winstdout(sys.stdout)
867 sys.stdout = winstdout(sys.stdout)
868
868
869 def system_rcpath():
869 def system_rcpath():
870 try:
870 try:
871 return system_rcpath_win32()
871 return system_rcpath_win32()
872 except:
872 except:
873 return [r'c:\mercurial\mercurial.ini']
873 return [r'c:\mercurial\mercurial.ini']
874
874
875 def user_rcpath():
875 def user_rcpath():
876 '''return os-specific hgrc search path to the user dir'''
876 '''return os-specific hgrc search path to the user dir'''
877 try:
877 try:
878 userrc = user_rcpath_win32()
878 userrc = user_rcpath_win32()
879 except:
879 except:
880 userrc = os.path.join(os.path.expanduser('~'), 'mercurial.ini')
880 userrc = os.path.join(os.path.expanduser('~'), 'mercurial.ini')
881 path = [userrc]
881 path = [userrc]
882 userprofile = os.environ.get('USERPROFILE')
882 userprofile = os.environ.get('USERPROFILE')
883 if userprofile:
883 if userprofile:
884 path.append(os.path.join(userprofile, 'mercurial.ini'))
884 path.append(os.path.join(userprofile, 'mercurial.ini'))
885 return path
885 return path
886
886
887 def parse_patch_output(output_line):
887 def parse_patch_output(output_line):
888 """parses the output produced by patch and returns the file name"""
888 """parses the output produced by patch and returns the file name"""
889 pf = output_line[14:]
889 pf = output_line[14:]
890 if pf[0] == '`':
890 if pf[0] == '`':
891 pf = pf[1:-1] # Remove the quotes
891 pf = pf[1:-1] # Remove the quotes
892 return pf
892 return pf
893
893
894 def testpid(pid):
894 def testpid(pid):
895 '''return False if pid dead, True if running or not known'''
895 '''return False if pid dead, True if running or not known'''
896 return True
896 return True
897
897
898 def set_exec(f, mode):
898 def set_exec(f, mode):
899 pass
899 pass
900
900
901 def set_link(f, mode):
901 def set_link(f, mode):
902 pass
902 pass
903
903
904 def set_binary(fd):
904 def set_binary(fd):
905 msvcrt.setmode(fd.fileno(), os.O_BINARY)
905 msvcrt.setmode(fd.fileno(), os.O_BINARY)
906
906
907 def pconvert(path):
907 def pconvert(path):
908 return path.replace("\\", "/")
908 return path.replace("\\", "/")
909
909
910 def localpath(path):
910 def localpath(path):
911 return path.replace('/', '\\')
911 return path.replace('/', '\\')
912
912
913 def normpath(path):
913 def normpath(path):
914 return pconvert(os.path.normpath(path))
914 return pconvert(os.path.normpath(path))
915
915
916 makelock = _makelock_file
916 makelock = _makelock_file
917 readlock = _readlock_file
917 readlock = _readlock_file
918
918
919 def samestat(s1, s2):
919 def samestat(s1, s2):
920 return False
920 return False
921
921
922 # A sequence of backslashes is special iff it precedes a double quote:
922 # A sequence of backslashes is special iff it precedes a double quote:
923 # - if there's an even number of backslashes, the double quote is not
923 # - if there's an even number of backslashes, the double quote is not
924 # quoted (i.e. it ends the quoted region)
924 # quoted (i.e. it ends the quoted region)
925 # - if there's an odd number of backslashes, the double quote is quoted
925 # - if there's an odd number of backslashes, the double quote is quoted
926 # - in both cases, every pair of backslashes is unquoted into a single
926 # - in both cases, every pair of backslashes is unquoted into a single
927 # backslash
927 # backslash
928 # (See http://msdn2.microsoft.com/en-us/library/a1y7w461.aspx )
928 # (See http://msdn2.microsoft.com/en-us/library/a1y7w461.aspx )
929 # So, to quote a string, we must surround it in double quotes, double
929 # So, to quote a string, we must surround it in double quotes, double
930 # the number of backslashes that precede double quotes and add another
930 # the number of backslashes that precede double quotes and add another
931 # backslash before every double quote (being careful with the double
931 # backslash before every double quote (being careful with the double
932 # quote we've appended to the end)
932 # quote we've appended to the end)
933 _quotere = None
933 _quotere = None
934 def shellquote(s):
934 def shellquote(s):
935 global _quotere
935 global _quotere
936 if _quotere is None:
936 if _quotere is None:
937 _quotere = re.compile(r'(\\*)("|\\$)')
937 _quotere = re.compile(r'(\\*)("|\\$)')
938 return '"%s"' % _quotere.sub(r'\1\1\\\2', s)
938 return '"%s"' % _quotere.sub(r'\1\1\\\2', s)
939
939
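# A minimal sketch of the quoting rule described above, using the pure-Python
# shellquote() defined in this Windows branch (util_win32 may override it).
# The strings are made up.
assert shellquote('say "hi"') == '"say \\"hi\\""'
assert shellquote('C:\\path\\') == '"C:\\path\\\\"'
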
940 def explain_exit(code):
940 def explain_exit(code):
941 return _("exited with status %d") % code, code
941 return _("exited with status %d") % code, code
942
942
943 # if you change this stub into a real check, please try to implement the
943 # if you change this stub into a real check, please try to implement the
944 # username and groupname functions above, too.
944 # username and groupname functions above, too.
945 def isowner(fp, st=None):
945 def isowner(fp, st=None):
946 return True
946 return True
947
947
948 def find_in_path(name, path, default=None):
948 def find_in_path(name, path, default=None):
949 '''find name in search path. path can be string (will be split
949 '''find name in search path. path can be string (will be split
950 with os.pathsep), or iterable thing that returns strings. if name
950 with os.pathsep), or iterable thing that returns strings. if name
951 found, return path to name. else return default. name is looked up
951 found, return path to name. else return default. name is looked up
952 using cmd.exe rules, using PATHEXT.'''
952 using cmd.exe rules, using PATHEXT.'''
953 if isinstance(path, str):
953 if isinstance(path, str):
954 path = path.split(os.pathsep)
954 path = path.split(os.pathsep)
955
955
956 pathext = os.environ.get('PATHEXT', '.COM;.EXE;.BAT;.CMD')
956 pathext = os.environ.get('PATHEXT', '.COM;.EXE;.BAT;.CMD')
957 pathext = pathext.lower().split(os.pathsep)
957 pathext = pathext.lower().split(os.pathsep)
958 isexec = os.path.splitext(name)[1].lower() in pathext
958 isexec = os.path.splitext(name)[1].lower() in pathext
959
959
960 for p in path:
960 for p in path:
961 p_name = os.path.join(p, name)
961 p_name = os.path.join(p, name)
962
962
963 if isexec and os.path.exists(p_name):
963 if isexec and os.path.exists(p_name):
964 return p_name
964 return p_name
965
965
966 for ext in pathext:
966 for ext in pathext:
967 p_name_ext = p_name + ext
967 p_name_ext = p_name + ext
968 if os.path.exists(p_name_ext):
968 if os.path.exists(p_name_ext):
969 return p_name_ext
969 return p_name_ext
970 return default
970 return default
971
971
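# A minimal usage sketch for the Windows find_in_path() above; 'hg' is just
# an example program name and may legitimately be absent.
exe = find_in_path('hg', os.environ.get('PATH', ''), default=None)
if exe is None:
    print('hg not found on PATH')
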
972 def set_signal_handler():
972 def set_signal_handler():
973 try:
973 try:
974 set_signal_handler_win32()
974 set_signal_handler_win32()
975 except NameError:
975 except NameError:
976 pass
976 pass
977
977
978 try:
978 try:
979 # override functions with win32 versions if possible
979 # override functions with win32 versions if possible
980 from util_win32 import *
980 from util_win32 import *
981 if not is_win_9x():
981 if not is_win_9x():
982 posixfile = posixfile_nt
982 posixfile = posixfile_nt
983 except ImportError:
983 except ImportError:
984 pass
984 pass
985
985
986 else:
986 else:
987 nulldev = '/dev/null'
987 nulldev = '/dev/null'
988
988
989 def rcfiles(path):
989 def rcfiles(path):
990 rcs = [os.path.join(path, 'hgrc')]
990 rcs = [os.path.join(path, 'hgrc')]
991 rcdir = os.path.join(path, 'hgrc.d')
991 rcdir = os.path.join(path, 'hgrc.d')
992 try:
992 try:
993 rcs.extend([os.path.join(rcdir, f) for f in os.listdir(rcdir)
993 rcs.extend([os.path.join(rcdir, f) for f in os.listdir(rcdir)
994 if f.endswith(".rc")])
994 if f.endswith(".rc")])
995 except OSError:
995 except OSError:
996 pass
996 pass
997 return rcs
997 return rcs
998
998
999 def system_rcpath():
999 def system_rcpath():
1000 path = []
1000 path = []
1001 # old mod_python does not set sys.argv
1001 # old mod_python does not set sys.argv
1002 if len(getattr(sys, 'argv', [])) > 0:
1002 if len(getattr(sys, 'argv', [])) > 0:
1003 path.extend(rcfiles(os.path.dirname(sys.argv[0]) +
1003 path.extend(rcfiles(os.path.dirname(sys.argv[0]) +
1004 '/../etc/mercurial'))
1004 '/../etc/mercurial'))
1005 path.extend(rcfiles('/etc/mercurial'))
1005 path.extend(rcfiles('/etc/mercurial'))
1006 return path
1006 return path
1007
1007
1008 def user_rcpath():
1008 def user_rcpath():
1009 return [os.path.expanduser('~/.hgrc')]
1009 return [os.path.expanduser('~/.hgrc')]
1010
1010
1011 def parse_patch_output(output_line):
1011 def parse_patch_output(output_line):
1012 """parses the output produced by patch and returns the file name"""
1012 """parses the output produced by patch and returns the file name"""
1013 pf = output_line[14:]
1013 pf = output_line[14:]
1014 if os.sys.platform == 'OpenVMS':
1014 if os.sys.platform == 'OpenVMS':
1015 if pf[0] == '`':
1015 if pf[0] == '`':
1016 pf = pf[1:-1] # Remove the quotes
1016 pf = pf[1:-1] # Remove the quotes
1017 else:
1017 else:
1018 if pf.startswith("'") and pf.endswith("'") and " " in pf:
1018 if pf.startswith("'") and pf.endswith("'") and " " in pf:
1019 pf = pf[1:-1] # Remove the quotes
1019 pf = pf[1:-1] # Remove the quotes
1020 return pf
1020 return pf
1021
1021
1022 def is_exec(f):
1022 def is_exec(f):
1023 """check whether a file is executable"""
1023 """check whether a file is executable"""
1024 return (os.lstat(f).st_mode & 0100 != 0)
1024 return (os.lstat(f).st_mode & 0100 != 0)
1025
1025
1026 def set_exec(f, mode):
1026 def set_exec(f, mode):
1027 s = os.lstat(f).st_mode
1027 s = os.lstat(f).st_mode
1028 if (s & 0100 != 0) == mode:
1028 if (s & 0100 != 0) == mode:
1029 return
1029 return
1030 if mode:
1030 if mode:
1031 # Turn on +x for every +r bit when making a file executable
1031 # Turn on +x for every +r bit when making a file executable
1032 # and obey umask.
1032 # and obey umask.
1033 os.chmod(f, s | (s & 0444) >> 2 & ~_umask)
1033 os.chmod(f, s | (s & 0444) >> 2 & ~_umask)
1034 else:
1034 else:
1035 os.chmod(f, s & 0666)
1035 os.chmod(f, s & 0666)
1036
1036
1037 def set_link(f, mode):
1037 def set_link(f, mode):
1038 """make a file a symbolic link/regular file
1038 """make a file a symbolic link/regular file
1039
1039
1040 if a file is changed to a link, its contents become the link data
1040 if a file is changed to a link, its contents become the link data
1041 if a link is changed to a file, its link data become its contents
1041 if a link is changed to a file, its link data become its contents
1042 """
1042 """
1043
1043
1044 m = os.path.islink(f)
1044 m = os.path.islink(f)
1045 if m == bool(mode):
1045 if m == bool(mode):
1046 return
1046 return
1047
1047
1048 if mode: # switch file to link
1048 if mode: # switch file to link
1049 data = file(f).read()
1049 data = file(f).read()
1050 os.unlink(f)
1050 os.unlink(f)
1051 os.symlink(data, f)
1051 os.symlink(data, f)
1052 else:
1052 else:
1053 data = os.readlink(f)
1053 data = os.readlink(f)
1054 os.unlink(f)
1054 os.unlink(f)
1055 file(f, "w").write(data)
1055 file(f, "w").write(data)
1056
1056
1057 def set_binary(fd):
1057 def set_binary(fd):
1058 pass
1058 pass
1059
1059
1060 def pconvert(path):
1060 def pconvert(path):
1061 return path
1061 return path
1062
1062
1063 def localpath(path):
1063 def localpath(path):
1064 return path
1064 return path
1065
1065
1066 normpath = os.path.normpath
1066 normpath = os.path.normpath
1067 samestat = os.path.samestat
1067 samestat = os.path.samestat
1068
1068
1069 def makelock(info, pathname):
1069 def makelock(info, pathname):
1070 try:
1070 try:
1071 os.symlink(info, pathname)
1071 os.symlink(info, pathname)
1072 except OSError, why:
1072 except OSError, why:
1073 if why.errno == errno.EEXIST:
1073 if why.errno == errno.EEXIST:
1074 raise
1074 raise
1075 else:
1075 else:
1076 _makelock_file(info, pathname)
1076 _makelock_file(info, pathname)
1077
1077
1078 def readlock(pathname):
1078 def readlock(pathname):
1079 try:
1079 try:
1080 return os.readlink(pathname)
1080 return os.readlink(pathname)
1081 except OSError, why:
1081 except OSError, why:
1082 if why.errno in (errno.EINVAL, errno.ENOSYS):
1082 if why.errno in (errno.EINVAL, errno.ENOSYS):
1083 return _readlock_file(pathname)
1083 return _readlock_file(pathname)
1084 else:
1084 else:
1085 raise
1085 raise
1086
1086
1087 def shellquote(s):
1087 def shellquote(s):
1088 if os.sys.platform == 'OpenVMS':
1088 if os.sys.platform == 'OpenVMS':
1089 return '"%s"' % s
1089 return '"%s"' % s
1090 else:
1090 else:
1091 return "'%s'" % s.replace("'", "'\\''")
1091 return "'%s'" % s.replace("'", "'\\''")
1092
1092
1093 def testpid(pid):
1093 def testpid(pid):
1094 '''return False if pid dead, True if running or not sure'''
1094 '''return False if pid dead, True if running or not sure'''
1095 if os.sys.platform == 'OpenVMS':
1095 if os.sys.platform == 'OpenVMS':
1096 return True
1096 return True
1097 try:
1097 try:
1098 os.kill(pid, 0)
1098 os.kill(pid, 0)
1099 return True
1099 return True
1100 except OSError, inst:
1100 except OSError, inst:
1101 return inst.errno != errno.ESRCH
1101 return inst.errno != errno.ESRCH
1102
1102
1103 def explain_exit(code):
1103 def explain_exit(code):
1104 """return a 2-tuple (desc, code) describing a process's status"""
1104 """return a 2-tuple (desc, code) describing a process's status"""
1105 if os.WIFEXITED(code):
1105 if os.WIFEXITED(code):
1106 val = os.WEXITSTATUS(code)
1106 val = os.WEXITSTATUS(code)
1107 return _("exited with status %d") % val, val
1107 return _("exited with status %d") % val, val
1108 elif os.WIFSIGNALED(code):
1108 elif os.WIFSIGNALED(code):
1109 val = os.WTERMSIG(code)
1109 val = os.WTERMSIG(code)
1110 return _("killed by signal %d") % val, val
1110 return _("killed by signal %d") % val, val
1111 elif os.WIFSTOPPED(code):
1111 elif os.WIFSTOPPED(code):
1112 val = os.WSTOPSIG(code)
1112 val = os.WSTOPSIG(code)
1113 return _("stopped by signal %d") % val, val
1113 return _("stopped by signal %d") % val, val
1114 raise ValueError(_("invalid exit code"))
1114 raise ValueError(_("invalid exit code"))
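
# Illustrative sketch (editor's note, not part of this changeset): a
# never-called helper showing the shape of the return value.
def _explain_exit_example():
    # a clean exit: status word 0 means "exited with status 0"
    assert explain_exit(0) == (_("exited with status %d") % 0, 0)
    # a process killed by signal 9 carries the signal number in the low bits
    assert explain_exit(9) == (_("killed by signal %d") % 9, 9)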
1115
1115
1116 def isowner(fp, st=None):
1116 def isowner(fp, st=None):
1117 """Return True if the file object f belongs to the current user.
1117 """Return True if the file object f belongs to the current user.
1118
1118
1119 The return value of a util.fstat(f) may be passed as the st argument.
1119 The return value of a util.fstat(f) may be passed as the st argument.
1120 """
1120 """
1121 if st is None:
1121 if st is None:
1122 st = fstat(fp)
1122 st = fstat(fp)
1123 return st.st_uid == os.getuid()
1123 return st.st_uid == os.getuid()
1124
1124
1125 def find_in_path(name, path, default=None):
1125 def find_in_path(name, path, default=None):
1126 '''find name in search path. path can be string (will be split
1126 '''find name in search path. path can be string (will be split
1127 with os.pathsep), or an iterable of strings. if name is found,
1127 with os.pathsep), or an iterable of strings. if name is found,
1128 return the path to name, else return default.'''
1128 return the path to name, else return default.'''
1129 if isinstance(path, str):
1129 if isinstance(path, str):
1130 path = path.split(os.pathsep)
1130 path = path.split(os.pathsep)
1131 for p in path:
1131 for p in path:
1132 p_name = os.path.join(p, name)
1132 p_name = os.path.join(p, name)
1133 if os.path.exists(p_name):
1133 if os.path.exists(p_name):
1134 return p_name
1134 return p_name
1135 return default
1135 return default
1136
1136
1137 def set_signal_handler():
1137 def set_signal_handler():
1138 pass
1138 pass
1139
1139
1140 def find_exe(name, default=None):
1140 def find_exe(name, default=None):
1141 '''find path of an executable.
1141 '''find path of an executable.
1142 if name contains a path component, return it as is. otherwise,
1142 if name contains a path component, return it as is. otherwise,
1143 use normal executable search path.'''
1143 use normal executable search path.'''
1144
1144
1145 if os.sep in name or sys.platform == 'OpenVMS':
1145 if os.sep in name or sys.platform == 'OpenVMS':
1146 # don't check the executable bit. if the file isn't
1146 # don't check the executable bit. if the file isn't
1147 # executable, whoever tries to actually run it will give a
1147 # executable, whoever tries to actually run it will give a
1148 # much more useful error message.
1148 # much more useful error message.
1149 return name
1149 return name
1150 return find_in_path(name, os.environ.get('PATH', ''), default=default)
1150 return find_in_path(name, os.environ.get('PATH', ''), default=default)
1151
1151
1152 def _buildencodefun():
1152 def _buildencodefun():
1153 e = '_'
1153 e = '_'
1154 win_reserved = [ord(x) for x in '\\:*?"<>|']
1154 win_reserved = [ord(x) for x in '\\:*?"<>|']
1155 cmap = dict([ (chr(x), chr(x)) for x in xrange(127) ])
1155 cmap = dict([ (chr(x), chr(x)) for x in xrange(127) ])
1156 for x in (range(32) + range(126, 256) + win_reserved):
1156 for x in (range(32) + range(126, 256) + win_reserved):
1157 cmap[chr(x)] = "~%02x" % x
1157 cmap[chr(x)] = "~%02x" % x
1158 for x in range(ord("A"), ord("Z")+1) + [ord(e)]:
1158 for x in range(ord("A"), ord("Z")+1) + [ord(e)]:
1159 cmap[chr(x)] = e + chr(x).lower()
1159 cmap[chr(x)] = e + chr(x).lower()
1160 dmap = {}
1160 dmap = {}
1161 for k, v in cmap.iteritems():
1161 for k, v in cmap.iteritems():
1162 dmap[v] = k
1162 dmap[v] = k
1163 def decode(s):
1163 def decode(s):
1164 i = 0
1164 i = 0
1165 while i < len(s):
1165 while i < len(s):
1166 for l in xrange(1, 4):
1166 for l in xrange(1, 4):
1167 try:
1167 try:
1168 yield dmap[s[i:i+l]]
1168 yield dmap[s[i:i+l]]
1169 i += l
1169 i += l
1170 break
1170 break
1171 except KeyError:
1171 except KeyError:
1172 pass
1172 pass
1173 else:
1173 else:
1174 raise KeyError
1174 raise KeyError
1175 return (lambda s: "".join([cmap[c] for c in s]),
1175 return (lambda s: "".join([cmap[c] for c in s]),
1176 lambda s: "".join(list(decode(s))))
1176 lambda s: "".join(list(decode(s))))
1177
1177
1178 encodefilename, decodefilename = _buildencodefun()
1178 encodefilename, decodefilename = _buildencodefun()
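
# Illustrative sketch (editor's note, not part of this changeset): a
# never-called helper showing the store filename encoding round-trip.
def _encodefilename_example():
    # uppercase letters become '_'-prefixed lowercase and reserved characters
    # such as '?' become '~xx' hex escapes; decoding reverses both
    assert encodefilename('data/Foo BAR?.txt') == 'data/_foo _b_a_r~3f.txt'
    assert decodefilename('data/_foo _b_a_r~3f.txt') == 'data/Foo BAR?.txt'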
1179
1179
1180 def encodedopener(openerfn, fn):
1180 def encodedopener(openerfn, fn):
1181 def o(path, *args, **kw):
1181 def o(path, *args, **kw):
1182 return openerfn(fn(path), *args, **kw)
1182 return openerfn(fn(path), *args, **kw)
1183 return o
1183 return o
1184
1184
1185 def mktempcopy(name, emptyok=False):
1185 def mktempcopy(name, emptyok=False):
1186 """Create a temporary file with the same contents from name
1186 """Create a temporary file with the same contents from name
1187
1187
1188 The permission bits are copied from the original file.
1188 The permission bits are copied from the original file.
1189
1189
1190 If the temporary file is going to be truncated immediately, you
1190 If the temporary file is going to be truncated immediately, you
1191 can use emptyok=True as an optimization.
1191 can use emptyok=True as an optimization.
1192
1192
1193 Returns the name of the temporary file.
1193 Returns the name of the temporary file.
1194 """
1194 """
1195 d, fn = os.path.split(name)
1195 d, fn = os.path.split(name)
1196 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
1196 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
1197 os.close(fd)
1197 os.close(fd)
1198 # Temporary files are created with mode 0600, which is usually not
1198 # Temporary files are created with mode 0600, which is usually not
1199 # what we want. If the original file already exists, just copy
1199 # what we want. If the original file already exists, just copy
1200 # its mode. Otherwise, manually obey umask.
1200 # its mode. Otherwise, manually obey umask.
1201 try:
1201 try:
1202 st_mode = os.lstat(name).st_mode
1202 st_mode = os.lstat(name).st_mode
1203 except OSError, inst:
1203 except OSError, inst:
1204 if inst.errno != errno.ENOENT:
1204 if inst.errno != errno.ENOENT:
1205 raise
1205 raise
1206 st_mode = 0666 & ~_umask
1206 st_mode = 0666 & ~_umask
1207 os.chmod(temp, st_mode)
1207 os.chmod(temp, st_mode)
1208 if emptyok:
1208 if emptyok:
1209 return temp
1209 return temp
1210 try:
1210 try:
1211 try:
1211 try:
1212 ifp = posixfile(name, "rb")
1212 ifp = posixfile(name, "rb")
1213 except IOError, inst:
1213 except IOError, inst:
1214 if inst.errno == errno.ENOENT:
1214 if inst.errno == errno.ENOENT:
1215 return temp
1215 return temp
1216 if not getattr(inst, 'filename', None):
1216 if not getattr(inst, 'filename', None):
1217 inst.filename = name
1217 inst.filename = name
1218 raise
1218 raise
1219 ofp = posixfile(temp, "wb")
1219 ofp = posixfile(temp, "wb")
1220 for chunk in filechunkiter(ifp):
1220 for chunk in filechunkiter(ifp):
1221 ofp.write(chunk)
1221 ofp.write(chunk)
1222 ifp.close()
1222 ifp.close()
1223 ofp.close()
1223 ofp.close()
1224 except:
1224 except:
1225 try: os.unlink(temp)
1225 try: os.unlink(temp)
1226 except: pass
1226 except: pass
1227 raise
1227 raise
1228 return temp
1228 return temp
1229
1229
1230 class atomictempfile(posixfile):
1230 class atomictempfile(posixfile):
1231 """file-like object that atomically updates a file
1231 """file-like object that atomically updates a file
1232
1232
1233 All writes will be redirected to a temporary copy of the original
1233 All writes will be redirected to a temporary copy of the original
1234 file. When rename is called, the copy is renamed to the original
1234 file. When rename is called, the copy is renamed to the original
1235 name, making the changes visible.
1235 name, making the changes visible.
1236 """
1236 """
1237 def __init__(self, name, mode):
1237 def __init__(self, name, mode):
1238 self.__name = name
1238 self.__name = name
1239 self.temp = mktempcopy(name, emptyok=('w' in mode))
1239 self.temp = mktempcopy(name, emptyok=('w' in mode))
1240 posixfile.__init__(self, self.temp, mode)
1240 posixfile.__init__(self, self.temp, mode)
1241
1241
1242 def rename(self):
1242 def rename(self):
1243 if not self.closed:
1243 if not self.closed:
1244 posixfile.close(self)
1244 posixfile.close(self)
1245 rename(self.temp, localpath(self.__name))
1245 rename(self.temp, localpath(self.__name))
1246
1246
1247 def __del__(self):
1247 def __del__(self):
1248 if not self.closed:
1248 if not self.closed:
1249 try:
1249 try:
1250 os.unlink(self.temp)
1250 os.unlink(self.temp)
1251 except: pass
1251 except: pass
1252 posixfile.close(self)
1252 posixfile.close(self)
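
# Illustrative sketch (editor's note, not part of this changeset): the usual
# write-then-rename pattern; the path is hypothetical and the helper is
# never called.
def _atomictempfile_example():
    f = atomictempfile('some/file.txt', 'w')
    f.write('new contents')
    f.rename()  # only now does 'some/file.txt' show the new contents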
1253
1253
1254 class opener(object):
1254 class opener(object):
1255 """Open files relative to a base directory
1255 """Open files relative to a base directory
1256
1256
1257 This class is used to hide the details of COW semantics and
1257 This class is used to hide the details of COW semantics and
1258 remote file access from higher level code.
1258 remote file access from higher level code.
1259 """
1259 """
1260 def __init__(self, base, audit=True):
1260 def __init__(self, base, audit=True):
1261 self.base = base
1261 self.base = base
1262 self.audit = audit
1262 self.audit = audit
1263
1263
1264 def __getattr__(self, name):
1264 def __getattr__(self, name):
1265 if name == '_can_symlink':
1265 if name == '_can_symlink':
1266 self._can_symlink = checklink(self.base)
1266 self._can_symlink = checklink(self.base)
1267 return self._can_symlink
1267 return self._can_symlink
1268 raise AttributeError(name)
1268 raise AttributeError(name)
1269
1269
1270 def __call__(self, path, mode="r", text=False, atomictemp=False):
1270 def __call__(self, path, mode="r", text=False, atomictemp=False):
1271 if self.audit:
1271 if self.audit:
1272 audit_path(path)
1272 audit_path(path)
1273 f = os.path.join(self.base, path)
1273 f = os.path.join(self.base, path)
1274
1274
1275 if not text and "b" not in mode:
1275 if not text and "b" not in mode:
1276 mode += "b" # for that other OS
1276 mode += "b" # for that other OS
1277
1277
1278 if mode[0] != "r":
1278 if mode[0] != "r":
1279 try:
1279 try:
1280 nlink = nlinks(f)
1280 nlink = nlinks(f)
1281 except OSError:
1281 except OSError:
1282 nlink = 0
1282 nlink = 0
1283 d = os.path.dirname(f)
1283 d = os.path.dirname(f)
1284 if not os.path.isdir(d):
1284 if not os.path.isdir(d):
1285 os.makedirs(d)
1285 os.makedirs(d)
1286 if atomictemp:
1286 if atomictemp:
1287 return atomictempfile(f, mode)
1287 return atomictempfile(f, mode)
1288 if nlink > 1:
1288 if nlink > 1:
1289 rename(mktempcopy(f), f)
1289 rename(mktempcopy(f), f)
1290 return posixfile(f, mode)
1290 return posixfile(f, mode)
1291
1291
1292 def symlink(self, src, dst):
1292 def symlink(self, src, dst):
1293 if self.audit:
1293 if self.audit:
1294 audit_path(dst)
1294 audit_path(dst)
1295 linkname = os.path.join(self.base, dst)
1295 linkname = os.path.join(self.base, dst)
1296 try:
1296 try:
1297 os.unlink(linkname)
1297 os.unlink(linkname)
1298 except OSError:
1298 except OSError:
1299 pass
1299 pass
1300
1300
1301 dirname = os.path.dirname(linkname)
1301 dirname = os.path.dirname(linkname)
1302 if not os.path.exists(dirname):
1302 if not os.path.exists(dirname):
1303 os.makedirs(dirname)
1303 os.makedirs(dirname)
1304
1304
1305 if self._can_symlink:
1305 if self._can_symlink:
1306 try:
1306 os.symlink(src, linkname)
1307 os.symlink(src, linkname)
1308 except OSError, err:
1309 raise OSError(err.errno, _('could not symlink to %r: %s') %
1310 (src, err.strerror), linkname)
1307 else:
1311 else:
1308 f = self(dst, "w")
1312 f = self(dst, "w")
1309 f.write(src)
1313 f.write(src)
1310 f.close()
1314 f.close()
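
# Illustrative sketch (editor's note, not part of this changeset): how
# higher-level code typically drives opener; the base directory and file
# names are hypothetical and the helper is never called.
def _opener_example():
    op = opener('/repo/.hg/store')     # every path below is relative to this base
    f = op('data/somefile.i', 'a', atomictemp=False)
    f.write('appended revlog data')
    f.close()
    op.symlink('target', 'somelink')   # writes a plain file when symlinks are unavailable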
1311
1315
1312 class chunkbuffer(object):
1316 class chunkbuffer(object):
1313 """Allow arbitrary sized chunks of data to be efficiently read from an
1317 """Allow arbitrary sized chunks of data to be efficiently read from an
1314 iterator over chunks of arbitrary size."""
1318 iterator over chunks of arbitrary size."""
1315
1319
1316 def __init__(self, in_iter, targetsize = 2**16):
1320 def __init__(self, in_iter, targetsize = 2**16):
1317 """in_iter is the iterator that's iterating over the input chunks.
1321 """in_iter is the iterator that's iterating over the input chunks.
1318 targetsize is how big a buffer to try to maintain."""
1322 targetsize is how big a buffer to try to maintain."""
1319 self.in_iter = iter(in_iter)
1323 self.in_iter = iter(in_iter)
1320 self.buf = ''
1324 self.buf = ''
1321 self.targetsize = int(targetsize)
1325 self.targetsize = int(targetsize)
1322 if self.targetsize <= 0:
1326 if self.targetsize <= 0:
1323 raise ValueError(_("targetsize must be greater than 0, was %d") %
1327 raise ValueError(_("targetsize must be greater than 0, was %d") %
1324 targetsize)
1328 targetsize)
1325 self.iterempty = False
1329 self.iterempty = False
1326
1330
1327 def fillbuf(self):
1331 def fillbuf(self):
1328 """Ignore target size; read every chunk from iterator until empty."""
1332 """Ignore target size; read every chunk from iterator until empty."""
1329 if not self.iterempty:
1333 if not self.iterempty:
1330 collector = cStringIO.StringIO()
1334 collector = cStringIO.StringIO()
1331 collector.write(self.buf)
1335 collector.write(self.buf)
1332 for ch in self.in_iter:
1336 for ch in self.in_iter:
1333 collector.write(ch)
1337 collector.write(ch)
1334 self.buf = collector.getvalue()
1338 self.buf = collector.getvalue()
1335 self.iterempty = True
1339 self.iterempty = True
1336
1340
1337 def read(self, l):
1341 def read(self, l):
1338 """Read L bytes of data from the iterator of chunks of data.
1342 """Read L bytes of data from the iterator of chunks of data.
1339 Returns less than L bytes if the iterator runs dry."""
1343 Returns less than L bytes if the iterator runs dry."""
1340 if l > len(self.buf) and not self.iterempty:
1344 if l > len(self.buf) and not self.iterempty:
1341 # Clamp to a multiple of self.targetsize
1345 # Clamp to a multiple of self.targetsize
1342 targetsize = self.targetsize * ((l // self.targetsize) + 1)
1346 targetsize = self.targetsize * ((l // self.targetsize) + 1)
1343 collector = cStringIO.StringIO()
1347 collector = cStringIO.StringIO()
1344 collector.write(self.buf)
1348 collector.write(self.buf)
1345 collected = len(self.buf)
1349 collected = len(self.buf)
1346 for chunk in self.in_iter:
1350 for chunk in self.in_iter:
1347 collector.write(chunk)
1351 collector.write(chunk)
1348 collected += len(chunk)
1352 collected += len(chunk)
1349 if collected >= targetsize:
1353 if collected >= targetsize:
1350 break
1354 break
1351 if collected < targetsize:
1355 if collected < targetsize:
1352 self.iterempty = True
1356 self.iterempty = True
1353 self.buf = collector.getvalue()
1357 self.buf = collector.getvalue()
1354 s, self.buf = self.buf[:l], buffer(self.buf, l)
1358 s, self.buf = self.buf[:l], buffer(self.buf, l)
1355 return s
1359 return s
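
# Illustrative sketch (editor's note, not part of this changeset): a
# never-called helper demonstrating reads across chunk boundaries.
def _chunkbuffer_example():
    cb = chunkbuffer(iter(['abc', 'defgh', 'ij']))
    assert cb.read(4) == 'abcd'      # spans the first two input chunks
    assert cb.read(100) == 'efghij'  # shorter than requested once the iterator is dry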
1356
1360
1357 def filechunkiter(f, size=65536, limit=None):
1361 def filechunkiter(f, size=65536, limit=None):
1358 """Create a generator that produces the data in the file size
1362 """Create a generator that produces the data in the file size
1359 (default 65536) bytes at a time, up to optional limit (default is
1363 (default 65536) bytes at a time, up to optional limit (default is
1360 to read all data). Chunks may be less than size bytes if the
1364 to read all data). Chunks may be less than size bytes if the
1361 chunk is the last chunk in the file, or the file is a socket or
1365 chunk is the last chunk in the file, or the file is a socket or
1362 some other type of file that sometimes reads less data than is
1366 some other type of file that sometimes reads less data than is
1363 requested."""
1367 requested."""
1364 assert size >= 0
1368 assert size >= 0
1365 assert limit is None or limit >= 0
1369 assert limit is None or limit >= 0
1366 while True:
1370 while True:
1367 if limit is None: nbytes = size
1371 if limit is None: nbytes = size
1368 else: nbytes = min(limit, size)
1372 else: nbytes = min(limit, size)
1369 s = nbytes and f.read(nbytes)
1373 s = nbytes and f.read(nbytes)
1370 if not s: break
1374 if not s: break
1371 if limit: limit -= len(s)
1375 if limit: limit -= len(s)
1372 yield s
1376 yield s
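
# Illustrative sketch (editor's note, not part of this changeset): reading at
# most one megabyte of a file, 8 kB at a time; the path is hypothetical and
# the helper is never called.
def _filechunkiter_example():
    fp = open('some/big.file', 'rb')
    total = 0
    for chunk in filechunkiter(fp, size=8192, limit=1024 * 1024):
        total += len(chunk)
    fp.close()
    return total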
1373
1377
1374 def makedate():
1378 def makedate():
1375 lt = time.localtime()
1379 lt = time.localtime()
1376 if lt[8] == 1 and time.daylight:
1380 if lt[8] == 1 and time.daylight:
1377 tz = time.altzone
1381 tz = time.altzone
1378 else:
1382 else:
1379 tz = time.timezone
1383 tz = time.timezone
1380 return time.mktime(lt), tz
1384 return time.mktime(lt), tz
1381
1385
1382 def datestr(date=None, format='%a %b %d %H:%M:%S %Y', timezone=True):
1386 def datestr(date=None, format='%a %b %d %H:%M:%S %Y', timezone=True):
1383 """represent a (unixtime, offset) tuple as a localized time.
1387 """represent a (unixtime, offset) tuple as a localized time.
1384 unixtime is seconds since the epoch, and offset is the time zone's
1388 unixtime is seconds since the epoch, and offset is the time zone's
1385 number of seconds away from UTC. if timezone is false, do not
1389 number of seconds away from UTC. if timezone is false, do not
1386 append time zone to string."""
1390 append time zone to string."""
1387 t, tz = date or makedate()
1391 t, tz = date or makedate()
1388 s = time.strftime(format, time.gmtime(float(t) - tz))
1392 s = time.strftime(format, time.gmtime(float(t) - tz))
1389 if timezone:
1393 if timezone:
1390 s += " %+03d%02d" % (-tz / 3600, ((-tz % 3600) / 60))
1394 s += " %+03d%02d" % (-tz / 3600, ((-tz % 3600) / 60))
1391 return s
1395 return s
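
# Illustrative sketch (editor's note, not part of this changeset): a
# never-called helper pinning down the output format (assuming the default
# C/English locale for the %a and %b names).
def _datestr_example():
    # midnight UTC on 2007-07-01, stored as (unixtime, offset from UTC)
    assert datestr((1183248000, 0)) == 'Sun Jul 01 00:00:00 2007 +0000'
    # the same instant rendered in a UTC+2 zone
    assert datestr((1183248000, -7200)) == 'Sun Jul 01 02:00:00 2007 +0200'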
1392
1396
1393 def strdate(string, format, defaults):
1397 def strdate(string, format, defaults):
1394 """parse a localized time string and return a (unixtime, offset) tuple.
1398 """parse a localized time string and return a (unixtime, offset) tuple.
1395 if the string cannot be parsed, ValueError is raised."""
1399 if the string cannot be parsed, ValueError is raised."""
1396 def timezone(string):
1400 def timezone(string):
1397 tz = string.split()[-1]
1401 tz = string.split()[-1]
1398 if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
1402 if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
1399 tz = int(tz)
1403 tz = int(tz)
1400 offset = - 3600 * (tz / 100) - 60 * (tz % 100)
1404 offset = - 3600 * (tz / 100) - 60 * (tz % 100)
1401 return offset
1405 return offset
1402 if tz == "GMT" or tz == "UTC":
1406 if tz == "GMT" or tz == "UTC":
1403 return 0
1407 return 0
1404 return None
1408 return None
1405
1409
1406 # NOTE: unixtime = localunixtime + offset
1410 # NOTE: unixtime = localunixtime + offset
1407 offset, date = timezone(string), string
1411 offset, date = timezone(string), string
1408 if offset != None:
1412 if offset != None:
1409 date = " ".join(string.split()[:-1])
1413 date = " ".join(string.split()[:-1])
1410
1414
1411 # add missing elements from defaults
1415 # add missing elements from defaults
1412 for part in defaults:
1416 for part in defaults:
1413 found = [True for p in part if ("%"+p) in format]
1417 found = [True for p in part if ("%"+p) in format]
1414 if not found:
1418 if not found:
1415 date += "@" + defaults[part]
1419 date += "@" + defaults[part]
1416 format += "@%" + part[0]
1420 format += "@%" + part[0]
1417
1421
1418 timetuple = time.strptime(date, format)
1422 timetuple = time.strptime(date, format)
1419 localunixtime = int(calendar.timegm(timetuple))
1423 localunixtime = int(calendar.timegm(timetuple))
1420 if offset is None:
1424 if offset is None:
1421 # local timezone
1425 # local timezone
1422 unixtime = int(time.mktime(timetuple))
1426 unixtime = int(time.mktime(timetuple))
1423 offset = unixtime - localunixtime
1427 offset = unixtime - localunixtime
1424 else:
1428 else:
1425 unixtime = localunixtime + offset
1429 unixtime = localunixtime + offset
1426 return unixtime, offset
1430 return unixtime, offset
1427
1431
1428 def parsedate(string, formats=None, defaults=None):
1432 def parsedate(string, formats=None, defaults=None):
1429 """parse a localized time string and return a (unixtime, offset) tuple.
1433 """parse a localized time string and return a (unixtime, offset) tuple.
1430 The date may be a "unixtime offset" string or in one of the specified
1434 The date may be a "unixtime offset" string or in one of the specified
1431 formats."""
1435 formats."""
1432 if not string:
1436 if not string:
1433 return 0, 0
1437 return 0, 0
1434 if not formats:
1438 if not formats:
1435 formats = defaultdateformats
1439 formats = defaultdateformats
1436 string = string.strip()
1440 string = string.strip()
1437 try:
1441 try:
1438 when, offset = map(int, string.split(' '))
1442 when, offset = map(int, string.split(' '))
1439 except ValueError:
1443 except ValueError:
1440 # fill out defaults
1444 # fill out defaults
1441 if not defaults:
1445 if not defaults:
1442 defaults = {}
1446 defaults = {}
1443 now = makedate()
1447 now = makedate()
1444 for part in "d mb yY HI M S".split():
1448 for part in "d mb yY HI M S".split():
1445 if part not in defaults:
1449 if part not in defaults:
1446 if part[0] in "HMS":
1450 if part[0] in "HMS":
1447 defaults[part] = "00"
1451 defaults[part] = "00"
1448 elif part[0] in "dm":
1452 elif part[0] in "dm":
1449 defaults[part] = "1"
1453 defaults[part] = "1"
1450 else:
1454 else:
1451 defaults[part] = datestr(now, "%" + part[0], False)
1455 defaults[part] = datestr(now, "%" + part[0], False)
1452
1456
1453 for format in formats:
1457 for format in formats:
1454 try:
1458 try:
1455 when, offset = strdate(string, format, defaults)
1459 when, offset = strdate(string, format, defaults)
1456 except ValueError:
1460 except ValueError:
1457 pass
1461 pass
1458 else:
1462 else:
1459 break
1463 break
1460 else:
1464 else:
1461 raise Abort(_('invalid date: %r ') % string)
1465 raise Abort(_('invalid date: %r ') % string)
1462 # validate explicit (probably user-specified) date and
1466 # validate explicit (probably user-specified) date and
1463 # time zone offset. values must fit in signed 32 bits for
1467 # time zone offset. values must fit in signed 32 bits for
1464 # current 32-bit linux runtimes. timezones go from UTC-12
1468 # current 32-bit linux runtimes. timezones go from UTC-12
1465 # to UTC+14
1469 # to UTC+14
1466 if abs(when) > 0x7fffffff:
1470 if abs(when) > 0x7fffffff:
1467 raise Abort(_('date exceeds 32 bits: %d') % when)
1471 raise Abort(_('date exceeds 32 bits: %d') % when)
1468 if offset < -50400 or offset > 43200:
1472 if offset < -50400 or offset > 43200:
1469 raise Abort(_('impossible time zone offset: %d') % offset)
1473 raise Abort(_('impossible time zone offset: %d') % offset)
1470 return when, offset
1474 return when, offset
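
# Illustrative sketch (editor's note, not part of this changeset): a
# never-called helper showing both accepted input shapes.
def _parsedate_example():
    # the internal "unixtime offset" form is passed straight through
    assert parsedate('1183248000 0') == (1183248000, 0)
    # an explicit format plus a numeric zone; the missing seconds come from defaults
    assert parsedate('2007-07-01 12:00 +0200', ['%Y-%m-%d %H:%M']) == (1183284000, -7200)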
1471
1475
1472 def matchdate(date):
1476 def matchdate(date):
1473 """Return a function that matches a given date match specifier
1477 """Return a function that matches a given date match specifier
1474
1478
1475 Formats include:
1479 Formats include:
1476
1480
1477 '{date}' match a given date to the accuracy provided
1481 '{date}' match a given date to the accuracy provided
1478
1482
1479 '<{date}' on or before a given date
1483 '<{date}' on or before a given date
1480
1484
1481 '>{date}' on or after a given date
1485 '>{date}' on or after a given date
1482
1486
1483 """
1487 """
1484
1488
1485 def lower(date):
1489 def lower(date):
1486 return parsedate(date, extendeddateformats)[0]
1490 return parsedate(date, extendeddateformats)[0]
1487
1491
1488 def upper(date):
1492 def upper(date):
1489 d = dict(mb="12", HI="23", M="59", S="59")
1493 d = dict(mb="12", HI="23", M="59", S="59")
1490 for days in "31 30 29".split():
1494 for days in "31 30 29".split():
1491 try:
1495 try:
1492 d["d"] = days
1496 d["d"] = days
1493 return parsedate(date, extendeddateformats, d)[0]
1497 return parsedate(date, extendeddateformats, d)[0]
1494 except:
1498 except:
1495 pass
1499 pass
1496 d["d"] = "28"
1500 d["d"] = "28"
1497 return parsedate(date, extendeddateformats, d)[0]
1501 return parsedate(date, extendeddateformats, d)[0]
1498
1502
1499 if date[0] == "<":
1503 if date[0] == "<":
1500 when = upper(date[1:])
1504 when = upper(date[1:])
1501 return lambda x: x <= when
1505 return lambda x: x <= when
1502 elif date[0] == ">":
1506 elif date[0] == ">":
1503 when = lower(date[1:])
1507 when = lower(date[1:])
1504 return lambda x: x >= when
1508 return lambda x: x >= when
1505 elif date[0] == "-":
1509 elif date[0] == "-":
1506 try:
1510 try:
1507 days = int(date[1:])
1511 days = int(date[1:])
1508 except ValueError:
1512 except ValueError:
1509 raise Abort(_("invalid day spec: %s") % date[1:])
1513 raise Abort(_("invalid day spec: %s") % date[1:])
1510 when = makedate()[0] - days * 3600 * 24
1514 when = makedate()[0] - days * 3600 * 24
1511 return lambda x: x >= when
1515 return lambda x: x >= when
1512 elif " to " in date:
1516 elif " to " in date:
1513 a, b = date.split(" to ")
1517 a, b = date.split(" to ")
1514 start, stop = lower(a), upper(b)
1518 start, stop = lower(a), upper(b)
1515 return lambda x: x >= start and x <= stop
1519 return lambda x: x >= start and x <= stop
1516 else:
1520 else:
1517 start, stop = lower(date), upper(date)
1521 start, stop = lower(date), upper(date)
1518 return lambda x: x >= start and x <= stop
1522 return lambda x: x >= start and x <= stop
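
# Illustrative sketch (editor's note, not part of this changeset): a
# never-called helper, assuming '%Y-%m-%d' is among the extended date formats
# this module accepts.
def _matchdate_example():
    m = matchdate('>2007-06-01')  # on or after June 1st, 2007, local time
    assert m(1183248000)          # 2007-07-01 00:00:00 UTC
    assert not m(1180569600)      # 2007-05-31 00:00:00 UTC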
1519
1523
1520 def shortuser(user):
1524 def shortuser(user):
1521 """Return a short representation of a user name or email address."""
1525 """Return a short representation of a user name or email address."""
1522 f = user.find('@')
1526 f = user.find('@')
1523 if f >= 0:
1527 if f >= 0:
1524 user = user[:f]
1528 user = user[:f]
1525 f = user.find('<')
1529 f = user.find('<')
1526 if f >= 0:
1530 if f >= 0:
1527 user = user[f+1:]
1531 user = user[f+1:]
1528 f = user.find(' ')
1532 f = user.find(' ')
1529 if f >= 0:
1533 if f >= 0:
1530 user = user[:f]
1534 user = user[:f]
1531 f = user.find('.')
1535 f = user.find('.')
1532 if f >= 0:
1536 if f >= 0:
1533 user = user[:f]
1537 user = user[:f]
1534 return user
1538 return user
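
# Illustrative sketch (editor's note, not part of this changeset): a
# never-called helper showing how user strings are shortened.
def _shortuser_example():
    assert shortuser('Matt Mackall <mpm@selenic.com>') == 'mpm'
    assert shortuser('john.doe@example.com') == 'john'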
1535
1539
1536 def ellipsis(text, maxlength=400):
1540 def ellipsis(text, maxlength=400):
1537 """Trim string to at most maxlength (default: 400) characters."""
1541 """Trim string to at most maxlength (default: 400) characters."""
1538 if len(text) <= maxlength:
1542 if len(text) <= maxlength:
1539 return text
1543 return text
1540 else:
1544 else:
1541 return "%s..." % (text[:maxlength-3])
1545 return "%s..." % (text[:maxlength-3])
1542
1546
1543 def walkrepos(path):
1547 def walkrepos(path):
1544 '''yield every hg repository under path, recursively.'''
1548 '''yield every hg repository under path, recursively.'''
1545 def errhandler(err):
1549 def errhandler(err):
1546 if err.filename == path:
1550 if err.filename == path:
1547 raise err
1551 raise err
1548
1552
1549 for root, dirs, files in os.walk(path, onerror=errhandler):
1553 for root, dirs, files in os.walk(path, onerror=errhandler):
1550 for d in dirs:
1554 for d in dirs:
1551 if d == '.hg':
1555 if d == '.hg':
1552 yield root
1556 yield root
1553 dirs[:] = []
1557 dirs[:] = []
1554 break
1558 break
1555
1559
1556 _rcpath = None
1560 _rcpath = None
1557
1561
1558 def os_rcpath():
1562 def os_rcpath():
1559 '''return default os-specific hgrc search path'''
1563 '''return default os-specific hgrc search path'''
1560 path = system_rcpath()
1564 path = system_rcpath()
1561 path.extend(user_rcpath())
1565 path.extend(user_rcpath())
1562 path = [os.path.normpath(f) for f in path]
1566 path = [os.path.normpath(f) for f in path]
1563 return path
1567 return path
1564
1568
1565 def rcpath():
1569 def rcpath():
1566 '''return hgrc search path. if env var HGRCPATH is set, use it.
1570 '''return hgrc search path. if env var HGRCPATH is set, use it.
1567 for each item in path, if directory, use files ending in .rc,
1571 for each item in path, if directory, use files ending in .rc,
1568 else use item.
1572 else use item.
1569 make HGRCPATH empty to only look in .hg/hgrc of current repo.
1573 make HGRCPATH empty to only look in .hg/hgrc of current repo.
1570 if no HGRCPATH, use default os-specific path.'''
1574 if no HGRCPATH, use default os-specific path.'''
1571 global _rcpath
1575 global _rcpath
1572 if _rcpath is None:
1576 if _rcpath is None:
1573 if 'HGRCPATH' in os.environ:
1577 if 'HGRCPATH' in os.environ:
1574 _rcpath = []
1578 _rcpath = []
1575 for p in os.environ['HGRCPATH'].split(os.pathsep):
1579 for p in os.environ['HGRCPATH'].split(os.pathsep):
1576 if not p: continue
1580 if not p: continue
1577 if os.path.isdir(p):
1581 if os.path.isdir(p):
1578 for f in os.listdir(p):
1582 for f in os.listdir(p):
1579 if f.endswith('.rc'):
1583 if f.endswith('.rc'):
1580 _rcpath.append(os.path.join(p, f))
1584 _rcpath.append(os.path.join(p, f))
1581 else:
1585 else:
1582 _rcpath.append(p)
1586 _rcpath.append(p)
1583 else:
1587 else:
1584 _rcpath = os_rcpath()
1588 _rcpath = os_rcpath()
1585 return _rcpath
1589 return _rcpath
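
# Illustrative sketch (editor's note, not part of this changeset): hypothetical
# HGRCPATH handling; note that rcpath() caches its answer in _rcpath, so the
# environment must be set before the first call. Never called.
def _rcpath_example():
    os.environ['HGRCPATH'] = os.pathsep.join(['/etc/mercurial', '/home/user/.hgrc'])
    return rcpath()  # the '*.rc' files under /etc/mercurial, plus /home/user/.hgrc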
1586
1590
1587 def bytecount(nbytes):
1591 def bytecount(nbytes):
1588 '''return byte count formatted as readable string, with units'''
1592 '''return byte count formatted as readable string, with units'''
1589
1593
1590 units = (
1594 units = (
1591 (100, 1<<30, _('%.0f GB')),
1595 (100, 1<<30, _('%.0f GB')),
1592 (10, 1<<30, _('%.1f GB')),
1596 (10, 1<<30, _('%.1f GB')),
1593 (1, 1<<30, _('%.2f GB')),
1597 (1, 1<<30, _('%.2f GB')),
1594 (100, 1<<20, _('%.0f MB')),
1598 (100, 1<<20, _('%.0f MB')),
1595 (10, 1<<20, _('%.1f MB')),
1599 (10, 1<<20, _('%.1f MB')),
1596 (1, 1<<20, _('%.2f MB')),
1600 (1, 1<<20, _('%.2f MB')),
1597 (100, 1<<10, _('%.0f KB')),
1601 (100, 1<<10, _('%.0f KB')),
1598 (10, 1<<10, _('%.1f KB')),
1602 (10, 1<<10, _('%.1f KB')),
1599 (1, 1<<10, _('%.2f KB')),
1603 (1, 1<<10, _('%.2f KB')),
1600 (1, 1, _('%.0f bytes')),
1604 (1, 1, _('%.0f bytes')),
1601 )
1605 )
1602
1606
1603 for multiplier, divisor, format in units:
1607 for multiplier, divisor, format in units:
1604 if nbytes >= divisor * multiplier:
1608 if nbytes >= divisor * multiplier:
1605 return format % (nbytes / float(divisor))
1609 return format % (nbytes / float(divisor))
1606 return units[-1][2] % nbytes
1610 return units[-1][2] % nbytes
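
# Illustrative sketch (editor's note, not part of this changeset): a
# never-called helper pinning down a few formatted values (assuming the
# untranslated message catalog).
def _bytecount_example():
    assert bytecount(0) == '0 bytes'
    assert bytecount(12345678) == '11.8 MB'        # 12345678 / 2**20 ~= 11.77
    assert bytecount(5 * (1 << 30)) == '5.00 GB'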
1607
1611
1608 def drop_scheme(scheme, path):
1612 def drop_scheme(scheme, path):
1609 sc = scheme + ':'
1613 sc = scheme + ':'
1610 if path.startswith(sc):
1614 if path.startswith(sc):
1611 path = path[len(sc):]
1615 path = path[len(sc):]
1612 if path.startswith('//'):
1616 if path.startswith('//'):
1613 path = path[2:]
1617 path = path[2:]
1614 return path
1618 return path
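
# Illustrative sketch (editor's note, not part of this changeset): a
# never-called helper showing the scheme stripping.
def _drop_scheme_example():
    assert drop_scheme('file', 'file:///tmp/repo') == '/tmp/repo'
    assert drop_scheme('file', '/tmp/repo') == '/tmp/repo'
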
@@ -1,310 +1,310 b''
1 # util_win32.py - utility functions that use win32 API
1 # util_win32.py - utility functions that use win32 API
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 #
5 #
6 # This software may be used and distributed according to the terms of
6 # This software may be used and distributed according to the terms of
7 # the GNU General Public License, incorporated herein by reference.
7 # the GNU General Public License, incorporated herein by reference.
8
8
9 # Mark Hammond's win32all package allows better functionality on
9 # Mark Hammond's win32all package allows better functionality on
10 # Windows. this module overrides definitions in util.py. if not
10 # Windows. this module overrides definitions in util.py. if not
11 # available, import of this module will fail, and generic code will be
11 # available, import of this module will fail, and generic code will be
12 # used.
12 # used.
13
13
14 import win32api
14 import win32api
15
15
16 from i18n import _
16 from i18n import _
17 import errno, os, pywintypes, win32con, win32file, win32process
17 import errno, os, pywintypes, win32con, win32file, win32process
18 import cStringIO, winerror
18 import cStringIO, winerror
19 from win32com.shell import shell,shellcon
19 from win32com.shell import shell,shellcon
20
20
21 class WinError:
21 class WinError:
22 winerror_map = {
22 winerror_map = {
23 winerror.ERROR_ACCESS_DENIED: errno.EACCES,
23 winerror.ERROR_ACCESS_DENIED: errno.EACCES,
24 winerror.ERROR_ACCOUNT_DISABLED: errno.EACCES,
24 winerror.ERROR_ACCOUNT_DISABLED: errno.EACCES,
25 winerror.ERROR_ACCOUNT_RESTRICTION: errno.EACCES,
25 winerror.ERROR_ACCOUNT_RESTRICTION: errno.EACCES,
26 winerror.ERROR_ALREADY_ASSIGNED: errno.EBUSY,
26 winerror.ERROR_ALREADY_ASSIGNED: errno.EBUSY,
27 winerror.ERROR_ALREADY_EXISTS: errno.EEXIST,
27 winerror.ERROR_ALREADY_EXISTS: errno.EEXIST,
28 winerror.ERROR_ARITHMETIC_OVERFLOW: errno.ERANGE,
28 winerror.ERROR_ARITHMETIC_OVERFLOW: errno.ERANGE,
29 winerror.ERROR_BAD_COMMAND: errno.EIO,
29 winerror.ERROR_BAD_COMMAND: errno.EIO,
30 winerror.ERROR_BAD_DEVICE: errno.ENODEV,
30 winerror.ERROR_BAD_DEVICE: errno.ENODEV,
31 winerror.ERROR_BAD_DRIVER_LEVEL: errno.ENXIO,
31 winerror.ERROR_BAD_DRIVER_LEVEL: errno.ENXIO,
32 winerror.ERROR_BAD_EXE_FORMAT: errno.ENOEXEC,
32 winerror.ERROR_BAD_EXE_FORMAT: errno.ENOEXEC,
33 winerror.ERROR_BAD_FORMAT: errno.ENOEXEC,
33 winerror.ERROR_BAD_FORMAT: errno.ENOEXEC,
34 winerror.ERROR_BAD_LENGTH: errno.EINVAL,
34 winerror.ERROR_BAD_LENGTH: errno.EINVAL,
35 winerror.ERROR_BAD_PATHNAME: errno.ENOENT,
35 winerror.ERROR_BAD_PATHNAME: errno.ENOENT,
36 winerror.ERROR_BAD_PIPE: errno.EPIPE,
36 winerror.ERROR_BAD_PIPE: errno.EPIPE,
37 winerror.ERROR_BAD_UNIT: errno.ENODEV,
37 winerror.ERROR_BAD_UNIT: errno.ENODEV,
38 winerror.ERROR_BAD_USERNAME: errno.EINVAL,
38 winerror.ERROR_BAD_USERNAME: errno.EINVAL,
39 winerror.ERROR_BROKEN_PIPE: errno.EPIPE,
39 winerror.ERROR_BROKEN_PIPE: errno.EPIPE,
40 winerror.ERROR_BUFFER_OVERFLOW: errno.ENAMETOOLONG,
40 winerror.ERROR_BUFFER_OVERFLOW: errno.ENAMETOOLONG,
41 winerror.ERROR_BUSY: errno.EBUSY,
41 winerror.ERROR_BUSY: errno.EBUSY,
42 winerror.ERROR_BUSY_DRIVE: errno.EBUSY,
42 winerror.ERROR_BUSY_DRIVE: errno.EBUSY,
43 winerror.ERROR_CALL_NOT_IMPLEMENTED: errno.ENOSYS,
43 winerror.ERROR_CALL_NOT_IMPLEMENTED: errno.ENOSYS,
44 winerror.ERROR_CANNOT_MAKE: errno.EACCES,
44 winerror.ERROR_CANNOT_MAKE: errno.EACCES,
45 winerror.ERROR_CANTOPEN: errno.EIO,
45 winerror.ERROR_CANTOPEN: errno.EIO,
46 winerror.ERROR_CANTREAD: errno.EIO,
46 winerror.ERROR_CANTREAD: errno.EIO,
47 winerror.ERROR_CANTWRITE: errno.EIO,
47 winerror.ERROR_CANTWRITE: errno.EIO,
48 winerror.ERROR_CRC: errno.EIO,
48 winerror.ERROR_CRC: errno.EIO,
49 winerror.ERROR_CURRENT_DIRECTORY: errno.EACCES,
49 winerror.ERROR_CURRENT_DIRECTORY: errno.EACCES,
50 winerror.ERROR_DEVICE_IN_USE: errno.EBUSY,
50 winerror.ERROR_DEVICE_IN_USE: errno.EBUSY,
51 winerror.ERROR_DEV_NOT_EXIST: errno.ENODEV,
51 winerror.ERROR_DEV_NOT_EXIST: errno.ENODEV,
52 winerror.ERROR_DIRECTORY: errno.EINVAL,
52 winerror.ERROR_DIRECTORY: errno.EINVAL,
53 winerror.ERROR_DIR_NOT_EMPTY: errno.ENOTEMPTY,
53 winerror.ERROR_DIR_NOT_EMPTY: errno.ENOTEMPTY,
54 winerror.ERROR_DISK_CHANGE: errno.EIO,
54 winerror.ERROR_DISK_CHANGE: errno.EIO,
55 winerror.ERROR_DISK_FULL: errno.ENOSPC,
55 winerror.ERROR_DISK_FULL: errno.ENOSPC,
56 winerror.ERROR_DRIVE_LOCKED: errno.EBUSY,
56 winerror.ERROR_DRIVE_LOCKED: errno.EBUSY,
57 winerror.ERROR_ENVVAR_NOT_FOUND: errno.EINVAL,
57 winerror.ERROR_ENVVAR_NOT_FOUND: errno.EINVAL,
58 winerror.ERROR_EXE_MARKED_INVALID: errno.ENOEXEC,
58 winerror.ERROR_EXE_MARKED_INVALID: errno.ENOEXEC,
59 winerror.ERROR_FILENAME_EXCED_RANGE: errno.ENAMETOOLONG,
59 winerror.ERROR_FILENAME_EXCED_RANGE: errno.ENAMETOOLONG,
60 winerror.ERROR_FILE_EXISTS: errno.EEXIST,
60 winerror.ERROR_FILE_EXISTS: errno.EEXIST,
61 winerror.ERROR_FILE_INVALID: errno.ENODEV,
61 winerror.ERROR_FILE_INVALID: errno.ENODEV,
62 winerror.ERROR_FILE_NOT_FOUND: errno.ENOENT,
62 winerror.ERROR_FILE_NOT_FOUND: errno.ENOENT,
63 winerror.ERROR_GEN_FAILURE: errno.EIO,
63 winerror.ERROR_GEN_FAILURE: errno.EIO,
64 winerror.ERROR_HANDLE_DISK_FULL: errno.ENOSPC,
64 winerror.ERROR_HANDLE_DISK_FULL: errno.ENOSPC,
65 winerror.ERROR_INSUFFICIENT_BUFFER: errno.ENOMEM,
65 winerror.ERROR_INSUFFICIENT_BUFFER: errno.ENOMEM,
66 winerror.ERROR_INVALID_ACCESS: errno.EACCES,
66 winerror.ERROR_INVALID_ACCESS: errno.EACCES,
67 winerror.ERROR_INVALID_ADDRESS: errno.EFAULT,
67 winerror.ERROR_INVALID_ADDRESS: errno.EFAULT,
68 winerror.ERROR_INVALID_BLOCK: errno.EFAULT,
68 winerror.ERROR_INVALID_BLOCK: errno.EFAULT,
69 winerror.ERROR_INVALID_DATA: errno.EINVAL,
69 winerror.ERROR_INVALID_DATA: errno.EINVAL,
70 winerror.ERROR_INVALID_DRIVE: errno.ENODEV,
70 winerror.ERROR_INVALID_DRIVE: errno.ENODEV,
71 winerror.ERROR_INVALID_EXE_SIGNATURE: errno.ENOEXEC,
71 winerror.ERROR_INVALID_EXE_SIGNATURE: errno.ENOEXEC,
72 winerror.ERROR_INVALID_FLAGS: errno.EINVAL,
72 winerror.ERROR_INVALID_FLAGS: errno.EINVAL,
73 winerror.ERROR_INVALID_FUNCTION: errno.ENOSYS,
73 winerror.ERROR_INVALID_FUNCTION: errno.ENOSYS,
74 winerror.ERROR_INVALID_HANDLE: errno.EBADF,
74 winerror.ERROR_INVALID_HANDLE: errno.EBADF,
75 winerror.ERROR_INVALID_LOGON_HOURS: errno.EACCES,
75 winerror.ERROR_INVALID_LOGON_HOURS: errno.EACCES,
76 winerror.ERROR_INVALID_NAME: errno.EINVAL,
76 winerror.ERROR_INVALID_NAME: errno.EINVAL,
77 winerror.ERROR_INVALID_OWNER: errno.EINVAL,
77 winerror.ERROR_INVALID_OWNER: errno.EINVAL,
78 winerror.ERROR_INVALID_PARAMETER: errno.EINVAL,
78 winerror.ERROR_INVALID_PARAMETER: errno.EINVAL,
79 winerror.ERROR_INVALID_PASSWORD: errno.EPERM,
79 winerror.ERROR_INVALID_PASSWORD: errno.EPERM,
80 winerror.ERROR_INVALID_PRIMARY_GROUP: errno.EINVAL,
80 winerror.ERROR_INVALID_PRIMARY_GROUP: errno.EINVAL,
81 winerror.ERROR_INVALID_SIGNAL_NUMBER: errno.EINVAL,
81 winerror.ERROR_INVALID_SIGNAL_NUMBER: errno.EINVAL,
82 winerror.ERROR_INVALID_TARGET_HANDLE: errno.EIO,
82 winerror.ERROR_INVALID_TARGET_HANDLE: errno.EIO,
83 winerror.ERROR_INVALID_WORKSTATION: errno.EACCES,
83 winerror.ERROR_INVALID_WORKSTATION: errno.EACCES,
84 winerror.ERROR_IO_DEVICE: errno.EIO,
84 winerror.ERROR_IO_DEVICE: errno.EIO,
85 winerror.ERROR_IO_INCOMPLETE: errno.EINTR,
85 winerror.ERROR_IO_INCOMPLETE: errno.EINTR,
86 winerror.ERROR_LOCKED: errno.EBUSY,
86 winerror.ERROR_LOCKED: errno.EBUSY,
87 winerror.ERROR_LOCK_VIOLATION: errno.EACCES,
87 winerror.ERROR_LOCK_VIOLATION: errno.EACCES,
88 winerror.ERROR_LOGON_FAILURE: errno.EACCES,
88 winerror.ERROR_LOGON_FAILURE: errno.EACCES,
89 winerror.ERROR_MAPPED_ALIGNMENT: errno.EINVAL,
89 winerror.ERROR_MAPPED_ALIGNMENT: errno.EINVAL,
90 winerror.ERROR_META_EXPANSION_TOO_LONG: errno.E2BIG,
90 winerror.ERROR_META_EXPANSION_TOO_LONG: errno.E2BIG,
91 winerror.ERROR_MORE_DATA: errno.EPIPE,
91 winerror.ERROR_MORE_DATA: errno.EPIPE,
92 winerror.ERROR_NEGATIVE_SEEK: errno.ESPIPE,
92 winerror.ERROR_NEGATIVE_SEEK: errno.ESPIPE,
93 winerror.ERROR_NOACCESS: errno.EFAULT,
93 winerror.ERROR_NOACCESS: errno.EFAULT,
94 winerror.ERROR_NONE_MAPPED: errno.EINVAL,
94 winerror.ERROR_NONE_MAPPED: errno.EINVAL,
95 winerror.ERROR_NOT_ENOUGH_MEMORY: errno.ENOMEM,
95 winerror.ERROR_NOT_ENOUGH_MEMORY: errno.ENOMEM,
96 winerror.ERROR_NOT_READY: errno.EAGAIN,
96 winerror.ERROR_NOT_READY: errno.EAGAIN,
97 winerror.ERROR_NOT_SAME_DEVICE: errno.EXDEV,
97 winerror.ERROR_NOT_SAME_DEVICE: errno.EXDEV,
98 winerror.ERROR_NO_DATA: errno.EPIPE,
98 winerror.ERROR_NO_DATA: errno.EPIPE,
99 winerror.ERROR_NO_MORE_SEARCH_HANDLES: errno.EIO,
99 winerror.ERROR_NO_MORE_SEARCH_HANDLES: errno.EIO,
100 winerror.ERROR_NO_PROC_SLOTS: errno.EAGAIN,
100 winerror.ERROR_NO_PROC_SLOTS: errno.EAGAIN,
101 winerror.ERROR_NO_SUCH_PRIVILEGE: errno.EACCES,
101 winerror.ERROR_NO_SUCH_PRIVILEGE: errno.EACCES,
102 winerror.ERROR_OPEN_FAILED: errno.EIO,
102 winerror.ERROR_OPEN_FAILED: errno.EIO,
103 winerror.ERROR_OPEN_FILES: errno.EBUSY,
103 winerror.ERROR_OPEN_FILES: errno.EBUSY,
104 winerror.ERROR_OPERATION_ABORTED: errno.EINTR,
104 winerror.ERROR_OPERATION_ABORTED: errno.EINTR,
105 winerror.ERROR_OUTOFMEMORY: errno.ENOMEM,
105 winerror.ERROR_OUTOFMEMORY: errno.ENOMEM,
106 winerror.ERROR_PASSWORD_EXPIRED: errno.EACCES,
106 winerror.ERROR_PASSWORD_EXPIRED: errno.EACCES,
107 winerror.ERROR_PATH_BUSY: errno.EBUSY,
107 winerror.ERROR_PATH_BUSY: errno.EBUSY,
108 winerror.ERROR_PATH_NOT_FOUND: errno.ENOENT,
108 winerror.ERROR_PATH_NOT_FOUND: errno.ENOENT,
109 winerror.ERROR_PIPE_BUSY: errno.EBUSY,
109 winerror.ERROR_PIPE_BUSY: errno.EBUSY,
110 winerror.ERROR_PIPE_CONNECTED: errno.EPIPE,
110 winerror.ERROR_PIPE_CONNECTED: errno.EPIPE,
111 winerror.ERROR_PIPE_LISTENING: errno.EPIPE,
111 winerror.ERROR_PIPE_LISTENING: errno.EPIPE,
112 winerror.ERROR_PIPE_NOT_CONNECTED: errno.EPIPE,
112 winerror.ERROR_PIPE_NOT_CONNECTED: errno.EPIPE,
113 winerror.ERROR_PRIVILEGE_NOT_HELD: errno.EACCES,
113 winerror.ERROR_PRIVILEGE_NOT_HELD: errno.EACCES,
114 winerror.ERROR_READ_FAULT: errno.EIO,
114 winerror.ERROR_READ_FAULT: errno.EIO,
115 winerror.ERROR_SEEK: errno.EIO,
115 winerror.ERROR_SEEK: errno.EIO,
116 winerror.ERROR_SEEK_ON_DEVICE: errno.ESPIPE,
116 winerror.ERROR_SEEK_ON_DEVICE: errno.ESPIPE,
117 winerror.ERROR_SHARING_BUFFER_EXCEEDED: errno.ENFILE,
117 winerror.ERROR_SHARING_BUFFER_EXCEEDED: errno.ENFILE,
118 winerror.ERROR_SHARING_VIOLATION: errno.EACCES,
118 winerror.ERROR_SHARING_VIOLATION: errno.EACCES,
119 winerror.ERROR_STACK_OVERFLOW: errno.ENOMEM,
119 winerror.ERROR_STACK_OVERFLOW: errno.ENOMEM,
120 winerror.ERROR_SWAPERROR: errno.ENOENT,
120 winerror.ERROR_SWAPERROR: errno.ENOENT,
121 winerror.ERROR_TOO_MANY_MODULES: errno.EMFILE,
121 winerror.ERROR_TOO_MANY_MODULES: errno.EMFILE,
122 winerror.ERROR_TOO_MANY_OPEN_FILES: errno.EMFILE,
122 winerror.ERROR_TOO_MANY_OPEN_FILES: errno.EMFILE,
123 winerror.ERROR_UNRECOGNIZED_MEDIA: errno.ENXIO,
123 winerror.ERROR_UNRECOGNIZED_MEDIA: errno.ENXIO,
124 winerror.ERROR_UNRECOGNIZED_VOLUME: errno.ENODEV,
124 winerror.ERROR_UNRECOGNIZED_VOLUME: errno.ENODEV,
125 winerror.ERROR_WAIT_NO_CHILDREN: errno.ECHILD,
125 winerror.ERROR_WAIT_NO_CHILDREN: errno.ECHILD,
126 winerror.ERROR_WRITE_FAULT: errno.EIO,
126 winerror.ERROR_WRITE_FAULT: errno.EIO,
127 winerror.ERROR_WRITE_PROTECT: errno.EROFS,
127 winerror.ERROR_WRITE_PROTECT: errno.EROFS,
128 }
128 }
129
129
130 def __init__(self, err):
130 def __init__(self, err):
131 self.win_errno, self.win_function, self.win_strerror = err
131 self.win_errno, self.win_function, self.win_strerror = err
132 if self.win_strerror.endswith('.'):
132 if self.win_strerror.endswith('.'):
133 self.win_strerror = self.win_strerror[:-1]
133 self.win_strerror = self.win_strerror[:-1]
134
134
135 class WinIOError(WinError, IOError):
135 class WinIOError(WinError, IOError):
136 def __init__(self, err, filename=None):
136 def __init__(self, err, filename=None):
137 WinError.__init__(self, err)
137 WinError.__init__(self, err)
138 IOError.__init__(self, self.winerror_map.get(self.win_errno, 0),
138 IOError.__init__(self, self.winerror_map.get(self.win_errno, 0),
139 self.win_strerror)
139 self.win_strerror)
140 self.filename = filename
140 self.filename = filename
141
141
142 class WinOSError(WinError, OSError):
142 class WinOSError(WinError, OSError):
143 def __init__(self, err):
143 def __init__(self, err):
144 WinError.__init__(self, err)
144 WinError.__init__(self, err)
145 OSError.__init__(self, self.winerror_map.get(self.win_errno, 0),
145 OSError.__init__(self, self.winerror_map.get(self.win_errno, 0),
146 self.win_strerror)
146 self.win_strerror)
147
147
148 def os_link(src, dst):
148 def os_link(src, dst):
149 # NB will only succeed on NTFS
149 # NB will only succeed on NTFS
150 try:
150 try:
151 win32file.CreateHardLink(dst, src)
151 win32file.CreateHardLink(dst, src)
152 except pywintypes.error, details:
152 except pywintypes.error, details:
153 raise WinOSError(details)
153 raise WinOSError(details)
154
154
155 def nlinks(pathname):
155 def nlinks(pathname):
156 """Return number of hardlinks for the given file."""
156 """Return number of hardlinks for the given file."""
157 try:
157 try:
158 fh = win32file.CreateFile(pathname,
158 fh = win32file.CreateFile(pathname,
159 win32file.GENERIC_READ, win32file.FILE_SHARE_READ,
159 win32file.GENERIC_READ, win32file.FILE_SHARE_READ,
160 None, win32file.OPEN_EXISTING, 0, None)
160 None, win32file.OPEN_EXISTING, 0, None)
161 res = win32file.GetFileInformationByHandle(fh)
161 res = win32file.GetFileInformationByHandle(fh)
162 fh.Close()
162 fh.Close()
163 return res[7]
163 return res[7]
164 except pywintypes.error:
164 except pywintypes.error:
165 return os.lstat(pathname).st_nlink
165 return os.lstat(pathname).st_nlink
166
166
167 def testpid(pid):
167 def testpid(pid):
168 '''return True if pid is still running or unable to
168 '''return True if pid is still running or unable to
169 determine, False otherwise'''
169 determine, False otherwise'''
170 try:
170 try:
171 handle = win32api.OpenProcess(
171 handle = win32api.OpenProcess(
172 win32con.PROCESS_QUERY_INFORMATION, False, pid)
172 win32con.PROCESS_QUERY_INFORMATION, False, pid)
173 if handle:
173 if handle:
174 status = win32process.GetExitCodeProcess(handle)
174 status = win32process.GetExitCodeProcess(handle)
175 return status == win32con.STILL_ACTIVE
175 return status == win32con.STILL_ACTIVE
176 except pywintypes.error, details:
176 except pywintypes.error, details:
177 return details[0] != winerror.ERROR_INVALID_PARAMETER
177 return details[0] != winerror.ERROR_INVALID_PARAMETER
178 return True
178 return True
179
179
180 def system_rcpath_win32():
180 def system_rcpath_win32():
181 '''return default os-specific hgrc search path'''
181 '''return default os-specific hgrc search path'''
182 proc = win32api.GetCurrentProcess()
182 proc = win32api.GetCurrentProcess()
183 try:
183 try:
184 # This will fail on windows < NT
184 # This will fail on windows < NT
185 filename = win32process.GetModuleFileNameEx(proc, 0)
185 filename = win32process.GetModuleFileNameEx(proc, 0)
186 except:
186 except:
187 filename = win32api.GetModuleFileName(0)
187 filename = win32api.GetModuleFileName(0)
188 return [os.path.join(os.path.dirname(filename), 'mercurial.ini')]
188 return [os.path.join(os.path.dirname(filename), 'mercurial.ini')]
189
189
190 def user_rcpath_win32():
190 def user_rcpath_win32():
191 '''return os-specific hgrc search path to the user dir'''
191 '''return os-specific hgrc search path to the user dir'''
192 userdir = os.path.expanduser('~')
192 userdir = os.path.expanduser('~')
193 if userdir == '~':
193 if userdir == '~':
194 # We are on win < nt: fetch the APPDATA directory location and use
194 # We are on win < nt: fetch the APPDATA directory location and use
195 # the parent directory as the user home dir.
195 # the parent directory as the user home dir.
196 appdir = shell.SHGetPathFromIDList(
196 appdir = shell.SHGetPathFromIDList(
197 shell.SHGetSpecialFolderLocation(0, shellcon.CSIDL_APPDATA))
197 shell.SHGetSpecialFolderLocation(0, shellcon.CSIDL_APPDATA))
198 userdir = os.path.dirname(appdir)
198 userdir = os.path.dirname(appdir)
199 return os.path.join(userdir, 'mercurial.ini')
199 return os.path.join(userdir, 'mercurial.ini')
200
200
201 class posixfile_nt(object):
201 class posixfile_nt(object):
202 '''file object with posix-like semantics. on windows, normal
202 '''file object with posix-like semantics. on windows, normal
203 files can not be deleted or renamed if they are open. must open
203 files can not be deleted or renamed if they are open. must open
204 with win32file.FILE_SHARE_DELETE. this flag does not exist on
204 with win32file.FILE_SHARE_DELETE. this flag does not exist on
205 windows < nt, so do not use this class there.'''
205 windows < nt, so do not use this class there.'''
206
206
207 # tried to use win32file._open_osfhandle to pass fd to os.fdopen,
207 # tried to use win32file._open_osfhandle to pass fd to os.fdopen,
208 # but it does not work at all. wrap the win32 file api instead.
208 # but it does not work at all. wrap the win32 file api instead.
209
209
210 def __init__(self, name, mode='rb'):
210 def __init__(self, name, mode='rb'):
211 access = 0
211 access = 0
212 if 'r' in mode or '+' in mode:
212 if 'r' in mode:
213 access |= win32file.GENERIC_READ
213 access |= win32file.GENERIC_READ
214 if 'w' in mode or 'a' in mode:
214 if 'w' in mode or 'a' in mode or '+' in mode:
215 access |= win32file.GENERIC_WRITE
215 access |= win32file.GENERIC_WRITE
216 if 'r' in mode:
216 if 'r' in mode:
217 creation = win32file.OPEN_EXISTING
217 creation = win32file.OPEN_EXISTING
218 elif 'a' in mode:
218 elif 'a' in mode:
219 creation = win32file.OPEN_ALWAYS
219 creation = win32file.OPEN_ALWAYS
220 else:
220 else:
221 creation = win32file.CREATE_ALWAYS
221 creation = win32file.CREATE_ALWAYS
222 try:
222 try:
223 self.handle = win32file.CreateFile(name,
223 self.handle = win32file.CreateFile(name,
224 access,
224 access,
225 win32file.FILE_SHARE_READ |
225 win32file.FILE_SHARE_READ |
226 win32file.FILE_SHARE_WRITE |
226 win32file.FILE_SHARE_WRITE |
227 win32file.FILE_SHARE_DELETE,
227 win32file.FILE_SHARE_DELETE,
228 None,
228 None,
229 creation,
229 creation,
230 win32file.FILE_ATTRIBUTE_NORMAL,
230 win32file.FILE_ATTRIBUTE_NORMAL,
231 0)
231 0)
232 except pywintypes.error, err:
232 except pywintypes.error, err:
233 raise WinIOError(err, name)
233 raise WinIOError(err, name)
234 self.closed = False
234 self.closed = False
235 self.name = name
235 self.name = name
236 self.mode = mode
236 self.mode = mode
237
237
238 def __iter__(self):
238 def __iter__(self):
239 for line in self.read().splitlines(True):
239 for line in self.read().splitlines(True):
240 yield line
240 yield line
241
241
242 def read(self, count=-1):
242 def read(self, count=-1):
243 try:
243 try:
244 cs = cStringIO.StringIO()
244 cs = cStringIO.StringIO()
245 while count:
245 while count:
246 wincount = int(count)
246 wincount = int(count)
247 if wincount == -1:
247 if wincount == -1:
248 wincount = 1048576
248 wincount = 1048576
249 val, data = win32file.ReadFile(self.handle, wincount)
249 val, data = win32file.ReadFile(self.handle, wincount)
250 if not data: break
250 if not data: break
251 cs.write(data)
251 cs.write(data)
252 if count != -1:
252 if count != -1:
253 count -= len(data)
253 count -= len(data)
254 return cs.getvalue()
254 return cs.getvalue()
255 except pywintypes.error, err:
255 except pywintypes.error, err:
256 raise WinIOError(err)
256 raise WinIOError(err)
257
257
258 def write(self, data):
258 def write(self, data):
259 try:
259 try:
260 if 'a' in self.mode:
260 if 'a' in self.mode:
261 win32file.SetFilePointer(self.handle, 0, win32file.FILE_END)
261 win32file.SetFilePointer(self.handle, 0, win32file.FILE_END)
262 nwrit = 0
262 nwrit = 0
263 while nwrit < len(data):
263 while nwrit < len(data):
264 val, nwrit = win32file.WriteFile(self.handle, data)
264 val, nwrit = win32file.WriteFile(self.handle, data)
265 data = data[nwrit:]
265 data = data[nwrit:]
266 except pywintypes.error, err:
266 except pywintypes.error, err:
267 raise WinIOError(err)
267 raise WinIOError(err)
268
268
269 def seek(self, pos, whence=0):
269 def seek(self, pos, whence=0):
270 try:
270 try:
271 win32file.SetFilePointer(self.handle, int(pos), whence)
271 win32file.SetFilePointer(self.handle, int(pos), whence)
272 except pywintypes.error, err:
272 except pywintypes.error, err:
273 raise WinIOError(err)
273 raise WinIOError(err)
274
274
275 def tell(self):
275 def tell(self):
276 try:
276 try:
277 return win32file.SetFilePointer(self.handle, 0,
277 return win32file.SetFilePointer(self.handle, 0,
278 win32file.FILE_CURRENT)
278 win32file.FILE_CURRENT)
279 except pywintypes.error, err:
279 except pywintypes.error, err:
280 raise WinIOError(err)
280 raise WinIOError(err)
281
281
282 def close(self):
282 def close(self):
283 if not self.closed:
283 if not self.closed:
284 self.handle = None
284 self.handle = None
285 self.closed = True
285 self.closed = True
286
286
287 def flush(self):
287 def flush(self):
288 try:
288 try:
289 win32file.FlushFileBuffers(self.handle)
289 win32file.FlushFileBuffers(self.handle)
290 except pywintypes.error, err:
290 except pywintypes.error, err:
291 raise WinIOError(err)
291 raise WinIOError(err)
292
292
293 def truncate(self, pos=0):
293 def truncate(self, pos=0):
294 try:
294 try:
295 win32file.SetFilePointer(self.handle, int(pos),
295 win32file.SetFilePointer(self.handle, int(pos),
296 win32file.FILE_BEGIN)
296 win32file.FILE_BEGIN)
297 win32file.SetEndOfFile(self.handle)
297 win32file.SetEndOfFile(self.handle)
298 except pywintypes.error, err:
298 except pywintypes.error, err:
299 raise WinIOError(err)
299 raise WinIOError(err)
300
300
301 getuser_fallback = win32api.GetUserName
301 getuser_fallback = win32api.GetUserName
302
302
303 def set_signal_handler_win32():
303 def set_signal_handler_win32():
304 """Register a termination handler for console events including
304 """Register a termination handler for console events including
305 CTRL+C. python signal handlers do not work well with socket
305 CTRL+C. python signal handlers do not work well with socket
306 operations.
306 operations.
307 """
307 """
308 def handler(event):
308 def handler(event):
309 win32process.ExitProcess(1)
309 win32process.ExitProcess(1)
310 win32api.SetConsoleCtrlHandler(handler)
310 win32api.SetConsoleCtrlHandler(handler)
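
A minimal usage sketch of the posixfile_nt class above, assuming the pywin32 (win32file, pywintypes) bindings are installed; the file names are hypothetical. Because the handle is opened with FILE_SHARE_READ, FILE_SHARE_WRITE and FILE_SHARE_DELETE, the file can still be renamed or deleted while it is open, which an ordinary Python file object on Windows does not allow:

    import os

    # sketch only: 'journal.tmp' and 'journal' are illustrative names
    f = posixfile_nt('journal.tmp', 'wb')
    f.write('transaction data\n')
    f.flush()
    os.rename('journal.tmp', 'journal')  # succeeds even though f is still open
    f.close()
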
@@ -1,66 +1,81 b''
1 #!/bin/sh
1 #!/bin/sh
2 # Test basic extension support
2 # Test basic extension support
3
3
4 cat > foobar.py <<EOF
4 cat > foobar.py <<EOF
5 import os
5 import os
6 from mercurial import commands
6 from mercurial import commands
7
7
8 def uisetup(ui):
8 def uisetup(ui):
9 ui.write("uisetup called\\n")
9 ui.write("uisetup called\\n")
10 ui.write("ui.parentui is%s None\\n" % (ui.parentui is not None
10 ui.write("ui.parentui is%s None\\n" % (ui.parentui is not None
11 and "not" or ""))
11 and "not" or ""))
12
12
13 def reposetup(ui, repo):
13 def reposetup(ui, repo):
14 ui.write("reposetup called for %s\\n" % os.path.basename(repo.root))
14 ui.write("reposetup called for %s\\n" % os.path.basename(repo.root))
15 ui.write("ui %s= repo.ui\\n" % (ui == repo.ui and "=" or "!"))
15 ui.write("ui %s= repo.ui\\n" % (ui == repo.ui and "=" or "!"))
16
16
17 def foo(ui, *args, **kwargs):
17 def foo(ui, *args, **kwargs):
18 ui.write("Foo\\n")
18 ui.write("Foo\\n")
19
19
20 def bar(ui, *args, **kwargs):
20 def bar(ui, *args, **kwargs):
21 ui.write("Bar\\n")
21 ui.write("Bar\\n")
22
22
23 cmdtable = {
23 cmdtable = {
24 "foo": (foo, [], "hg foo"),
24 "foo": (foo, [], "hg foo"),
25 "bar": (bar, [], "hg bar"),
25 "bar": (bar, [], "hg bar"),
26 }
26 }
27
27
28 commands.norepo += ' bar'
28 commands.norepo += ' bar'
29 EOF
29 EOF
30 abspath=`pwd`/foobar.py
30 abspath=`pwd`/foobar.py
31
31
32 mkdir barfoo
32 mkdir barfoo
33 cp foobar.py barfoo/__init__.py
33 cp foobar.py barfoo/__init__.py
34 barfoopath=`pwd`/barfoo
34 barfoopath=`pwd`/barfoo
35
35
36 hg init a
36 hg init a
37 cd a
37 cd a
38 echo foo > file
38 echo foo > file
39 hg add file
39 hg add file
40 hg commit -m 'add file'
40 hg commit -m 'add file'
41
41
42 echo '[extensions]' >> $HGRCPATH
42 echo '[extensions]' >> $HGRCPATH
43 echo "foobar = $abspath" >> $HGRCPATH
43 echo "foobar = $abspath" >> $HGRCPATH
44 hg foo
44 hg foo
45
45
46 cd ..
46 cd ..
47 hg clone a b
47 hg clone a b
48
48
49 hg bar
49 hg bar
50
50
51 echo '% module/__init__.py-style'
51 echo '% module/__init__.py-style'
52 echo '[extensions]' > $HGRCPATH
52 echo '[extensions]' > $HGRCPATH
53 echo "barfoo = $barfoopath" >> $HGRCPATH
53 echo "barfoo = $barfoopath" >> $HGRCPATH
54 cd a
54 cd a
55 hg foo
55 hg foo
56
56
57 cd ..
57 cd ..
58 cat > empty.py <<EOF
58 cat > empty.py <<EOF
59 '''empty cmdtable
59 '''empty cmdtable
60 '''
60 '''
61 cmdtable = {}
61 cmdtable = {}
62 EOF
62 EOF
63 emptypath=`pwd`/empty.py
63 emptypath=`pwd`/empty.py
64 echo '[extensions]' > $HGRCPATH
64 echo '[extensions]' > $HGRCPATH
65 echo "empty = $emptypath" >> $HGRCPATH
65 echo "empty = $emptypath" >> $HGRCPATH
66 hg help empty
66 hg help empty
67
68 cat > debugextension.py <<EOF
69 '''only debugcommands
70 '''
71 def debugfoobar(ui, repo, *args, **opts):
72 "yet another debug command"
73 pass
74
75 cmdtable = {"debugfoobar": (debugfoobar, (), "hg debugfoobar")}
76 EOF
77 debugpath=`pwd`/debugextension.py
78 echo '[extensions]' > $HGRCPATH
79 echo "debugextension = $debugpath" >> $HGRCPATH
80 hg help debugextension
81 hg --debug help debugextension
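
The test above exercises the extension API end to end: uisetup(ui) runs when the extension is loaded, reposetup(ui, repo) runs for every repository, and commands are picked up from the module-level cmdtable, with names appended to commands.norepo allowed to run outside a repository. A minimal standalone sketch of such an extension, assuming the same API the test uses (the module name and command name are illustrative):

    # hello.py - minimal extension sketch; module and command names are illustrative
    '''say hello
    '''
    def hello(ui, repo, *args, **opts):
        '''print the repository root'''
        ui.write("hello from %s\n" % repo.root)

    cmdtable = {
        # name: (function, options, synopsis)
        "hello": (hello, [], "hg hello"),
    }

It would be enabled the same way as in the test, via an [extensions] entry such as hello = /path/to/hello.py in $HGRCPATH, after which hg hello runs the command and hg help hello prints the extension docstring, like hg help empty above.
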
@@ -1,24 +1,51 b''
1 uisetup called
1 uisetup called
2 ui.parentui isnot None
2 ui.parentui isnot None
3 reposetup called for a
3 reposetup called for a
4 ui == repo.ui
4 ui == repo.ui
5 Foo
5 Foo
6 uisetup called
6 uisetup called
7 ui.parentui is None
7 ui.parentui is None
8 reposetup called for a
8 reposetup called for a
9 ui == repo.ui
9 ui == repo.ui
10 reposetup called for b
10 reposetup called for b
11 ui == repo.ui
11 ui == repo.ui
12 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
12 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
13 uisetup called
13 uisetup called
14 ui.parentui is None
14 ui.parentui is None
15 Bar
15 Bar
16 % module/__init__.py-style
16 % module/__init__.py-style
17 uisetup called
17 uisetup called
18 ui.parentui isnot None
18 ui.parentui isnot None
19 reposetup called for a
19 reposetup called for a
20 ui == repo.ui
20 ui == repo.ui
21 Foo
21 Foo
22 empty extension - empty cmdtable
22 empty extension - empty cmdtable
23
23
24 no commands defined
24 no commands defined
25 debugextension extension - only debugcommands
26
27 no commands defined
28 debugextension extension - only debugcommands
29
30 list of commands:
31
32 debugfoobar:
33 yet another debug command
34
35 global options:
36 -R --repository repository root directory or symbolic path name
37 --cwd change working directory
38 -y --noninteractive do not prompt, assume 'yes' for any required answers
39 -q --quiet suppress output
40 -v --verbose enable additional output
41 --config set/override config option
42 --debug enable debugging output
43 --debugger start debugger
44 --encoding set the charset encoding (default: ascii)
45 --encodingmode set the charset encoding mode (default: strict)
46 --lsprof print improved command execution profile
47 --traceback print traceback on exception
48 --time time how long the command takes
49 --profile print command execution profile
50 --version output version information and exit
51 -h --help display help and exit
@@ -1,43 +1,47 b''
1 #!/bin/sh
1 #!/bin/sh
2
2
3 hg init
3 hg init
4 echo a > a
4 echo a > a
5 hg add a
5 hg add a
6 hg commit -m "test" -d "1000000 0"
6 hg commit -m "test" -d "1000000 0"
7 hg history
7 hg history
8 hg tag -d "1000000 0" "bleah"
8 hg tag -d "1000000 0" "bleah"
9 hg history
9 hg history
10
10
11 echo foo >> .hgtags
11 echo foo >> .hgtags
12 hg tag -d "1000000 0" "bleah2" || echo "failed"
12 hg tag -d "1000000 0" "bleah2" || echo "failed"
13 hg tag -d "1000000 0" -r 0 "bleah2" 1 || echo "failed"
13 hg tag -d "1000000 0" -r 0 "bleah2" 1 || echo "failed"
14
14
15 hg revert .hgtags
15 hg revert .hgtags
16 hg tag -d "1000000 0" -r 0 "bleah0"
16 hg tag -d "1000000 0" -r 0 "bleah0"
17 hg tag -l -d "1000000 0" "bleah1" 1
17 hg tag -l -d "1000000 0" "bleah1" 1
18
18
19 cat .hgtags
19 cat .hgtags
20 cat .hg/localtags
20 cat .hg/localtags
21
21
22 hg update 0
22 hg update 0
23 hg tag -d "1000000 0" "foobar"
23 hg tag -d "1000000 0" "foobar"
24 cat .hgtags
24 cat .hgtags
25 cat .hg/localtags
25 cat .hg/localtags
26
26
27 hg tag -l 'xx
27 hg tag -l 'xx
28 newline'
28 newline'
29 hg tag -l 'xx:xx'
29 hg tag -l 'xx:xx'
30
30
31 echo % issue 601
31 echo % issue 601
32 mv .hg/localtags .hg/ltags
32 python << EOF
33 head -1 .hg/ltags | tr -d '\n' > .hg/localtags
33 f = file('.hg/localtags'); last = f.readlines()[-1][:-1]; f.close()
34 f = file('.hg/localtags', 'w'); f.write(last); f.close()
35 EOF
34 cat .hg/localtags
36 cat .hg/localtags
35 hg tag -l localnewline
37 hg tag -l localnewline
36 cat .hg/localtags
38 cat .hg/localtags
37
39
38 mv .hgtags hgtags
40 python << EOF
39 head -1 hgtags | tr -d '\n' > .hgtags
41 f = file('.hgtags'); last = f.readlines()[-1][:-1]; f.close()
42 f = file('.hgtags', 'w'); f.write(last); f.close()
43 EOF
40 hg ci -d '1000000 0' -m'broken manual edit of .hgtags'
44 hg ci -d '1000000 0' -m'broken manual edit of .hgtags'
41 cat .hgtags
45 cat .hgtags
42 hg tag -d '1000000 0' newline
46 hg tag -d '1000000 0' newline
43 cat .hgtags
47 cat .hgtags
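
The issue 601 change above replaces a head/tr pipeline with an inline Python snippet that leaves .hg/localtags (and later .hgtags) holding a single entry with no trailing newline; the hg tag calls that follow must still append a well-formed entry. A rough, hypothetical sketch of parsing such a file (readtags is not Mercurial's own implementation), assuming the usual "<40-hex changeset id> <tag name>" layout per line:

    # rough sketch only: splitlines() copes with a last line that is
    # missing its trailing newline, the case the test above creates
    def readtags(data):
        tags = {}
        for line in data.splitlines():
            line = line.strip()
            if not line:
                continue
            node, name = line.split(' ', 1)
            tags[name.strip()] = node
        return tags

    print readtags(file('.hgtags').read())
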