##// END OF EJS Templates
backout most of 4f8067c94729
Matt Mackall -
r12401:4cdaf1ad default
parent child Browse files
Show More
@@ -1,203 +1,203
1 # Perforce source for convert extension.
1 # Perforce source for convert extension.
2 #
2 #
3 # Copyright 2009, Frank Kingswood <frank@kingswood-consulting.co.uk>
3 # Copyright 2009, Frank Kingswood <frank@kingswood-consulting.co.uk>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from mercurial import util
8 from mercurial import util
9 from mercurial.i18n import _
9 from mercurial.i18n import _
10
10
11 from common import commit, converter_source, checktool, NoRepo
11 from common import commit, converter_source, checktool, NoRepo
12 import marshal
12 import marshal
13 import re
13 import re
14
14
def loaditer(f):
    """Yield each dictionary object that 'p4 -G' wrote to stream f.

    Stops at the first empty (falsy) object or when the stream is
    exhausted; a truncated stream is treated the same as a clean end.
    """
    try:
        d = marshal.load(f)
        while d:
            yield d
            d = marshal.load(f)
    except EOFError:
        pass
25
25
class p4_source(converter_source):
    """Convert source that reads history from a Perforce depot path or
    client spec via the 'p4 -G' (marshalled-dict) interface."""

    def __init__(self, ui, path, rev=None):
        super(p4_source, self).__init__(ui, path, rev=rev)

        # depot paths start with "//"; any other path containing a slash
        # cannot be a P4 specification
        if "/" in path and not path.startswith('//'):
            raise NoRepo(_('%s does not look like a P4 repository') % path)

        checktool('p4', abort=False)

        self.p4changes = {}
        self.heads = {}
        self.changeset = {}
        self.files = {}
        self.tags = {}
        self.lastbranch = {}
        self.parent = {}
        self.encoding = "latin_1"
        self.depotname = {}  # mapping from local name to depot name
        # server file type, e.g. "ktext" or "binary+x": optional modifier
        # prefix, base type, optional "+flags" suffix
        self.re_type = re.compile(
            "([a-z]+)?(text|binary|symlink|apple|resource|unicode|utf\d+)"
            "(\+\w+)?$")
        # expanded RCS-style keywords, collapsed back to bare $Keyword$
        self.re_keywords = re.compile(
            r"\$(Id|Header|Date|DateTime|Change|File|Revision|Author)"
            r":[^$\n]*\$")
        self.re_keywords_old = re.compile("\$(Id|Header):[^$\n]*\$")

        self._parse(ui, path)

    def _parse_view(self, path):
        "Read changes affecting the path"
        cmd = 'p4 -G changes -s submitted %s' % util.shellquote(path)
        stdout = util.popen(cmd, mode='rb')
        for entry in loaditer(stdout):
            num = entry.get("change", None)
            if num:
                self.p4changes[num] = True

    def _parse(self, ui, path):
        "Prepare list of P4 filenames and revisions to import"
        ui.status(_('reading p4 views\n'))

        # read client spec or view
        if "/" in path:
            # a depot path was given directly
            self._parse_view(path)
            if path.startswith("//") and path.endswith("/..."):
                views = {path[:-3]: ""}
            else:
                views = {"//": ""}
        else:
            # a client name: fetch its spec and walk the View mappings
            cmd = 'p4 -G client -o %s' % util.shellquote(path)
            clientspec = marshal.load(util.popen(cmd, mode='rb'))

            views = {}
            for client in clientspec:
                if client.startswith("View"):
                    sview, cview = clientspec[client].split()
                    self._parse_view(sview)
                    if sview.endswith("...") and cview.endswith("..."):
                        sview = sview[:-3]
                        cview = cview[:-3]
                    # strip "//" and the client-name component
                    cview = cview[2:]
                    cview = cview[cview.find("/") + 1:]
                    views[sview] = cview

        # changes that affect our source files, in increasing numeric order
        self.p4changes = sorted(self.p4changes, key=int)

        # depot pathnames, longest first, so the most specific view wins
        vieworder = sorted(views, key=len, reverse=True)

        # handle revision limiting
        startrev = self.ui.config('convert', 'p4.startrev', default=0)
        self.p4changes = [x for x in self.p4changes
                          if ((not startrev or int(x) >= int(startrev)) and
                              (not self.rev or int(x) <= int(self.rev)))]

        # now read the full changelists to get the list of file revisions
        ui.status(_('collecting p4 changelists\n'))
        lastid = None
        for change in self.p4changes:
            cmd = "p4 -G describe %s" % change
            stdout = util.popen(cmd, mode='rb')
            d = marshal.load(stdout)

            desc = self.recode(d["desc"])
            shortdesc = desc.split("\n", 1)[0]
            t = '%s %s' % (d["change"], repr(shortdesc)[1:-1])
            ui.status(util.ellipsis(t, 80) + '\n')

            # each changelist's sole parent is the previous one: P4 history
            # is linear
            parents = [lastid] if lastid else []

            date = (int(d["time"]), 0)  # timezone not set
            c = commit(author=self.recode(d["user"]), date=util.datestr(date),
                       parents=parents, desc=desc, branch='',
                       extra={"p4": change})

            # collect the files touched by this change, mapped through the
            # client views into local names
            files = []
            i = 0
            while ("depotFile%d" % i) in d and ("rev%d" % i) in d:
                oldname = d["depotFile%d" % i]
                filename = None
                for v in vieworder:
                    if oldname.startswith(v):
                        filename = views[v] + oldname[len(v):]
                        break
                if filename:
                    files.append((filename, d["rev%d" % i]))
                    self.depotname[filename] = oldname
                i += 1
            self.changeset[change] = c
            self.files[change] = files
            lastid = change

        if lastid:
            self.heads = [lastid]

    def getheads(self):
        return self.heads

    def getfile(self, name, rev):
        """Fetch one file revision via 'p4 print'; returns (contents, mode)
        where mode is '', 'x' (executable) or 'l' (symlink)."""
        cmd = 'p4 -G print %s' \
              % util.shellquote("%s#%s" % (self.depotname[name], rev))
        stdout = util.popen(cmd, mode='rb')

        mode = None
        keywords = None
        chunks = []

        for d in loaditer(stdout):
            code = d["code"]
            data = d.get("data")

            if code == "error":
                raise IOError(d["generic"], data)

            elif code == "stat":
                p4type = self.re_type.match(d["type"])
                if p4type:
                    mode = ""
                    flags = (p4type.group(1) or "") + (p4type.group(3) or "")
                    if "x" in flags:
                        mode = "x"
                    if p4type.group(2) == "symlink":
                        mode = "l"
                    # keyword-expanded files need the expansions stripped
                    if "ko" in flags:
                        keywords = self.re_keywords_old
                    elif "k" in flags:
                        keywords = self.re_keywords

            elif code in ("text", "binary"):
                chunks.append(data)

        # no stat record means p4 never described the file
        if mode is None:
            raise IOError(0, "bad stat")

        contents = "".join(chunks)
        if keywords:
            contents = keywords.sub("$\\1$", contents)
        if mode == "l" and contents.endswith("\n"):
            contents = contents[:-1]

        return contents, mode

    def getchanges(self, rev):
        return self.files[rev], {}

    def getcommit(self, rev):
        return self.changeset[rev]

    def gettags(self):
        return self.tags

    def getchangedfiles(self, rev, i):
        return sorted([x[0] for x in self.files[rev]])
@@ -1,88 +1,88
1 # ancestor.py - generic DAG ancestor algorithm for mercurial
1 # ancestor.py - generic DAG ancestor algorithm for mercurial
2 #
2 #
3 # Copyright 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 import heapq
8 import heapq
9
9
def ancestor(a, b, pfunc):
    """
    return a minimal-distance ancestor of nodes a and b, or None if there is no
    such ancestor. Note that there can be several ancestors with the same
    (minimal) distance, and the one returned is arbitrary.

    pfunc must return a list of parent vertices for a given vertex
    """

    if a == b:
        return a

    a, b = sorted([a, b])

    # compute the (non-positive) depth of every ancestor of a and b with an
    # iterative post-order walk; parent lists are cached for reuse below
    parentcache = {}
    visit = [a, b]
    depth = {}
    while visit:
        vertex = visit[-1]
        pl = pfunc(vertex)
        parentcache[vertex] = pl
        if not pl:
            # a root: depth 0 by definition
            depth[vertex] = 0
            visit.pop()
        else:
            for p in pl:
                if p in (a, b):  # did we find a or b as a parent?
                    return p     # we're done
                if p not in depth:
                    visit.append(p)
            if visit[-1] == vertex:
                # all parents resolved: one farther from root than the
                # shallowest parent
                depth[vertex] = min([depth[p] for p in pl]) - 1
                visit.pop()

    def ancestors(vertex):
        """Yield (depth, node) over all ancestors, nearest-to-vertex first."""
        heap = [(depth[vertex], vertex)]
        seen = set()
        while heap:
            dist, node = heapq.heappop(heap)
            if node not in seen:
                seen.add(node)
                yield (dist, node)
                for p in parentcache[node]:
                    heapq.heappush(heap, (depth[p], p))

    def generations(vertex):
        """Batch the ancestor stream into (depth, set-of-nodes) groups."""
        sg, s = None, set()
        for g, v in ancestors(vertex):
            if g != sg:
                if sg:
                    yield sg, s
                sg, s = g, set((v,))
            else:
                s.add(v)
        yield sg, s

    x = generations(a)
    y = generations(b)
    gx = next(x)
    gy = next(y)

    # increment each ancestor list until it is closer to root than
    # the other, or they match
    try:
        while 1:
            if gx[0] == gy[0]:
                for v in gx[1]:
                    if v in gy[1]:
                        return v
                gy = next(y)
                gx = next(x)
            elif gx[0] > gy[0]:
                gy = next(y)
            else:
                gx = next(x)
    except StopIteration:
        return None
@@ -1,680 +1,680
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from node import nullid
8 from node import nullid
9 from i18n import _
9 from i18n import _
10 import util, ignore, osutil, parsers
10 import util, ignore, osutil, parsers
11 import struct, os, stat, errno
11 import struct, os, stat, errno
12 import cStringIO
12 import cStringIO
13
13
14 _format = ">cllll"
14 _format = ">cllll"
15 propertycache = util.propertycache
15 propertycache = util.propertycache
16
16
17 def _finddirs(path):
17 def _finddirs(path):
18 pos = path.rfind('/')
18 pos = path.rfind('/')
19 while pos != -1:
19 while pos != -1:
20 yield path[:pos]
20 yield path[:pos]
21 pos = path.rfind('/', 0, pos)
21 pos = path.rfind('/', 0, pos)
22
22
def _incdirs(dirs, path):
    """Bump the refcount in dirs for each ancestor directory of path.

    Stops at the first ancestor already present, since all of its own
    ancestors are then already counted.
    """
    for ancestor in _finddirs(path):
        if ancestor in dirs:
            dirs[ancestor] += 1
            return
        dirs[ancestor] = 1
29
29
def _decdirs(dirs, path):
    """Drop the refcount in dirs for each ancestor directory of path.

    Stops at the first ancestor whose count stays positive; an ancestor
    reaching zero is removed and its own ancestors are decremented too.
    """
    for ancestor in _finddirs(path):
        count = dirs[ancestor]
        if count > 1:
            dirs[ancestor] = count - 1
            return
        del dirs[ancestor]
36
36
37 class dirstate(object):
37 class dirstate(object):
38
38
39 def __init__(self, opener, ui, root):
39 def __init__(self, opener, ui, root):
40 '''Create a new dirstate object.
40 '''Create a new dirstate object.
41
41
42 opener is an open()-like callable that can be used to open the
42 opener is an open()-like callable that can be used to open the
43 dirstate file; root is the root of the directory tracked by
43 dirstate file; root is the root of the directory tracked by
44 the dirstate.
44 the dirstate.
45 '''
45 '''
46 self._opener = opener
46 self._opener = opener
47 self._root = root
47 self._root = root
48 self._rootdir = os.path.join(root, '')
48 self._rootdir = os.path.join(root, '')
49 self._dirty = False
49 self._dirty = False
50 self._dirtypl = False
50 self._dirtypl = False
51 self._ui = ui
51 self._ui = ui
52
52
53 @propertycache
53 @propertycache
54 def _map(self):
54 def _map(self):
55 '''Return the dirstate contents as a map from filename to
55 '''Return the dirstate contents as a map from filename to
56 (state, mode, size, time).'''
56 (state, mode, size, time).'''
57 self._read()
57 self._read()
58 return self._map
58 return self._map
59
59
60 @propertycache
60 @propertycache
61 def _copymap(self):
61 def _copymap(self):
62 self._read()
62 self._read()
63 return self._copymap
63 return self._copymap
64
64
65 @propertycache
65 @propertycache
66 def _foldmap(self):
66 def _foldmap(self):
67 f = {}
67 f = {}
68 for name in self._map:
68 for name in self._map:
69 f[os.path.normcase(name)] = name
69 f[os.path.normcase(name)] = name
70 return f
70 return f
71
71
72 @propertycache
72 @propertycache
73 def _branch(self):
73 def _branch(self):
74 try:
74 try:
75 return self._opener("branch").read().strip() or "default"
75 return self._opener("branch").read().strip() or "default"
76 except IOError:
76 except IOError:
77 return "default"
77 return "default"
78
78
79 @propertycache
79 @propertycache
80 def _pl(self):
80 def _pl(self):
81 try:
81 try:
82 st = self._opener("dirstate").read(40)
82 st = self._opener("dirstate").read(40)
83 l = len(st)
83 l = len(st)
84 if l == 40:
84 if l == 40:
85 return st[:20], st[20:40]
85 return st[:20], st[20:40]
86 elif l > 0 and l < 40:
86 elif l > 0 and l < 40:
87 raise util.Abort(_('working directory state appears damaged!'))
87 raise util.Abort(_('working directory state appears damaged!'))
88 except IOError, err:
88 except IOError, err:
89 if err.errno != errno.ENOENT:
89 if err.errno != errno.ENOENT:
90 raise
90 raise
91 return [nullid, nullid]
91 return [nullid, nullid]
92
92
93 @propertycache
93 @propertycache
94 def _dirs(self):
94 def _dirs(self):
95 dirs = {}
95 dirs = {}
96 for f, s in self._map.iteritems():
96 for f, s in self._map.iteritems():
97 if s[0] != 'r':
97 if s[0] != 'r':
98 _incdirs(dirs, f)
98 _incdirs(dirs, f)
99 return dirs
99 return dirs
100
100
101 @propertycache
101 @propertycache
102 def _ignore(self):
102 def _ignore(self):
103 files = [self._join('.hgignore')]
103 files = [self._join('.hgignore')]
104 for name, path in self._ui.configitems("ui"):
104 for name, path in self._ui.configitems("ui"):
105 if name == 'ignore' or name.startswith('ignore.'):
105 if name == 'ignore' or name.startswith('ignore.'):
106 files.append(util.expandpath(path))
106 files.append(util.expandpath(path))
107 return ignore.ignore(self._root, files, self._ui.warn)
107 return ignore.ignore(self._root, files, self._ui.warn)
108
108
109 @propertycache
109 @propertycache
110 def _slash(self):
110 def _slash(self):
111 return self._ui.configbool('ui', 'slash') and os.sep != '/'
111 return self._ui.configbool('ui', 'slash') and os.sep != '/'
112
112
113 @propertycache
113 @propertycache
114 def _checklink(self):
114 def _checklink(self):
115 return util.checklink(self._root)
115 return util.checklink(self._root)
116
116
117 @propertycache
117 @propertycache
118 def _checkexec(self):
118 def _checkexec(self):
119 return util.checkexec(self._root)
119 return util.checkexec(self._root)
120
120
121 @propertycache
121 @propertycache
122 def _checkcase(self):
122 def _checkcase(self):
123 return not util.checkcase(self._join('.hg'))
123 return not util.checkcase(self._join('.hg'))
124
124
125 def _join(self, f):
125 def _join(self, f):
126 # much faster than os.path.join()
126 # much faster than os.path.join()
127 # it's safe because f is always a relative path
127 # it's safe because f is always a relative path
128 return self._rootdir + f
128 return self._rootdir + f
129
129
130 def flagfunc(self, fallback):
130 def flagfunc(self, fallback):
131 if self._checklink:
131 if self._checklink:
132 if self._checkexec:
132 if self._checkexec:
133 def f(x):
133 def f(x):
134 p = self._join(x)
134 p = self._join(x)
135 if os.path.islink(p):
135 if os.path.islink(p):
136 return 'l'
136 return 'l'
137 if util.is_exec(p):
137 if util.is_exec(p):
138 return 'x'
138 return 'x'
139 return ''
139 return ''
140 return f
140 return f
141 def f(x):
141 def f(x):
142 if os.path.islink(self._join(x)):
142 if os.path.islink(self._join(x)):
143 return 'l'
143 return 'l'
144 if 'x' in fallback(x):
144 if 'x' in fallback(x):
145 return 'x'
145 return 'x'
146 return ''
146 return ''
147 return f
147 return f
148 if self._checkexec:
148 if self._checkexec:
149 def f(x):
149 def f(x):
150 if 'l' in fallback(x):
150 if 'l' in fallback(x):
151 return 'l'
151 return 'l'
152 if util.is_exec(self._join(x)):
152 if util.is_exec(self._join(x)):
153 return 'x'
153 return 'x'
154 return ''
154 return ''
155 return f
155 return f
156 return fallback
156 return fallback
157
157
158 def getcwd(self):
158 def getcwd(self):
159 cwd = os.getcwd()
159 cwd = os.getcwd()
160 if cwd == self._root:
160 if cwd == self._root:
161 return ''
161 return ''
162 # self._root ends with a path separator if self._root is '/' or 'C:\'
162 # self._root ends with a path separator if self._root is '/' or 'C:\'
163 rootsep = self._root
163 rootsep = self._root
164 if not util.endswithsep(rootsep):
164 if not util.endswithsep(rootsep):
165 rootsep += os.sep
165 rootsep += os.sep
166 if cwd.startswith(rootsep):
166 if cwd.startswith(rootsep):
167 return cwd[len(rootsep):]
167 return cwd[len(rootsep):]
168 else:
168 else:
169 # we're outside the repo. return an absolute path.
169 # we're outside the repo. return an absolute path.
170 return cwd
170 return cwd
171
171
172 def pathto(self, f, cwd=None):
172 def pathto(self, f, cwd=None):
173 if cwd is None:
173 if cwd is None:
174 cwd = self.getcwd()
174 cwd = self.getcwd()
175 path = util.pathto(self._root, cwd, f)
175 path = util.pathto(self._root, cwd, f)
176 if self._slash:
176 if self._slash:
177 return util.normpath(path)
177 return util.normpath(path)
178 return path
178 return path
179
179
180 def __getitem__(self, key):
180 def __getitem__(self, key):
181 '''Return the current state of key (a filename) in the dirstate.
181 '''Return the current state of key (a filename) in the dirstate.
182
182
183 States are:
183 States are:
184 n normal
184 n normal
185 m needs merging
185 m needs merging
186 r marked for removal
186 r marked for removal
187 a marked for addition
187 a marked for addition
188 ? not tracked
188 ? not tracked
189 '''
189 '''
190 return self._map.get(key, ("?",))[0]
190 return self._map.get(key, ("?",))[0]
191
191
192 def __contains__(self, key):
192 def __contains__(self, key):
193 return key in self._map
193 return key in self._map
194
194
195 def __iter__(self):
195 def __iter__(self):
196 for x in sorted(self._map):
196 for x in sorted(self._map):
197 yield x
197 yield x
198
198
199 def parents(self):
199 def parents(self):
200 return self._pl
200 return self._pl
201
201
202 def branch(self):
202 def branch(self):
203 return self._branch
203 return self._branch
204
204
205 def setparents(self, p1, p2=nullid):
205 def setparents(self, p1, p2=nullid):
206 self._dirty = self._dirtypl = True
206 self._dirty = self._dirtypl = True
207 self._pl = p1, p2
207 self._pl = p1, p2
208
208
209 def setbranch(self, branch):
209 def setbranch(self, branch):
210 if branch in ['tip', '.', 'null']:
210 if branch in ['tip', '.', 'null']:
211 raise util.Abort(_('the name \'%s\' is reserved') % branch)
211 raise util.Abort(_('the name \'%s\' is reserved') % branch)
212 self._branch = branch
212 self._branch = branch
213 self._opener("branch", "w").write(branch + '\n')
213 self._opener("branch", "w").write(branch + '\n')
214
214
215 def _read(self):
215 def _read(self):
216 self._map = {}
216 self._map = {}
217 self._copymap = {}
217 self._copymap = {}
218 try:
218 try:
219 st = self._opener("dirstate").read()
219 st = self._opener("dirstate").read()
220 except IOError, err:
220 except IOError, err:
221 if err.errno != errno.ENOENT:
221 if err.errno != errno.ENOENT:
222 raise
222 raise
223 return
223 return
224 if not st:
224 if not st:
225 return
225 return
226
226
227 p = parsers.parse_dirstate(self._map, self._copymap, st)
227 p = parsers.parse_dirstate(self._map, self._copymap, st)
228 if not self._dirtypl:
228 if not self._dirtypl:
229 self._pl = p
229 self._pl = p
230
230
231 def invalidate(self):
231 def invalidate(self):
232 for a in "_map _copymap _foldmap _branch _pl _dirs _ignore".split():
232 for a in "_map _copymap _foldmap _branch _pl _dirs _ignore".split():
233 if a in self.__dict__:
233 if a in self.__dict__:
234 delattr(self, a)
234 delattr(self, a)
235 self._dirty = False
235 self._dirty = False
236
236
237 def copy(self, source, dest):
237 def copy(self, source, dest):
238 """Mark dest as a copy of source. Unmark dest if source is None."""
238 """Mark dest as a copy of source. Unmark dest if source is None."""
239 if source == dest:
239 if source == dest:
240 return
240 return
241 self._dirty = True
241 self._dirty = True
242 if source is not None:
242 if source is not None:
243 self._copymap[dest] = source
243 self._copymap[dest] = source
244 elif dest in self._copymap:
244 elif dest in self._copymap:
245 del self._copymap[dest]
245 del self._copymap[dest]
246
246
247 def copied(self, file):
247 def copied(self, file):
248 return self._copymap.get(file, None)
248 return self._copymap.get(file, None)
249
249
250 def copies(self):
250 def copies(self):
251 return self._copymap
251 return self._copymap
252
252
253 def _droppath(self, f):
253 def _droppath(self, f):
254 if self[f] not in "?r" and "_dirs" in self.__dict__:
254 if self[f] not in "?r" and "_dirs" in self.__dict__:
255 _decdirs(self._dirs, f)
255 _decdirs(self._dirs, f)
256
256
257 def _addpath(self, f, check=False):
257 def _addpath(self, f, check=False):
258 oldstate = self[f]
258 oldstate = self[f]
259 if check or oldstate == "r":
259 if check or oldstate == "r":
260 if '\r' in f or '\n' in f:
260 if '\r' in f or '\n' in f:
261 raise util.Abort(
261 raise util.Abort(
262 _("'\\n' and '\\r' disallowed in filenames: %r") % f)
262 _("'\\n' and '\\r' disallowed in filenames: %r") % f)
263 if f in self._dirs:
263 if f in self._dirs:
264 raise util.Abort(_('directory %r already in dirstate') % f)
264 raise util.Abort(_('directory %r already in dirstate') % f)
265 # shadows
265 # shadows
266 for d in _finddirs(f):
266 for d in _finddirs(f):
267 if d in self._dirs:
267 if d in self._dirs:
268 break
268 break
269 if d in self._map and self[d] != 'r':
269 if d in self._map and self[d] != 'r':
270 raise util.Abort(
270 raise util.Abort(
271 _('file %r in dirstate clashes with %r') % (d, f))
271 _('file %r in dirstate clashes with %r') % (d, f))
272 if oldstate in "?r" and "_dirs" in self.__dict__:
272 if oldstate in "?r" and "_dirs" in self.__dict__:
273 _incdirs(self._dirs, f)
273 _incdirs(self._dirs, f)
274
274
275 def normal(self, f):
275 def normal(self, f):
276 '''Mark a file normal and clean.'''
276 '''Mark a file normal and clean.'''
277 self._dirty = True
277 self._dirty = True
278 self._addpath(f)
278 self._addpath(f)
279 s = os.lstat(self._join(f))
279 s = os.lstat(self._join(f))
280 self._map[f] = ('n', s.st_mode, s.st_size, int(s.st_mtime))
280 self._map[f] = ('n', s.st_mode, s.st_size, int(s.st_mtime))
281 if f in self._copymap:
281 if f in self._copymap:
282 del self._copymap[f]
282 del self._copymap[f]
283
283
284 def normallookup(self, f):
284 def normallookup(self, f):
285 '''Mark a file normal, but possibly dirty.'''
285 '''Mark a file normal, but possibly dirty.'''
286 if self._pl[1] != nullid and f in self._map:
286 if self._pl[1] != nullid and f in self._map:
287 # if there is a merge going on and the file was either
287 # if there is a merge going on and the file was either
288 # in state 'm' (-1) or coming from other parent (-2) before
288 # in state 'm' (-1) or coming from other parent (-2) before
289 # being removed, restore that state.
289 # being removed, restore that state.
290 entry = self._map[f]
290 entry = self._map[f]
291 if entry[0] == 'r' and entry[2] in (-1, -2):
291 if entry[0] == 'r' and entry[2] in (-1, -2):
292 source = self._copymap.get(f)
292 source = self._copymap.get(f)
293 if entry[2] == -1:
293 if entry[2] == -1:
294 self.merge(f)
294 self.merge(f)
295 elif entry[2] == -2:
295 elif entry[2] == -2:
296 self.otherparent(f)
296 self.otherparent(f)
297 if source:
297 if source:
298 self.copy(source, f)
298 self.copy(source, f)
299 return
299 return
300 if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2:
300 if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2:
301 return
301 return
302 self._dirty = True
302 self._dirty = True
303 self._addpath(f)
303 self._addpath(f)
304 self._map[f] = ('n', 0, -1, -1)
304 self._map[f] = ('n', 0, -1, -1)
305 if f in self._copymap:
305 if f in self._copymap:
306 del self._copymap[f]
306 del self._copymap[f]
307
307
308 def otherparent(self, f):
308 def otherparent(self, f):
309 '''Mark as coming from the other parent, always dirty.'''
309 '''Mark as coming from the other parent, always dirty.'''
310 if self._pl[1] == nullid:
310 if self._pl[1] == nullid:
311 raise util.Abort(_("setting %r to other parent "
311 raise util.Abort(_("setting %r to other parent "
312 "only allowed in merges") % f)
312 "only allowed in merges") % f)
313 self._dirty = True
313 self._dirty = True
314 self._addpath(f)
314 self._addpath(f)
315 self._map[f] = ('n', 0, -2, -1)
315 self._map[f] = ('n', 0, -2, -1)
316 if f in self._copymap:
316 if f in self._copymap:
317 del self._copymap[f]
317 del self._copymap[f]
318
318
319 def add(self, f):
319 def add(self, f):
320 '''Mark a file added.'''
320 '''Mark a file added.'''
321 self._dirty = True
321 self._dirty = True
322 self._addpath(f, True)
322 self._addpath(f, True)
323 self._map[f] = ('a', 0, -1, -1)
323 self._map[f] = ('a', 0, -1, -1)
324 if f in self._copymap:
324 if f in self._copymap:
325 del self._copymap[f]
325 del self._copymap[f]
326
326
327 def remove(self, f):
327 def remove(self, f):
328 '''Mark a file removed.'''
328 '''Mark a file removed.'''
329 self._dirty = True
329 self._dirty = True
330 self._droppath(f)
330 self._droppath(f)
331 size = 0
331 size = 0
332 if self._pl[1] != nullid and f in self._map:
332 if self._pl[1] != nullid and f in self._map:
333 # backup the previous state
333 # backup the previous state
334 entry = self._map[f]
334 entry = self._map[f]
335 if entry[0] == 'm': # merge
335 if entry[0] == 'm': # merge
336 size = -1
336 size = -1
337 elif entry[0] == 'n' and entry[2] == -2: # other parent
337 elif entry[0] == 'n' and entry[2] == -2: # other parent
338 size = -2
338 size = -2
339 self._map[f] = ('r', 0, size, 0)
339 self._map[f] = ('r', 0, size, 0)
340 if size == 0 and f in self._copymap:
340 if size == 0 and f in self._copymap:
341 del self._copymap[f]
341 del self._copymap[f]
342
342
343 def merge(self, f):
343 def merge(self, f):
344 '''Mark a file merged.'''
344 '''Mark a file merged.'''
345 self._dirty = True
345 self._dirty = True
346 s = os.lstat(self._join(f))
346 s = os.lstat(self._join(f))
347 self._addpath(f)
347 self._addpath(f)
348 self._map[f] = ('m', s.st_mode, s.st_size, int(s.st_mtime))
348 self._map[f] = ('m', s.st_mode, s.st_size, int(s.st_mtime))
349 if f in self._copymap:
349 if f in self._copymap:
350 del self._copymap[f]
350 del self._copymap[f]
351
351
352 def forget(self, f):
352 def forget(self, f):
353 '''Forget a file.'''
353 '''Forget a file.'''
354 self._dirty = True
354 self._dirty = True
355 try:
355 try:
356 self._droppath(f)
356 self._droppath(f)
357 del self._map[f]
357 del self._map[f]
358 except KeyError:
358 except KeyError:
359 self._ui.warn(_("not in dirstate: %s\n") % f)
359 self._ui.warn(_("not in dirstate: %s\n") % f)
360
360
361 def _normalize(self, path, knownpath):
361 def _normalize(self, path, knownpath):
362 norm_path = os.path.normcase(path)
362 norm_path = os.path.normcase(path)
363 fold_path = self._foldmap.get(norm_path, None)
363 fold_path = self._foldmap.get(norm_path, None)
364 if fold_path is None:
364 if fold_path is None:
365 if knownpath or not os.path.lexists(os.path.join(self._root, path)):
365 if knownpath or not os.path.lexists(os.path.join(self._root, path)):
366 fold_path = path
366 fold_path = path
367 else:
367 else:
368 fold_path = self._foldmap.setdefault(norm_path,
368 fold_path = self._foldmap.setdefault(norm_path,
369 util.fspath(path, self._root))
369 util.fspath(path, self._root))
370 return fold_path
370 return fold_path
371
371
372 def clear(self):
372 def clear(self):
373 self._map = {}
373 self._map = {}
374 if "_dirs" in self.__dict__:
374 if "_dirs" in self.__dict__:
375 delattr(self, "_dirs")
375 delattr(self, "_dirs")
376 self._copymap = {}
376 self._copymap = {}
377 self._pl = [nullid, nullid]
377 self._pl = [nullid, nullid]
378 self._dirty = True
378 self._dirty = True
379
379
380 def rebuild(self, parent, files):
380 def rebuild(self, parent, files):
381 self.clear()
381 self.clear()
382 for f in files:
382 for f in files:
383 if 'x' in files.flags(f):
383 if 'x' in files.flags(f):
384 self._map[f] = ('n', 0777, -1, 0)
384 self._map[f] = ('n', 0777, -1, 0)
385 else:
385 else:
386 self._map[f] = ('n', 0666, -1, 0)
386 self._map[f] = ('n', 0666, -1, 0)
387 self._pl = (parent, nullid)
387 self._pl = (parent, nullid)
388 self._dirty = True
388 self._dirty = True
389
389
390 def write(self):
390 def write(self):
391 if not self._dirty:
391 if not self._dirty:
392 return
392 return
393 st = self._opener("dirstate", "w", atomictemp=True)
393 st = self._opener("dirstate", "w", atomictemp=True)
394
394
395 # use the modification time of the newly created temporary file as the
395 # use the modification time of the newly created temporary file as the
396 # filesystem's notion of 'now'
396 # filesystem's notion of 'now'
397 now = int(util.fstat(st).st_mtime)
397 now = int(util.fstat(st).st_mtime)
398
398
399 cs = cStringIO.StringIO()
399 cs = cStringIO.StringIO()
400 copymap = self._copymap
400 copymap = self._copymap
401 pack = struct.pack
401 pack = struct.pack
402 write = cs.write
402 write = cs.write
403 write("".join(self._pl))
403 write("".join(self._pl))
404 for f, e in self._map.iteritems():
404 for f, e in self._map.iteritems():
405 if e[0] == 'n' and e[3] == now:
405 if e[0] == 'n' and e[3] == now:
406 # The file was last modified "simultaneously" with the current
406 # The file was last modified "simultaneously" with the current
407 # write to dirstate (i.e. within the same second for file-
407 # write to dirstate (i.e. within the same second for file-
408 # systems with a granularity of 1 sec). This commonly happens
408 # systems with a granularity of 1 sec). This commonly happens
409 # for at least a couple of files on 'update'.
409 # for at least a couple of files on 'update'.
410 # The user could change the file without changing its size
410 # The user could change the file without changing its size
411 # within the same second. Invalidate the file's stat data in
411 # within the same second. Invalidate the file's stat data in
412 # dirstate, forcing future 'status' calls to compare the
412 # dirstate, forcing future 'status' calls to compare the
413 # contents of the file. This prevents mistakenly treating such
413 # contents of the file. This prevents mistakenly treating such
414 # files as clean.
414 # files as clean.
415 e = (e[0], 0, -1, -1) # mark entry as 'unset'
415 e = (e[0], 0, -1, -1) # mark entry as 'unset'
416 self._map[f] = e
416 self._map[f] = e
417
417
418 if f in copymap:
418 if f in copymap:
419 f = "%s\0%s" % (f, copymap[f])
419 f = "%s\0%s" % (f, copymap[f])
420 e = pack(_format, e[0], e[1], e[2], e[3], len(f))
420 e = pack(_format, e[0], e[1], e[2], e[3], len(f))
421 write(e)
421 write(e)
422 write(f)
422 write(f)
423 st.write(cs.getvalue())
423 st.write(cs.getvalue())
424 st.rename()
424 st.rename()
425 self._dirty = self._dirtypl = False
425 self._dirty = self._dirtypl = False
426
426
427 def _dirignore(self, f):
427 def _dirignore(self, f):
428 if f == '.':
428 if f == '.':
429 return False
429 return False
430 if self._ignore(f):
430 if self._ignore(f):
431 return True
431 return True
432 for p in _finddirs(f):
432 for p in _finddirs(f):
433 if self._ignore(p):
433 if self._ignore(p):
434 return True
434 return True
435 return False
435 return False
436
436
437 def walk(self, match, subrepos, unknown, ignored):
437 def walk(self, match, subrepos, unknown, ignored):
438 '''
438 '''
439 Walk recursively through the directory tree, finding all files
439 Walk recursively through the directory tree, finding all files
440 matched by match.
440 matched by match.
441
441
442 Return a dict mapping filename to stat-like object (either
442 Return a dict mapping filename to stat-like object (either
443 mercurial.osutil.stat instance or return value of os.stat()).
443 mercurial.osutil.stat instance or return value of os.stat()).
444 '''
444 '''
445
445
446 def fwarn(f, msg):
446 def fwarn(f, msg):
447 self._ui.warn('%s: %s\n' % (self.pathto(f), msg))
447 self._ui.warn('%s: %s\n' % (self.pathto(f), msg))
448 return False
448 return False
449
449
450 def badtype(mode):
450 def badtype(mode):
451 kind = _('unknown')
451 kind = _('unknown')
452 if stat.S_ISCHR(mode):
452 if stat.S_ISCHR(mode):
453 kind = _('character device')
453 kind = _('character device')
454 elif stat.S_ISBLK(mode):
454 elif stat.S_ISBLK(mode):
455 kind = _('block device')
455 kind = _('block device')
456 elif stat.S_ISFIFO(mode):
456 elif stat.S_ISFIFO(mode):
457 kind = _('fifo')
457 kind = _('fifo')
458 elif stat.S_ISSOCK(mode):
458 elif stat.S_ISSOCK(mode):
459 kind = _('socket')
459 kind = _('socket')
460 elif stat.S_ISDIR(mode):
460 elif stat.S_ISDIR(mode):
461 kind = _('directory')
461 kind = _('directory')
462 return _('unsupported file type (type is %s)') % kind
462 return _('unsupported file type (type is %s)') % kind
463
463
464 ignore = self._ignore
464 ignore = self._ignore
465 dirignore = self._dirignore
465 dirignore = self._dirignore
466 if ignored:
466 if ignored:
467 ignore = util.never
467 ignore = util.never
468 dirignore = util.never
468 dirignore = util.never
469 elif not unknown:
469 elif not unknown:
470 # if unknown and ignored are False, skip step 2
470 # if unknown and ignored are False, skip step 2
471 ignore = util.always
471 ignore = util.always
472 dirignore = util.always
472 dirignore = util.always
473
473
474 matchfn = match.matchfn
474 matchfn = match.matchfn
475 badfn = match.bad
475 badfn = match.bad
476 dmap = self._map
476 dmap = self._map
477 normpath = util.normpath
477 normpath = util.normpath
478 listdir = osutil.listdir
478 listdir = osutil.listdir
479 lstat = os.lstat
479 lstat = os.lstat
480 getkind = stat.S_IFMT
480 getkind = stat.S_IFMT
481 dirkind = stat.S_IFDIR
481 dirkind = stat.S_IFDIR
482 regkind = stat.S_IFREG
482 regkind = stat.S_IFREG
483 lnkkind = stat.S_IFLNK
483 lnkkind = stat.S_IFLNK
484 join = self._join
484 join = self._join
485 work = []
485 work = []
486 wadd = work.append
486 wadd = work.append
487
487
488 if self._checkcase:
488 if self._checkcase:
489 normalize = self._normalize
489 normalize = self._normalize
490 else:
490 else:
491 normalize = lambda x, y: x
491 normalize = lambda x, y: x
492
492
493 exact = skipstep3 = False
493 exact = skipstep3 = False
494 if matchfn == match.exact: # match.exact
494 if matchfn == match.exact: # match.exact
495 exact = True
495 exact = True
496 dirignore = util.always # skip step 2
496 dirignore = util.always # skip step 2
497 elif match.files() and not match.anypats(): # match.match, no patterns
497 elif match.files() and not match.anypats(): # match.match, no patterns
498 skipstep3 = True
498 skipstep3 = True
499
499
500 files = sorted(match.files())
500 files = sorted(match.files())
501 subrepos.sort()
501 subrepos.sort()
502 i, j = 0, 0
502 i, j = 0, 0
503 while i < len(files) and j < len(subrepos):
503 while i < len(files) and j < len(subrepos):
504 subpath = subrepos[j] + "/"
504 subpath = subrepos[j] + "/"
505 if not files[i].startswith(subpath):
505 if not files[i].startswith(subpath):
506 i += 1
506 i += 1
507 continue
507 continue
508 while files and files[i].startswith(subpath):
508 while files and files[i].startswith(subpath):
509 del files[i]
509 del files[i]
510 j += 1
510 j += 1
511
511
512 if not files or '.' in files:
512 if not files or '.' in files:
513 files = ['']
513 files = ['']
514 results = dict.fromkeys(subrepos)
514 results = dict.fromkeys(subrepos)
515 results['.hg'] = None
515 results['.hg'] = None
516
516
517 # step 1: find all explicit files
517 # step 1: find all explicit files
518 for ff in files:
518 for ff in files:
519 nf = normalize(normpath(ff), False)
519 nf = normalize(normpath(ff), False)
520 if nf in results:
520 if nf in results:
521 continue
521 continue
522
522
523 try:
523 try:
524 st = lstat(join(nf))
524 st = lstat(join(nf))
525 kind = getkind(st.st_mode)
525 kind = getkind(st.st_mode)
526 if kind == dirkind:
526 if kind == dirkind:
527 skipstep3 = False
527 skipstep3 = False
528 if nf in dmap:
528 if nf in dmap:
529 #file deleted on disk but still in dirstate
529 #file deleted on disk but still in dirstate
530 results[nf] = None
530 results[nf] = None
531 match.dir(nf)
531 match.dir(nf)
532 if not dirignore(nf):
532 if not dirignore(nf):
533 wadd(nf)
533 wadd(nf)
534 elif kind in (regkind, lnkkind):
534 elif kind == regkind or kind == lnkkind:
535 results[nf] = st
535 results[nf] = st
536 else:
536 else:
537 badfn(ff, badtype(kind))
537 badfn(ff, badtype(kind))
538 if nf in dmap:
538 if nf in dmap:
539 results[nf] = None
539 results[nf] = None
540 except OSError, inst:
540 except OSError, inst:
541 if nf in dmap: # does it exactly match a file?
541 if nf in dmap: # does it exactly match a file?
542 results[nf] = None
542 results[nf] = None
543 else: # does it match a directory?
543 else: # does it match a directory?
544 prefix = nf + "/"
544 prefix = nf + "/"
545 for fn in dmap:
545 for fn in dmap:
546 if fn.startswith(prefix):
546 if fn.startswith(prefix):
547 match.dir(nf)
547 match.dir(nf)
548 skipstep3 = False
548 skipstep3 = False
549 break
549 break
550 else:
550 else:
551 badfn(ff, inst.strerror)
551 badfn(ff, inst.strerror)
552
552
553 # step 2: visit subdirectories
553 # step 2: visit subdirectories
554 while work:
554 while work:
555 nd = work.pop()
555 nd = work.pop()
556 skip = None
556 skip = None
557 if nd == '.':
557 if nd == '.':
558 nd = ''
558 nd = ''
559 else:
559 else:
560 skip = '.hg'
560 skip = '.hg'
561 try:
561 try:
562 entries = listdir(join(nd), stat=True, skip=skip)
562 entries = listdir(join(nd), stat=True, skip=skip)
563 except OSError, inst:
563 except OSError, inst:
564 if inst.errno == errno.EACCES:
564 if inst.errno == errno.EACCES:
565 fwarn(nd, inst.strerror)
565 fwarn(nd, inst.strerror)
566 continue
566 continue
567 raise
567 raise
568 for f, kind, st in entries:
568 for f, kind, st in entries:
569 nf = normalize(nd and (nd + "/" + f) or f, True)
569 nf = normalize(nd and (nd + "/" + f) or f, True)
570 if nf not in results:
570 if nf not in results:
571 if kind == dirkind:
571 if kind == dirkind:
572 if not ignore(nf):
572 if not ignore(nf):
573 match.dir(nf)
573 match.dir(nf)
574 wadd(nf)
574 wadd(nf)
575 if nf in dmap and matchfn(nf):
575 if nf in dmap and matchfn(nf):
576 results[nf] = None
576 results[nf] = None
577 elif kind == regkind or kind == lnkkind:
577 elif kind == regkind or kind == lnkkind:
578 if nf in dmap:
578 if nf in dmap:
579 if matchfn(nf):
579 if matchfn(nf):
580 results[nf] = st
580 results[nf] = st
581 elif matchfn(nf) and not ignore(nf):
581 elif matchfn(nf) and not ignore(nf):
582 results[nf] = st
582 results[nf] = st
583 elif nf in dmap and matchfn(nf):
583 elif nf in dmap and matchfn(nf):
584 results[nf] = None
584 results[nf] = None
585
585
586 # step 3: report unseen items in the dmap hash
586 # step 3: report unseen items in the dmap hash
587 if not skipstep3 and not exact:
587 if not skipstep3 and not exact:
588 visit = sorted([f for f in dmap if f not in results and matchfn(f)])
588 visit = sorted([f for f in dmap if f not in results and matchfn(f)])
589 for nf, st in zip(visit, util.statfiles([join(i) for i in visit])):
589 for nf, st in zip(visit, util.statfiles([join(i) for i in visit])):
590 if not st is None and not getkind(st.st_mode) in (regkind, lnkkind):
590 if not st is None and not getkind(st.st_mode) in (regkind, lnkkind):
591 st = None
591 st = None
592 results[nf] = st
592 results[nf] = st
593 for s in subrepos:
593 for s in subrepos:
594 del results[s]
594 del results[s]
595 del results['.hg']
595 del results['.hg']
596 return results
596 return results
597
597
598 def status(self, match, subrepos, ignored, clean, unknown):
598 def status(self, match, subrepos, ignored, clean, unknown):
599 '''Determine the status of the working copy relative to the
599 '''Determine the status of the working copy relative to the
600 dirstate and return a tuple of lists (unsure, modified, added,
600 dirstate and return a tuple of lists (unsure, modified, added,
601 removed, deleted, unknown, ignored, clean), where:
601 removed, deleted, unknown, ignored, clean), where:
602
602
603 unsure:
603 unsure:
604 files that might have been modified since the dirstate was
604 files that might have been modified since the dirstate was
605 written, but need to be read to be sure (size is the same
605 written, but need to be read to be sure (size is the same
606 but mtime differs)
606 but mtime differs)
607 modified:
607 modified:
608 files that have definitely been modified since the dirstate
608 files that have definitely been modified since the dirstate
609 was written (different size or mode)
609 was written (different size or mode)
610 added:
610 added:
611 files that have been explicitly added with hg add
611 files that have been explicitly added with hg add
612 removed:
612 removed:
613 files that have been explicitly removed with hg remove
613 files that have been explicitly removed with hg remove
614 deleted:
614 deleted:
615 files that have been deleted through other means ("missing")
615 files that have been deleted through other means ("missing")
616 unknown:
616 unknown:
617 files not in the dirstate that are not ignored
617 files not in the dirstate that are not ignored
618 ignored:
618 ignored:
619 files not in the dirstate that are ignored
619 files not in the dirstate that are ignored
620 (by _dirignore())
620 (by _dirignore())
621 clean:
621 clean:
622 files that have definitely not been modified since the
622 files that have definitely not been modified since the
623 dirstate was written
623 dirstate was written
624 '''
624 '''
625 listignored, listclean, listunknown = ignored, clean, unknown
625 listignored, listclean, listunknown = ignored, clean, unknown
626 lookup, modified, added, unknown, ignored = [], [], [], [], []
626 lookup, modified, added, unknown, ignored = [], [], [], [], []
627 removed, deleted, clean = [], [], []
627 removed, deleted, clean = [], [], []
628
628
629 dmap = self._map
629 dmap = self._map
630 ladd = lookup.append # aka "unsure"
630 ladd = lookup.append # aka "unsure"
631 madd = modified.append
631 madd = modified.append
632 aadd = added.append
632 aadd = added.append
633 uadd = unknown.append
633 uadd = unknown.append
634 iadd = ignored.append
634 iadd = ignored.append
635 radd = removed.append
635 radd = removed.append
636 dadd = deleted.append
636 dadd = deleted.append
637 cadd = clean.append
637 cadd = clean.append
638
638
639 lnkkind = stat.S_IFLNK
639 lnkkind = stat.S_IFLNK
640
640
641 for fn, st in self.walk(match, subrepos, listunknown,
641 for fn, st in self.walk(match, subrepos, listunknown,
642 listignored).iteritems():
642 listignored).iteritems():
643 if fn not in dmap:
643 if fn not in dmap:
644 if (listignored or match.exact(fn)) and self._dirignore(fn):
644 if (listignored or match.exact(fn)) and self._dirignore(fn):
645 if listignored:
645 if listignored:
646 iadd(fn)
646 iadd(fn)
647 elif listunknown:
647 elif listunknown:
648 uadd(fn)
648 uadd(fn)
649 continue
649 continue
650
650
651 state, mode, size, time = dmap[fn]
651 state, mode, size, time = dmap[fn]
652
652
653 if not st and state in "nma":
653 if not st and state in "nma":
654 dadd(fn)
654 dadd(fn)
655 elif state == 'n':
655 elif state == 'n':
656 # The "mode & lnkkind != lnkkind or self._checklink"
656 # The "mode & lnkkind != lnkkind or self._checklink"
657 # lines are an expansion of "islink => checklink"
657 # lines are an expansion of "islink => checklink"
658 # where islink means "is this a link?" and checklink
658 # where islink means "is this a link?" and checklink
659 # means "can we check links?".
659 # means "can we check links?".
660 if (size >= 0 and
660 if (size >= 0 and
661 (size != st.st_size
661 (size != st.st_size
662 or ((mode ^ st.st_mode) & 0100 and self._checkexec))
662 or ((mode ^ st.st_mode) & 0100 and self._checkexec))
663 and (mode & lnkkind != lnkkind or self._checklink)
663 and (mode & lnkkind != lnkkind or self._checklink)
664 or size == -2 # other parent
664 or size == -2 # other parent
665 or fn in self._copymap):
665 or fn in self._copymap):
666 madd(fn)
666 madd(fn)
667 elif (time != int(st.st_mtime)
667 elif (time != int(st.st_mtime)
668 and (mode & lnkkind != lnkkind or self._checklink)):
668 and (mode & lnkkind != lnkkind or self._checklink)):
669 ladd(fn)
669 ladd(fn)
670 elif listclean:
670 elif listclean:
671 cadd(fn)
671 cadd(fn)
672 elif state == 'm':
672 elif state == 'm':
673 madd(fn)
673 madd(fn)
674 elif state == 'a':
674 elif state == 'a':
675 aadd(fn)
675 aadd(fn)
676 elif state == 'r':
676 elif state == 'r':
677 radd(fn)
677 radd(fn)
678
678
679 return (lookup, modified, added, removed, deleted, unknown, ignored,
679 return (lookup, modified, added, removed, deleted, unknown, ignored,
680 clean)
680 clean)
@@ -1,103 +1,103
1 # help.py - help data for mercurial
1 # help.py - help data for mercurial
2 #
2 #
3 # Copyright 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from i18n import gettext, _
8 from i18n import gettext, _
9 import sys, os
9 import sys, os
10 import extensions
10 import extensions
11
11
12
12
13 def moduledoc(file):
13 def moduledoc(file):
14 '''return the top-level python documentation for the given file
14 '''return the top-level python documentation for the given file
15
15
16 Loosely inspired by pydoc.source_synopsis(), but rewritten to
16 Loosely inspired by pydoc.source_synopsis(), but rewritten to
17 handle triple quotes and to return the whole text instead of just
17 handle triple quotes and to return the whole text instead of just
18 the synopsis'''
18 the synopsis'''
19 result = []
19 result = []
20
20
21 line = file.readline()
21 line = file.readline()
22 while line[:1] == '#' or not line.strip():
22 while line[:1] == '#' or not line.strip():
23 line = file.readline()
23 line = file.readline()
24 if not line:
24 if not line:
25 break
25 break
26
26
27 start = line[:3]
27 start = line[:3]
28 if start in ('"""', "'''"):
28 if start == '"""' or start == "'''":
29 line = line[3:]
29 line = line[3:]
30 while line:
30 while line:
31 if line.rstrip().endswith(start):
31 if line.rstrip().endswith(start):
32 line = line.split(start)[0]
32 line = line.split(start)[0]
33 if line:
33 if line:
34 result.append(line)
34 result.append(line)
35 break
35 break
36 elif not line:
36 elif not line:
37 return None # unmatched delimiter
37 return None # unmatched delimiter
38 result.append(line)
38 result.append(line)
39 line = file.readline()
39 line = file.readline()
40 else:
40 else:
41 return None
41 return None
42
42
43 return ''.join(result)
43 return ''.join(result)
44
44
45 def listexts(header, exts, maxlength, indent=1):
45 def listexts(header, exts, maxlength, indent=1):
46 '''return a text listing of the given extensions'''
46 '''return a text listing of the given extensions'''
47 if not exts:
47 if not exts:
48 return ''
48 return ''
49 result = '\n%s\n\n' % header
49 result = '\n%s\n\n' % header
50 for name, desc in sorted(exts.iteritems()):
50 for name, desc in sorted(exts.iteritems()):
51 result += '%s%-*s %s\n' % (' ' * indent, maxlength + 2,
51 result += '%s%-*s %s\n' % (' ' * indent, maxlength + 2,
52 ':%s:' % name, desc)
52 ':%s:' % name, desc)
53 return result
53 return result
54
54
55 def extshelp():
55 def extshelp():
56 doc = loaddoc('extensions')()
56 doc = loaddoc('extensions')()
57
57
58 exts, maxlength = extensions.enabled()
58 exts, maxlength = extensions.enabled()
59 doc += listexts(_('enabled extensions:'), exts, maxlength)
59 doc += listexts(_('enabled extensions:'), exts, maxlength)
60
60
61 exts, maxlength = extensions.disabled()
61 exts, maxlength = extensions.disabled()
62 doc += listexts(_('disabled extensions:'), exts, maxlength)
62 doc += listexts(_('disabled extensions:'), exts, maxlength)
63
63
64 return doc
64 return doc
65
65
66 def loaddoc(topic):
66 def loaddoc(topic):
67 """Return a delayed loader for help/topic.txt."""
67 """Return a delayed loader for help/topic.txt."""
68
68
69 def loader():
69 def loader():
70 if hasattr(sys, 'frozen'):
70 if hasattr(sys, 'frozen'):
71 module = sys.executable
71 module = sys.executable
72 else:
72 else:
73 module = __file__
73 module = __file__
74 base = os.path.dirname(module)
74 base = os.path.dirname(module)
75
75
76 for dir in ('.', '..'):
76 for dir in ('.', '..'):
77 docdir = os.path.join(base, dir, 'help')
77 docdir = os.path.join(base, dir, 'help')
78 if os.path.isdir(docdir):
78 if os.path.isdir(docdir):
79 break
79 break
80
80
81 path = os.path.join(docdir, topic + ".txt")
81 path = os.path.join(docdir, topic + ".txt")
82 return gettext(open(path).read())
82 return gettext(open(path).read())
83 return loader
83 return loader
84
84
85 helptable = [
85 helptable = [
86 (["config", "hgrc"], _("Configuration Files"), loaddoc('config')),
86 (["config", "hgrc"], _("Configuration Files"), loaddoc('config')),
87 (["dates"], _("Date Formats"), loaddoc('dates')),
87 (["dates"], _("Date Formats"), loaddoc('dates')),
88 (["patterns"], _("File Name Patterns"), loaddoc('patterns')),
88 (["patterns"], _("File Name Patterns"), loaddoc('patterns')),
89 (['environment', 'env'], _('Environment Variables'),
89 (['environment', 'env'], _('Environment Variables'),
90 loaddoc('environment')),
90 loaddoc('environment')),
91 (['revs', 'revisions'], _('Specifying Single Revisions'),
91 (['revs', 'revisions'], _('Specifying Single Revisions'),
92 loaddoc('revisions')),
92 loaddoc('revisions')),
93 (['mrevs', 'multirevs'], _('Specifying Multiple Revisions'),
93 (['mrevs', 'multirevs'], _('Specifying Multiple Revisions'),
94 loaddoc('multirevs')),
94 loaddoc('multirevs')),
95 (['revsets'], _("Specifying Revision Sets"), loaddoc('revsets')),
95 (['revsets'], _("Specifying Revision Sets"), loaddoc('revsets')),
96 (['diffs'], _('Diff Formats'), loaddoc('diffs')),
96 (['diffs'], _('Diff Formats'), loaddoc('diffs')),
97 (['templating', 'templates'], _('Template Usage'),
97 (['templating', 'templates'], _('Template Usage'),
98 loaddoc('templates')),
98 loaddoc('templates')),
99 (['urls'], _('URL Paths'), loaddoc('urls')),
99 (['urls'], _('URL Paths'), loaddoc('urls')),
100 (["extensions"], _("Using additional features"), extshelp),
100 (["extensions"], _("Using additional features"), extshelp),
101 (["hgweb"], _("Configuring hgweb"), loaddoc('hgweb')),
101 (["hgweb"], _("Configuring hgweb"), loaddoc('hgweb')),
102 (["glossary"], _("Glossary"), loaddoc('glossary')),
102 (["glossary"], _("Glossary"), loaddoc('glossary')),
103 ]
103 ]
@@ -1,539 +1,539
1 # merge.py - directory-level update/merge handling for Mercurial
1 # merge.py - directory-level update/merge handling for Mercurial
2 #
2 #
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from node import nullid, nullrev, hex, bin
8 from node import nullid, nullrev, hex, bin
9 from i18n import _
9 from i18n import _
10 import util, filemerge, copies, subrepo
10 import util, filemerge, copies, subrepo
11 import errno, os, shutil
11 import errno, os, shutil
12
12
13 class mergestate(object):
13 class mergestate(object):
14 '''track 3-way merge state of individual files'''
14 '''track 3-way merge state of individual files'''
15 def __init__(self, repo):
15 def __init__(self, repo):
16 self._repo = repo
16 self._repo = repo
17 self._dirty = False
17 self._dirty = False
18 self._read()
18 self._read()
19 def reset(self, node=None):
19 def reset(self, node=None):
20 self._state = {}
20 self._state = {}
21 if node:
21 if node:
22 self._local = node
22 self._local = node
23 shutil.rmtree(self._repo.join("merge"), True)
23 shutil.rmtree(self._repo.join("merge"), True)
24 self._dirty = False
24 self._dirty = False
25 def _read(self):
25 def _read(self):
26 self._state = {}
26 self._state = {}
27 try:
27 try:
28 f = self._repo.opener("merge/state")
28 f = self._repo.opener("merge/state")
29 for i, l in enumerate(f):
29 for i, l in enumerate(f):
30 if i == 0:
30 if i == 0:
31 self._local = bin(l[:-1])
31 self._local = bin(l[:-1])
32 else:
32 else:
33 bits = l[:-1].split("\0")
33 bits = l[:-1].split("\0")
34 self._state[bits[0]] = bits[1:]
34 self._state[bits[0]] = bits[1:]
35 except IOError, err:
35 except IOError, err:
36 if err.errno != errno.ENOENT:
36 if err.errno != errno.ENOENT:
37 raise
37 raise
38 self._dirty = False
38 self._dirty = False
39 def commit(self):
39 def commit(self):
40 if self._dirty:
40 if self._dirty:
41 f = self._repo.opener("merge/state", "w")
41 f = self._repo.opener("merge/state", "w")
42 f.write(hex(self._local) + "\n")
42 f.write(hex(self._local) + "\n")
43 for d, v in self._state.iteritems():
43 for d, v in self._state.iteritems():
44 f.write("\0".join([d] + v) + "\n")
44 f.write("\0".join([d] + v) + "\n")
45 self._dirty = False
45 self._dirty = False
46 def add(self, fcl, fco, fca, fd, flags):
46 def add(self, fcl, fco, fca, fd, flags):
47 hash = util.sha1(fcl.path()).hexdigest()
47 hash = util.sha1(fcl.path()).hexdigest()
48 self._repo.opener("merge/" + hash, "w").write(fcl.data())
48 self._repo.opener("merge/" + hash, "w").write(fcl.data())
49 self._state[fd] = ['u', hash, fcl.path(), fca.path(),
49 self._state[fd] = ['u', hash, fcl.path(), fca.path(),
50 hex(fca.filenode()), fco.path(), flags]
50 hex(fca.filenode()), fco.path(), flags]
51 self._dirty = True
51 self._dirty = True
52 def __contains__(self, dfile):
52 def __contains__(self, dfile):
53 return dfile in self._state
53 return dfile in self._state
54 def __getitem__(self, dfile):
54 def __getitem__(self, dfile):
55 return self._state[dfile][0]
55 return self._state[dfile][0]
56 def __iter__(self):
56 def __iter__(self):
57 l = self._state.keys()
57 l = self._state.keys()
58 l.sort()
58 l.sort()
59 for f in l:
59 for f in l:
60 yield f
60 yield f
61 def mark(self, dfile, state):
61 def mark(self, dfile, state):
62 self._state[dfile][0] = state
62 self._state[dfile][0] = state
63 self._dirty = True
63 self._dirty = True
64 def resolve(self, dfile, wctx, octx):
64 def resolve(self, dfile, wctx, octx):
65 if self[dfile] == 'r':
65 if self[dfile] == 'r':
66 return 0
66 return 0
67 state, hash, lfile, afile, anode, ofile, flags = self._state[dfile]
67 state, hash, lfile, afile, anode, ofile, flags = self._state[dfile]
68 f = self._repo.opener("merge/" + hash)
68 f = self._repo.opener("merge/" + hash)
69 self._repo.wwrite(dfile, f.read(), flags)
69 self._repo.wwrite(dfile, f.read(), flags)
70 fcd = wctx[dfile]
70 fcd = wctx[dfile]
71 fco = octx[ofile]
71 fco = octx[ofile]
72 fca = self._repo.filectx(afile, fileid=anode)
72 fca = self._repo.filectx(afile, fileid=anode)
73 r = filemerge.filemerge(self._repo, self._local, lfile, fcd, fco, fca)
73 r = filemerge.filemerge(self._repo, self._local, lfile, fcd, fco, fca)
74 if not r:
74 if not r:
75 self.mark(dfile, 'r')
75 self.mark(dfile, 'r')
76 return r
76 return r
77
77
78 def _checkunknown(wctx, mctx):
78 def _checkunknown(wctx, mctx):
79 "check for collisions between unknown files and files in mctx"
79 "check for collisions between unknown files and files in mctx"
80 for f in wctx.unknown():
80 for f in wctx.unknown():
81 if f in mctx and mctx[f].cmp(wctx[f]):
81 if f in mctx and mctx[f].cmp(wctx[f]):
82 raise util.Abort(_("untracked file in working directory differs"
82 raise util.Abort(_("untracked file in working directory differs"
83 " from file in requested revision: '%s'") % f)
83 " from file in requested revision: '%s'") % f)
84
84
85 def _checkcollision(mctx):
85 def _checkcollision(mctx):
86 "check for case folding collisions in the destination context"
86 "check for case folding collisions in the destination context"
87 folded = {}
87 folded = {}
88 for fn in mctx:
88 for fn in mctx:
89 fold = fn.lower()
89 fold = fn.lower()
90 if fold in folded:
90 if fold in folded:
91 raise util.Abort(_("case-folding collision between %s and %s")
91 raise util.Abort(_("case-folding collision between %s and %s")
92 % (fn, folded[fold]))
92 % (fn, folded[fold]))
93 folded[fold] = fn
93 folded[fold] = fn
94
94
95 def _forgetremoved(wctx, mctx, branchmerge):
95 def _forgetremoved(wctx, mctx, branchmerge):
96 """
96 """
97 Forget removed files
97 Forget removed files
98
98
99 If we're jumping between revisions (as opposed to merging), and if
99 If we're jumping between revisions (as opposed to merging), and if
100 neither the working directory nor the target rev has the file,
100 neither the working directory nor the target rev has the file,
101 then we need to remove it from the dirstate, to prevent the
101 then we need to remove it from the dirstate, to prevent the
102 dirstate from listing the file when it is no longer in the
102 dirstate from listing the file when it is no longer in the
103 manifest.
103 manifest.
104
104
105 If we're merging, and the other revision has removed a file
105 If we're merging, and the other revision has removed a file
106 that is not present in the working directory, we need to mark it
106 that is not present in the working directory, we need to mark it
107 as removed.
107 as removed.
108 """
108 """
109
109
110 action = []
110 action = []
111 state = branchmerge and 'r' or 'f'
111 state = branchmerge and 'r' or 'f'
112 for f in wctx.deleted():
112 for f in wctx.deleted():
113 if f not in mctx:
113 if f not in mctx:
114 action.append((f, state))
114 action.append((f, state))
115
115
116 if not branchmerge:
116 if not branchmerge:
117 for f in wctx.removed():
117 for f in wctx.removed():
118 if f not in mctx:
118 if f not in mctx:
119 action.append((f, "f"))
119 action.append((f, "f"))
120
120
121 return action
121 return action
122
122
123 def manifestmerge(repo, p1, p2, pa, overwrite, partial):
123 def manifestmerge(repo, p1, p2, pa, overwrite, partial):
124 """
124 """
125 Merge p1 and p2 with ancestor pa and generate merge action list
125 Merge p1 and p2 with ancestor pa and generate merge action list
126
126
127 overwrite = whether we clobber working files
127 overwrite = whether we clobber working files
128 partial = function to filter file lists
128 partial = function to filter file lists
129 """
129 """
130
130
131 def fmerge(f, f2, fa):
131 def fmerge(f, f2, fa):
132 """merge flags"""
132 """merge flags"""
133 a, m, n = ma.flags(fa), m1.flags(f), m2.flags(f2)
133 a, m, n = ma.flags(fa), m1.flags(f), m2.flags(f2)
134 if m == n: # flags agree
134 if m == n: # flags agree
135 return m # unchanged
135 return m # unchanged
136 if m and n and not a: # flags set, don't agree, differ from parent
136 if m and n and not a: # flags set, don't agree, differ from parent
137 r = repo.ui.promptchoice(
137 r = repo.ui.promptchoice(
138 _(" conflicting flags for %s\n"
138 _(" conflicting flags for %s\n"
139 "(n)one, e(x)ec or sym(l)ink?") % f,
139 "(n)one, e(x)ec or sym(l)ink?") % f,
140 (_("&None"), _("E&xec"), _("Sym&link")), 0)
140 (_("&None"), _("E&xec"), _("Sym&link")), 0)
141 if r == 1:
141 if r == 1:
142 return "x" # Exec
142 return "x" # Exec
143 if r == 2:
143 if r == 2:
144 return "l" # Symlink
144 return "l" # Symlink
145 return ""
145 return ""
146 if m and m != a: # changed from a to m
146 if m and m != a: # changed from a to m
147 return m
147 return m
148 if n and n != a: # changed from a to n
148 if n and n != a: # changed from a to n
149 return n
149 return n
150 return '' # flag was cleared
150 return '' # flag was cleared
151
151
152 def act(msg, m, f, *args):
152 def act(msg, m, f, *args):
153 repo.ui.debug(" %s: %s -> %s\n" % (f, msg, m))
153 repo.ui.debug(" %s: %s -> %s\n" % (f, msg, m))
154 action.append((f, m) + args)
154 action.append((f, m) + args)
155
155
156 action, copy = [], {}
156 action, copy = [], {}
157
157
158 if overwrite:
158 if overwrite:
159 pa = p1
159 pa = p1
160 elif pa == p2: # backwards
160 elif pa == p2: # backwards
161 pa = p1.p1()
161 pa = p1.p1()
162 elif pa and repo.ui.configbool("merge", "followcopies", True):
162 elif pa and repo.ui.configbool("merge", "followcopies", True):
163 dirs = repo.ui.configbool("merge", "followdirs", True)
163 dirs = repo.ui.configbool("merge", "followdirs", True)
164 copy, diverge = copies.copies(repo, p1, p2, pa, dirs)
164 copy, diverge = copies.copies(repo, p1, p2, pa, dirs)
165 for of, fl in diverge.iteritems():
165 for of, fl in diverge.iteritems():
166 act("divergent renames", "dr", of, fl)
166 act("divergent renames", "dr", of, fl)
167
167
168 repo.ui.note(_("resolving manifests\n"))
168 repo.ui.note(_("resolving manifests\n"))
169 repo.ui.debug(" overwrite %s partial %s\n" % (overwrite, bool(partial)))
169 repo.ui.debug(" overwrite %s partial %s\n" % (overwrite, bool(partial)))
170 repo.ui.debug(" ancestor %s local %s remote %s\n" % (pa, p1, p2))
170 repo.ui.debug(" ancestor %s local %s remote %s\n" % (pa, p1, p2))
171
171
172 m1, m2, ma = p1.manifest(), p2.manifest(), pa.manifest()
172 m1, m2, ma = p1.manifest(), p2.manifest(), pa.manifest()
173 copied = set(copy.values())
173 copied = set(copy.values())
174
174
175 if '.hgsubstate' in m1:
175 if '.hgsubstate' in m1:
176 # check whether sub state is modified
176 # check whether sub state is modified
177 for s in p1.substate:
177 for s in p1.substate:
178 if p1.sub(s).dirty():
178 if p1.sub(s).dirty():
179 m1['.hgsubstate'] += "+"
179 m1['.hgsubstate'] += "+"
180 break
180 break
181
181
182 # Compare manifests
182 # Compare manifests
183 for f, n in m1.iteritems():
183 for f, n in m1.iteritems():
184 if partial and not partial(f):
184 if partial and not partial(f):
185 continue
185 continue
186 if f in m2:
186 if f in m2:
187 rflags = fmerge(f, f, f)
187 rflags = fmerge(f, f, f)
188 a = ma.get(f, nullid)
188 a = ma.get(f, nullid)
189 if n == m2[f] or m2[f] == a: # same or local newer
189 if n == m2[f] or m2[f] == a: # same or local newer
190 # is file locally modified or flags need changing?
190 # is file locally modified or flags need changing?
191 # dirstate flags may need to be made current
191 # dirstate flags may need to be made current
192 if m1.flags(f) != rflags or n[20:]:
192 if m1.flags(f) != rflags or n[20:]:
193 act("update permissions", "e", f, rflags)
193 act("update permissions", "e", f, rflags)
194 elif n == a: # remote newer
194 elif n == a: # remote newer
195 act("remote is newer", "g", f, rflags)
195 act("remote is newer", "g", f, rflags)
196 else: # both changed
196 else: # both changed
197 act("versions differ", "m", f, f, f, rflags, False)
197 act("versions differ", "m", f, f, f, rflags, False)
198 elif f in copied: # files we'll deal with on m2 side
198 elif f in copied: # files we'll deal with on m2 side
199 pass
199 pass
200 elif f in copy:
200 elif f in copy:
201 f2 = copy[f]
201 f2 = copy[f]
202 if f2 not in m2: # directory rename
202 if f2 not in m2: # directory rename
203 act("remote renamed directory to " + f2, "d",
203 act("remote renamed directory to " + f2, "d",
204 f, None, f2, m1.flags(f))
204 f, None, f2, m1.flags(f))
205 else: # case 2 A,B/B/B or case 4,21 A/B/B
205 else: # case 2 A,B/B/B or case 4,21 A/B/B
206 act("local copied/moved to " + f2, "m",
206 act("local copied/moved to " + f2, "m",
207 f, f2, f, fmerge(f, f2, f2), False)
207 f, f2, f, fmerge(f, f2, f2), False)
208 elif f in ma: # clean, a different, no remote
208 elif f in ma: # clean, a different, no remote
209 if n != ma[f]:
209 if n != ma[f]:
210 if repo.ui.promptchoice(
210 if repo.ui.promptchoice(
211 _(" local changed %s which remote deleted\n"
211 _(" local changed %s which remote deleted\n"
212 "use (c)hanged version or (d)elete?") % f,
212 "use (c)hanged version or (d)elete?") % f,
213 (_("&Changed"), _("&Delete")), 0):
213 (_("&Changed"), _("&Delete")), 0):
214 act("prompt delete", "r", f)
214 act("prompt delete", "r", f)
215 else:
215 else:
216 act("prompt keep", "a", f)
216 act("prompt keep", "a", f)
217 elif n[20:] == "a": # added, no remote
217 elif n[20:] == "a": # added, no remote
218 act("remote deleted", "f", f)
218 act("remote deleted", "f", f)
219 elif n[20:] != "u":
219 elif n[20:] != "u":
220 act("other deleted", "r", f)
220 act("other deleted", "r", f)
221
221
222 for f, n in m2.iteritems():
222 for f, n in m2.iteritems():
223 if partial and not partial(f):
223 if partial and not partial(f):
224 continue
224 continue
225 if f in m1 or f in copied: # files already visited
225 if f in m1 or f in copied: # files already visited
226 continue
226 continue
227 if f in copy:
227 if f in copy:
228 f2 = copy[f]
228 f2 = copy[f]
229 if f2 not in m1: # directory rename
229 if f2 not in m1: # directory rename
230 act("local renamed directory to " + f2, "d",
230 act("local renamed directory to " + f2, "d",
231 None, f, f2, m2.flags(f))
231 None, f, f2, m2.flags(f))
232 elif f2 in m2: # rename case 1, A/A,B/A
232 elif f2 in m2: # rename case 1, A/A,B/A
233 act("remote copied to " + f, "m",
233 act("remote copied to " + f, "m",
234 f2, f, f, fmerge(f2, f, f2), False)
234 f2, f, f, fmerge(f2, f, f2), False)
235 else: # case 3,20 A/B/A
235 else: # case 3,20 A/B/A
236 act("remote moved to " + f, "m",
236 act("remote moved to " + f, "m",
237 f2, f, f, fmerge(f2, f, f2), True)
237 f2, f, f, fmerge(f2, f, f2), True)
238 elif f not in ma:
238 elif f not in ma:
239 act("remote created", "g", f, m2.flags(f))
239 act("remote created", "g", f, m2.flags(f))
240 elif n != ma[f]:
240 elif n != ma[f]:
241 if repo.ui.promptchoice(
241 if repo.ui.promptchoice(
242 _("remote changed %s which local deleted\n"
242 _("remote changed %s which local deleted\n"
243 "use (c)hanged version or leave (d)eleted?") % f,
243 "use (c)hanged version or leave (d)eleted?") % f,
244 (_("&Changed"), _("&Deleted")), 0) == 0:
244 (_("&Changed"), _("&Deleted")), 0) == 0:
245 act("prompt recreating", "g", f, m2.flags(f))
245 act("prompt recreating", "g", f, m2.flags(f))
246
246
247 return action
247 return action
248
248
249 def actionkey(a):
249 def actionkey(a):
250 return a[1] == 'r' and -1 or 0, a
250 return a[1] == 'r' and -1 or 0, a
251
251
252 def applyupdates(repo, action, wctx, mctx, actx):
252 def applyupdates(repo, action, wctx, mctx, actx):
253 """apply the merge action list to the working directory
253 """apply the merge action list to the working directory
254
254
255 wctx is the working copy context
255 wctx is the working copy context
256 mctx is the context to be merged into the working copy
256 mctx is the context to be merged into the working copy
257 actx is the context of the common ancestor
257 actx is the context of the common ancestor
258 """
258 """
259
259
260 updated, merged, removed, unresolved = 0, 0, 0, 0
260 updated, merged, removed, unresolved = 0, 0, 0, 0
261 ms = mergestate(repo)
261 ms = mergestate(repo)
262 ms.reset(wctx.parents()[0].node())
262 ms.reset(wctx.parents()[0].node())
263 moves = []
263 moves = []
264 action.sort(key=actionkey)
264 action.sort(key=actionkey)
265 substate = wctx.substate # prime
265 substate = wctx.substate # prime
266
266
267 # prescan for merges
267 # prescan for merges
268 u = repo.ui
268 u = repo.ui
269 for a in action:
269 for a in action:
270 f, m = a[:2]
270 f, m = a[:2]
271 if m == 'm': # merge
271 if m == 'm': # merge
272 f2, fd, flags, move = a[2:]
272 f2, fd, flags, move = a[2:]
273 if f == '.hgsubstate': # merged internally
273 if f == '.hgsubstate': # merged internally
274 continue
274 continue
275 repo.ui.debug("preserving %s for resolve of %s\n" % (f, fd))
275 repo.ui.debug("preserving %s for resolve of %s\n" % (f, fd))
276 fcl = wctx[f]
276 fcl = wctx[f]
277 fco = mctx[f2]
277 fco = mctx[f2]
278 if mctx == actx: # backwards, use working dir parent as ancestor
278 if mctx == actx: # backwards, use working dir parent as ancestor
279 fca = fcl.parents()[0]
279 fca = fcl.parents()[0]
280 else:
280 else:
281 fca = fcl.ancestor(fco, actx)
281 fca = fcl.ancestor(fco, actx)
282 if not fca:
282 if not fca:
283 fca = repo.filectx(f, fileid=nullrev)
283 fca = repo.filectx(f, fileid=nullrev)
284 ms.add(fcl, fco, fca, fd, flags)
284 ms.add(fcl, fco, fca, fd, flags)
285 if f != fd and move:
285 if f != fd and move:
286 moves.append(f)
286 moves.append(f)
287
287
288 # remove renamed files after safely stored
288 # remove renamed files after safely stored
289 for f in moves:
289 for f in moves:
290 if os.path.lexists(repo.wjoin(f)):
290 if os.path.lexists(repo.wjoin(f)):
291 repo.ui.debug("removing %s\n" % f)
291 repo.ui.debug("removing %s\n" % f)
292 os.unlink(repo.wjoin(f))
292 os.unlink(repo.wjoin(f))
293
293
294 audit_path = util.path_auditor(repo.root)
294 audit_path = util.path_auditor(repo.root)
295
295
296 numupdates = len(action)
296 numupdates = len(action)
297 for i, a in enumerate(action):
297 for i, a in enumerate(action):
298 f, m = a[:2]
298 f, m = a[:2]
299 u.progress(_('updating'), i + 1, item=f, total=numupdates, unit='files')
299 u.progress(_('updating'), i + 1, item=f, total=numupdates, unit='files')
300 if f and f[0] == "/":
300 if f and f[0] == "/":
301 continue
301 continue
302 if m == "r": # remove
302 if m == "r": # remove
303 repo.ui.note(_("removing %s\n") % f)
303 repo.ui.note(_("removing %s\n") % f)
304 audit_path(f)
304 audit_path(f)
305 if f == '.hgsubstate': # subrepo states need updating
305 if f == '.hgsubstate': # subrepo states need updating
306 subrepo.submerge(repo, wctx, mctx, wctx)
306 subrepo.submerge(repo, wctx, mctx, wctx)
307 try:
307 try:
308 util.unlink(repo.wjoin(f))
308 util.unlink(repo.wjoin(f))
309 except OSError, inst:
309 except OSError, inst:
310 if inst.errno != errno.ENOENT:
310 if inst.errno != errno.ENOENT:
311 repo.ui.warn(_("update failed to remove %s: %s!\n") %
311 repo.ui.warn(_("update failed to remove %s: %s!\n") %
312 (f, inst.strerror))
312 (f, inst.strerror))
313 removed += 1
313 removed += 1
314 elif m == "m": # merge
314 elif m == "m": # merge
315 if f == '.hgsubstate': # subrepo states need updating
315 if f == '.hgsubstate': # subrepo states need updating
316 subrepo.submerge(repo, wctx, mctx, wctx.ancestor(mctx))
316 subrepo.submerge(repo, wctx, mctx, wctx.ancestor(mctx))
317 continue
317 continue
318 f2, fd, flags, move = a[2:]
318 f2, fd, flags, move = a[2:]
319 r = ms.resolve(fd, wctx, mctx)
319 r = ms.resolve(fd, wctx, mctx)
320 if r is not None and r > 0:
320 if r is not None and r > 0:
321 unresolved += 1
321 unresolved += 1
322 else:
322 else:
323 if r is None:
323 if r is None:
324 updated += 1
324 updated += 1
325 else:
325 else:
326 merged += 1
326 merged += 1
327 util.set_flags(repo.wjoin(fd), 'l' in flags, 'x' in flags)
327 util.set_flags(repo.wjoin(fd), 'l' in flags, 'x' in flags)
328 if f != fd and move and os.path.lexists(repo.wjoin(f)):
328 if f != fd and move and os.path.lexists(repo.wjoin(f)):
329 repo.ui.debug("removing %s\n" % f)
329 repo.ui.debug("removing %s\n" % f)
330 os.unlink(repo.wjoin(f))
330 os.unlink(repo.wjoin(f))
331 elif m == "g": # get
331 elif m == "g": # get
332 flags = a[2]
332 flags = a[2]
333 repo.ui.note(_("getting %s\n") % f)
333 repo.ui.note(_("getting %s\n") % f)
334 t = mctx.filectx(f).data()
334 t = mctx.filectx(f).data()
335 repo.wwrite(f, t, flags)
335 repo.wwrite(f, t, flags)
336 t = None
336 t = None
337 updated += 1
337 updated += 1
338 if f == '.hgsubstate': # subrepo states need updating
338 if f == '.hgsubstate': # subrepo states need updating
339 subrepo.submerge(repo, wctx, mctx, wctx)
339 subrepo.submerge(repo, wctx, mctx, wctx)
340 elif m == "d": # directory rename
340 elif m == "d": # directory rename
341 f2, fd, flags = a[2:]
341 f2, fd, flags = a[2:]
342 if f:
342 if f:
343 repo.ui.note(_("moving %s to %s\n") % (f, fd))
343 repo.ui.note(_("moving %s to %s\n") % (f, fd))
344 t = wctx.filectx(f).data()
344 t = wctx.filectx(f).data()
345 repo.wwrite(fd, t, flags)
345 repo.wwrite(fd, t, flags)
346 util.unlink(repo.wjoin(f))
346 util.unlink(repo.wjoin(f))
347 if f2:
347 if f2:
348 repo.ui.note(_("getting %s to %s\n") % (f2, fd))
348 repo.ui.note(_("getting %s to %s\n") % (f2, fd))
349 t = mctx.filectx(f2).data()
349 t = mctx.filectx(f2).data()
350 repo.wwrite(fd, t, flags)
350 repo.wwrite(fd, t, flags)
351 updated += 1
351 updated += 1
352 elif m == "dr": # divergent renames
352 elif m == "dr": # divergent renames
353 fl = a[2]
353 fl = a[2]
354 repo.ui.warn(_("warning: detected divergent renames of %s to:\n") % f)
354 repo.ui.warn(_("warning: detected divergent renames of %s to:\n") % f)
355 for nf in fl:
355 for nf in fl:
356 repo.ui.warn(" %s\n" % nf)
356 repo.ui.warn(" %s\n" % nf)
357 elif m == "e": # exec
357 elif m == "e": # exec
358 flags = a[2]
358 flags = a[2]
359 util.set_flags(repo.wjoin(f), 'l' in flags, 'x' in flags)
359 util.set_flags(repo.wjoin(f), 'l' in flags, 'x' in flags)
360 ms.commit()
360 ms.commit()
361 u.progress(_('updating'), None, total=numupdates, unit='files')
361 u.progress(_('updating'), None, total=numupdates, unit='files')
362
362
363 return updated, merged, removed, unresolved
363 return updated, merged, removed, unresolved
364
364
365 def recordupdates(repo, action, branchmerge):
365 def recordupdates(repo, action, branchmerge):
366 "record merge actions to the dirstate"
366 "record merge actions to the dirstate"
367
367
368 for a in action:
368 for a in action:
369 f, m = a[:2]
369 f, m = a[:2]
370 if m == "r": # remove
370 if m == "r": # remove
371 if branchmerge:
371 if branchmerge:
372 repo.dirstate.remove(f)
372 repo.dirstate.remove(f)
373 else:
373 else:
374 repo.dirstate.forget(f)
374 repo.dirstate.forget(f)
375 elif m == "a": # re-add
375 elif m == "a": # re-add
376 if not branchmerge:
376 if not branchmerge:
377 repo.dirstate.add(f)
377 repo.dirstate.add(f)
378 elif m == "f": # forget
378 elif m == "f": # forget
379 repo.dirstate.forget(f)
379 repo.dirstate.forget(f)
380 elif m == "e": # exec change
380 elif m == "e": # exec change
381 repo.dirstate.normallookup(f)
381 repo.dirstate.normallookup(f)
382 elif m == "g": # get
382 elif m == "g": # get
383 if branchmerge:
383 if branchmerge:
384 repo.dirstate.otherparent(f)
384 repo.dirstate.otherparent(f)
385 else:
385 else:
386 repo.dirstate.normal(f)
386 repo.dirstate.normal(f)
387 elif m == "m": # merge
387 elif m == "m": # merge
388 f2, fd, flag, move = a[2:]
388 f2, fd, flag, move = a[2:]
389 if branchmerge:
389 if branchmerge:
390 # We've done a branch merge, mark this file as merged
390 # We've done a branch merge, mark this file as merged
391 # so that we properly record the merger later
391 # so that we properly record the merger later
392 repo.dirstate.merge(fd)
392 repo.dirstate.merge(fd)
393 if f != f2: # copy/rename
393 if f != f2: # copy/rename
394 if move:
394 if move:
395 repo.dirstate.remove(f)
395 repo.dirstate.remove(f)
396 if f != fd:
396 if f != fd:
397 repo.dirstate.copy(f, fd)
397 repo.dirstate.copy(f, fd)
398 else:
398 else:
399 repo.dirstate.copy(f2, fd)
399 repo.dirstate.copy(f2, fd)
400 else:
400 else:
401 # We've update-merged a locally modified file, so
401 # We've update-merged a locally modified file, so
402 # we set the dirstate to emulate a normal checkout
402 # we set the dirstate to emulate a normal checkout
403 # of that file some time in the past. Thus our
403 # of that file some time in the past. Thus our
404 # merge will appear as a normal local file
404 # merge will appear as a normal local file
405 # modification.
405 # modification.
406 if f2 == fd: # file not locally copied/moved
406 if f2 == fd: # file not locally copied/moved
407 repo.dirstate.normallookup(fd)
407 repo.dirstate.normallookup(fd)
408 if move:
408 if move:
409 repo.dirstate.forget(f)
409 repo.dirstate.forget(f)
410 elif m == "d": # directory rename
410 elif m == "d": # directory rename
411 f2, fd, flag = a[2:]
411 f2, fd, flag = a[2:]
412 if not f2 and f not in repo.dirstate:
412 if not f2 and f not in repo.dirstate:
413 # untracked file moved
413 # untracked file moved
414 continue
414 continue
415 if branchmerge:
415 if branchmerge:
416 repo.dirstate.add(fd)
416 repo.dirstate.add(fd)
417 if f:
417 if f:
418 repo.dirstate.remove(f)
418 repo.dirstate.remove(f)
419 repo.dirstate.copy(f, fd)
419 repo.dirstate.copy(f, fd)
420 if f2:
420 if f2:
421 repo.dirstate.copy(f2, fd)
421 repo.dirstate.copy(f2, fd)
422 else:
422 else:
423 repo.dirstate.normal(fd)
423 repo.dirstate.normal(fd)
424 if f:
424 if f:
425 repo.dirstate.forget(f)
425 repo.dirstate.forget(f)
426
426
427 def update(repo, node, branchmerge, force, partial):
427 def update(repo, node, branchmerge, force, partial):
428 """
428 """
429 Perform a merge between the working directory and the given node
429 Perform a merge between the working directory and the given node
430
430
431 node = the node to update to, or None if unspecified
431 node = the node to update to, or None if unspecified
432 branchmerge = whether to merge between branches
432 branchmerge = whether to merge between branches
433 force = whether to force branch merging or file overwriting
433 force = whether to force branch merging or file overwriting
434 partial = a function to filter file lists (dirstate not updated)
434 partial = a function to filter file lists (dirstate not updated)
435
435
436 The table below shows all the behaviors of the update command
436 The table below shows all the behaviors of the update command
437 given the -c and -C or no options, whether the working directory
437 given the -c and -C or no options, whether the working directory
438 is dirty, whether a revision is specified, and the relationship of
438 is dirty, whether a revision is specified, and the relationship of
439 the parent rev to the target rev (linear, on the same named
439 the parent rev to the target rev (linear, on the same named
440 branch, or on another named branch).
440 branch, or on another named branch).
441
441
442 This logic is tested by test-update-branches.t.
442 This logic is tested by test-update-branches.t.
443
443
444 -c -C dirty rev | linear same cross
444 -c -C dirty rev | linear same cross
445 n n n n | ok (1) x
445 n n n n | ok (1) x
446 n n n y | ok ok ok
446 n n n y | ok ok ok
447 n n y * | merge (2) (2)
447 n n y * | merge (2) (2)
448 n y * * | --- discard ---
448 n y * * | --- discard ---
449 y n y * | --- (3) ---
449 y n y * | --- (3) ---
450 y n n * | --- ok ---
450 y n n * | --- ok ---
451 y y * * | --- (4) ---
451 y y * * | --- (4) ---
452
452
453 x = can't happen
453 x = can't happen
454 * = don't-care
454 * = don't-care
455 1 = abort: crosses branches (use 'hg merge' or 'hg update -c')
455 1 = abort: crosses branches (use 'hg merge' or 'hg update -c')
456 2 = abort: crosses branches (use 'hg merge' to merge or
456 2 = abort: crosses branches (use 'hg merge' to merge or
457 use 'hg update -C' to discard changes)
457 use 'hg update -C' to discard changes)
458 3 = abort: uncommitted local changes
458 3 = abort: uncommitted local changes
459 4 = incompatible options (checked in commands.py)
459 4 = incompatible options (checked in commands.py)
460 """
460 """
461
461
462 onode = node
462 onode = node
463 wlock = repo.wlock()
463 wlock = repo.wlock()
464 try:
464 try:
465 wc = repo[None]
465 wc = repo[None]
466 if node is None:
466 if node is None:
467 # tip of current branch
467 # tip of current branch
468 try:
468 try:
469 node = repo.branchtags()[wc.branch()]
469 node = repo.branchtags()[wc.branch()]
470 except KeyError:
470 except KeyError:
471 if wc.branch() == "default": # no default branch!
471 if wc.branch() == "default": # no default branch!
472 node = repo.lookup("tip") # update to tip
472 node = repo.lookup("tip") # update to tip
473 else:
473 else:
474 raise util.Abort(_("branch %s not found") % wc.branch())
474 raise util.Abort(_("branch %s not found") % wc.branch())
475 overwrite = force and not branchmerge
475 overwrite = force and not branchmerge
476 pl = wc.parents()
476 pl = wc.parents()
477 p1, p2 = pl[0], repo[node]
477 p1, p2 = pl[0], repo[node]
478 pa = p1.ancestor(p2)
478 pa = p1.ancestor(p2)
479 fp1, fp2, xp1, xp2 = p1.node(), p2.node(), str(p1), str(p2)
479 fp1, fp2, xp1, xp2 = p1.node(), p2.node(), str(p1), str(p2)
480 fastforward = False
480 fastforward = False
481
481
482 ### check phase
482 ### check phase
483 if not overwrite and len(pl) > 1:
483 if not overwrite and len(pl) > 1:
484 raise util.Abort(_("outstanding uncommitted merges"))
484 raise util.Abort(_("outstanding uncommitted merges"))
485 if branchmerge:
485 if branchmerge:
486 if pa == p2:
486 if pa == p2:
487 raise util.Abort(_("merging with a working directory ancestor"
487 raise util.Abort(_("merging with a working directory ancestor"
488 " has no effect"))
488 " has no effect"))
489 elif pa == p1:
489 elif pa == p1:
490 if p1.branch() != p2.branch():
490 if p1.branch() != p2.branch():
491 fastforward = True
491 fastforward = True
492 else:
492 else:
493 raise util.Abort(_("nothing to merge (use 'hg update'"
493 raise util.Abort(_("nothing to merge (use 'hg update'"
494 " or check 'hg heads')"))
494 " or check 'hg heads')"))
495 if not force and (wc.files() or wc.deleted()):
495 if not force and (wc.files() or wc.deleted()):
496 raise util.Abort(_("outstanding uncommitted changes "
496 raise util.Abort(_("outstanding uncommitted changes "
497 "(use 'hg status' to list changes)"))
497 "(use 'hg status' to list changes)"))
498 elif not overwrite:
498 elif not overwrite:
499 if pa in (p1, p2): # linear
499 if pa == p1 or pa == p2: # linear
500 pass # all good
500 pass # all good
501 elif wc.files() or wc.deleted():
501 elif wc.files() or wc.deleted():
502 raise util.Abort(_("crosses branches (use 'hg merge' to merge "
502 raise util.Abort(_("crosses branches (use 'hg merge' to merge "
503 "or use 'hg update -C' to discard changes)"))
503 "or use 'hg update -C' to discard changes)"))
504 elif onode is None:
504 elif onode is None:
505 raise util.Abort(_("crosses branches (use 'hg merge' or use "
505 raise util.Abort(_("crosses branches (use 'hg merge' or use "
506 "'hg update -c')"))
506 "'hg update -c')"))
507 else:
507 else:
508 # Allow jumping branches if clean and specific rev given
508 # Allow jumping branches if clean and specific rev given
509 overwrite = True
509 overwrite = True
510
510
511 ### calculate phase
511 ### calculate phase
512 action = []
512 action = []
513 wc.status(unknown=True) # prime cache
513 wc.status(unknown=True) # prime cache
514 if not force:
514 if not force:
515 _checkunknown(wc, p2)
515 _checkunknown(wc, p2)
516 if not util.checkcase(repo.path):
516 if not util.checkcase(repo.path):
517 _checkcollision(p2)
517 _checkcollision(p2)
518 action += _forgetremoved(wc, p2, branchmerge)
518 action += _forgetremoved(wc, p2, branchmerge)
519 action += manifestmerge(repo, wc, p2, pa, overwrite, partial)
519 action += manifestmerge(repo, wc, p2, pa, overwrite, partial)
520
520
521 ### apply phase
521 ### apply phase
522 if not branchmerge: # just jump to the new rev
522 if not branchmerge: # just jump to the new rev
523 fp1, fp2, xp1, xp2 = fp2, nullid, xp2, ''
523 fp1, fp2, xp1, xp2 = fp2, nullid, xp2, ''
524 if not partial:
524 if not partial:
525 repo.hook('preupdate', throw=True, parent1=xp1, parent2=xp2)
525 repo.hook('preupdate', throw=True, parent1=xp1, parent2=xp2)
526
526
527 stats = applyupdates(repo, action, wc, p2, pa)
527 stats = applyupdates(repo, action, wc, p2, pa)
528
528
529 if not partial:
529 if not partial:
530 repo.dirstate.setparents(fp1, fp2)
530 repo.dirstate.setparents(fp1, fp2)
531 recordupdates(repo, action, branchmerge)
531 recordupdates(repo, action, branchmerge)
532 if not branchmerge and not fastforward:
532 if not branchmerge and not fastforward:
533 repo.dirstate.setbranch(p2.branch())
533 repo.dirstate.setbranch(p2.branch())
534 finally:
534 finally:
535 wlock.release()
535 wlock.release()
536
536
537 if not partial:
537 if not partial:
538 repo.hook('update', parent1=xp1, parent2=xp2, error=stats[3])
538 repo.hook('update', parent1=xp1, parent2=xp2, error=stats[3])
539 return stats
539 return stats
@@ -1,591 +1,591
1 # revset.py - revision set queries for mercurial
1 # revset.py - revision set queries for mercurial
2 #
2 #
3 # Copyright 2010 Matt Mackall <mpm@selenic.com>
3 # Copyright 2010 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 import re
8 import re
9 import parser, util, error, discovery
9 import parser, util, error, discovery
10 import match as matchmod
10 import match as matchmod
11 from i18n import _
11 from i18n import _
12
12
13 elements = {
13 elements = {
14 "(": (20, ("group", 1, ")"), ("func", 1, ")")),
14 "(": (20, ("group", 1, ")"), ("func", 1, ")")),
15 "-": (19, ("negate", 19), ("minus", 19)),
15 "-": (19, ("negate", 19), ("minus", 19)),
16 "::": (17, ("dagrangepre", 17), ("dagrange", 17),
16 "::": (17, ("dagrangepre", 17), ("dagrange", 17),
17 ("dagrangepost", 17)),
17 ("dagrangepost", 17)),
18 "..": (17, ("dagrangepre", 17), ("dagrange", 17),
18 "..": (17, ("dagrangepre", 17), ("dagrange", 17),
19 ("dagrangepost", 17)),
19 ("dagrangepost", 17)),
20 ":": (15, ("rangepre", 15), ("range", 15), ("rangepost", 15)),
20 ":": (15, ("rangepre", 15), ("range", 15), ("rangepost", 15)),
21 "not": (10, ("not", 10)),
21 "not": (10, ("not", 10)),
22 "!": (10, ("not", 10)),
22 "!": (10, ("not", 10)),
23 "and": (5, None, ("and", 5)),
23 "and": (5, None, ("and", 5)),
24 "&": (5, None, ("and", 5)),
24 "&": (5, None, ("and", 5)),
25 "or": (4, None, ("or", 4)),
25 "or": (4, None, ("or", 4)),
26 "|": (4, None, ("or", 4)),
26 "|": (4, None, ("or", 4)),
27 "+": (4, None, ("or", 4)),
27 "+": (4, None, ("or", 4)),
28 ",": (2, None, ("list", 2)),
28 ",": (2, None, ("list", 2)),
29 ")": (0, None, None),
29 ")": (0, None, None),
30 "symbol": (0, ("symbol",), None),
30 "symbol": (0, ("symbol",), None),
31 "string": (0, ("string",), None),
31 "string": (0, ("string",), None),
32 "end": (0, None, None),
32 "end": (0, None, None),
33 }
33 }
34
34
35 keywords = set(['and', 'or', 'not'])
35 keywords = set(['and', 'or', 'not'])
36
36
37 def tokenize(program):
37 def tokenize(program):
38 pos, l = 0, len(program)
38 pos, l = 0, len(program)
39 while pos < l:
39 while pos < l:
40 c = program[pos]
40 c = program[pos]
41 if c.isspace(): # skip inter-token whitespace
41 if c.isspace(): # skip inter-token whitespace
42 pass
42 pass
43 elif c == ':' and program[pos:pos + 2] == '::': # look ahead carefully
43 elif c == ':' and program[pos:pos + 2] == '::': # look ahead carefully
44 yield ('::', None, pos)
44 yield ('::', None, pos)
45 pos += 1 # skip ahead
45 pos += 1 # skip ahead
46 elif c == '.' and program[pos:pos + 2] == '..': # look ahead carefully
46 elif c == '.' and program[pos:pos + 2] == '..': # look ahead carefully
47 yield ('..', None, pos)
47 yield ('..', None, pos)
48 pos += 1 # skip ahead
48 pos += 1 # skip ahead
49 elif c in "():,-|&+!": # handle simple operators
49 elif c in "():,-|&+!": # handle simple operators
50 yield (c, None, pos)
50 yield (c, None, pos)
51 elif c in '"\'': # handle quoted strings
51 elif c in '"\'': # handle quoted strings
52 pos += 1
52 pos += 1
53 s = pos
53 s = pos
54 while pos < l: # find closing quote
54 while pos < l: # find closing quote
55 d = program[pos]
55 d = program[pos]
56 if d == '\\': # skip over escaped characters
56 if d == '\\': # skip over escaped characters
57 pos += 2
57 pos += 2
58 continue
58 continue
59 if d == c:
59 if d == c:
60 yield ('string', program[s:pos].decode('string-escape'), s)
60 yield ('string', program[s:pos].decode('string-escape'), s)
61 break
61 break
62 pos += 1
62 pos += 1
63 else:
63 else:
64 raise error.ParseError(_("unterminated string"), s)
64 raise error.ParseError(_("unterminated string"), s)
65 elif c.isalnum() or c in '._' or ord(c) > 127: # gather up a symbol/keyword
65 elif c.isalnum() or c in '._' or ord(c) > 127: # gather up a symbol/keyword
66 s = pos
66 s = pos
67 pos += 1
67 pos += 1
68 while pos < l: # find end of symbol
68 while pos < l: # find end of symbol
69 d = program[pos]
69 d = program[pos]
70 if not (d.isalnum() or d in "._" or ord(d) > 127):
70 if not (d.isalnum() or d in "._" or ord(d) > 127):
71 break
71 break
72 if d == '.' and program[pos - 1] == '.': # special case for ..
72 if d == '.' and program[pos - 1] == '.': # special case for ..
73 pos -= 1
73 pos -= 1
74 break
74 break
75 pos += 1
75 pos += 1
76 sym = program[s:pos]
76 sym = program[s:pos]
77 if sym in keywords: # operator keywords
77 if sym in keywords: # operator keywords
78 yield (sym, None, s)
78 yield (sym, None, s)
79 else:
79 else:
80 yield ('symbol', sym, s)
80 yield ('symbol', sym, s)
81 pos -= 1
81 pos -= 1
82 else:
82 else:
83 raise error.ParseError(_("syntax error"), pos)
83 raise error.ParseError(_("syntax error"), pos)
84 pos += 1
84 pos += 1
85 yield ('end', None, pos)
85 yield ('end', None, pos)
86
86
87 # helpers
87 # helpers
88
88
89 def getstring(x, err):
89 def getstring(x, err):
90 if x and x[0] in ('string', 'symbol'):
90 if x and (x[0] == 'string' or x[0] == 'symbol'):
91 return x[1]
91 return x[1]
92 raise error.ParseError(err)
92 raise error.ParseError(err)
93
93
94 def getlist(x):
94 def getlist(x):
95 if not x:
95 if not x:
96 return []
96 return []
97 if x[0] == 'list':
97 if x[0] == 'list':
98 return getlist(x[1]) + [x[2]]
98 return getlist(x[1]) + [x[2]]
99 return [x]
99 return [x]
100
100
101 def getargs(x, min, max, err):
101 def getargs(x, min, max, err):
102 l = getlist(x)
102 l = getlist(x)
103 if len(l) < min or len(l) > max:
103 if len(l) < min or len(l) > max:
104 raise error.ParseError(err)
104 raise error.ParseError(err)
105 return l
105 return l
106
106
107 def getset(repo, subset, x):
107 def getset(repo, subset, x):
108 if not x:
108 if not x:
109 raise error.ParseError(_("missing argument"))
109 raise error.ParseError(_("missing argument"))
110 return methods[x[0]](repo, subset, *x[1:])
110 return methods[x[0]](repo, subset, *x[1:])
111
111
112 # operator methods
112 # operator methods
113
113
114 def stringset(repo, subset, x):
114 def stringset(repo, subset, x):
115 x = repo[x].rev()
115 x = repo[x].rev()
116 if x == -1 and len(subset) == len(repo):
116 if x == -1 and len(subset) == len(repo):
117 return [-1]
117 return [-1]
118 if x in subset:
118 if x in subset:
119 return [x]
119 return [x]
120 return []
120 return []
121
121
122 def symbolset(repo, subset, x):
122 def symbolset(repo, subset, x):
123 if x in symbols:
123 if x in symbols:
124 raise error.ParseError(_("can't use %s here") % x)
124 raise error.ParseError(_("can't use %s here") % x)
125 return stringset(repo, subset, x)
125 return stringset(repo, subset, x)
126
126
127 def rangeset(repo, subset, x, y):
127 def rangeset(repo, subset, x, y):
128 m = getset(repo, subset, x)
128 m = getset(repo, subset, x)
129 if not m:
129 if not m:
130 m = getset(repo, range(len(repo)), x)
130 m = getset(repo, range(len(repo)), x)
131
131
132 n = getset(repo, subset, y)
132 n = getset(repo, subset, y)
133 if not n:
133 if not n:
134 n = getset(repo, range(len(repo)), y)
134 n = getset(repo, range(len(repo)), y)
135
135
136 if not m or not n:
136 if not m or not n:
137 return []
137 return []
138 m, n = m[0], n[-1]
138 m, n = m[0], n[-1]
139
139
140 if m < n:
140 if m < n:
141 r = range(m, n + 1)
141 r = range(m, n + 1)
142 else:
142 else:
143 r = range(m, n - 1, -1)
143 r = range(m, n - 1, -1)
144 s = set(subset)
144 s = set(subset)
145 return [x for x in r if x in s]
145 return [x for x in r if x in s]
146
146
147 def andset(repo, subset, x, y):
147 def andset(repo, subset, x, y):
148 return getset(repo, getset(repo, subset, x), y)
148 return getset(repo, getset(repo, subset, x), y)
149
149
150 def orset(repo, subset, x, y):
150 def orset(repo, subset, x, y):
151 s = set(getset(repo, subset, x))
151 s = set(getset(repo, subset, x))
152 s |= set(getset(repo, [r for r in subset if r not in s], y))
152 s |= set(getset(repo, [r for r in subset if r not in s], y))
153 return [r for r in subset if r in s]
153 return [r for r in subset if r in s]
154
154
155 def notset(repo, subset, x):
155 def notset(repo, subset, x):
156 s = set(getset(repo, subset, x))
156 s = set(getset(repo, subset, x))
157 return [r for r in subset if r not in s]
157 return [r for r in subset if r not in s]
158
158
159 def listset(repo, subset, a, b):
159 def listset(repo, subset, a, b):
160 raise error.ParseError(_("can't use a list in this context"))
160 raise error.ParseError(_("can't use a list in this context"))
161
161
162 def func(repo, subset, a, b):
162 def func(repo, subset, a, b):
163 if a[0] == 'symbol' and a[1] in symbols:
163 if a[0] == 'symbol' and a[1] in symbols:
164 return symbols[a[1]](repo, subset, b)
164 return symbols[a[1]](repo, subset, b)
165 raise error.ParseError(_("not a function: %s") % a[1])
165 raise error.ParseError(_("not a function: %s") % a[1])
166
166
167 # functions
167 # functions
168
168
169 def p1(repo, subset, x):
169 def p1(repo, subset, x):
170 ps = set()
170 ps = set()
171 cl = repo.changelog
171 cl = repo.changelog
172 for r in getset(repo, subset, x):
172 for r in getset(repo, subset, x):
173 ps.add(cl.parentrevs(r)[0])
173 ps.add(cl.parentrevs(r)[0])
174 return [r for r in subset if r in ps]
174 return [r for r in subset if r in ps]
175
175
176 def p2(repo, subset, x):
176 def p2(repo, subset, x):
177 ps = set()
177 ps = set()
178 cl = repo.changelog
178 cl = repo.changelog
179 for r in getset(repo, subset, x):
179 for r in getset(repo, subset, x):
180 ps.add(cl.parentrevs(r)[1])
180 ps.add(cl.parentrevs(r)[1])
181 return [r for r in subset if r in ps]
181 return [r for r in subset if r in ps]
182
182
183 def parents(repo, subset, x):
183 def parents(repo, subset, x):
184 ps = set()
184 ps = set()
185 cl = repo.changelog
185 cl = repo.changelog
186 for r in getset(repo, subset, x):
186 for r in getset(repo, subset, x):
187 ps.update(cl.parentrevs(r))
187 ps.update(cl.parentrevs(r))
188 return [r for r in subset if r in ps]
188 return [r for r in subset if r in ps]
189
189
190 def maxrev(repo, subset, x):
190 def maxrev(repo, subset, x):
191 s = getset(repo, subset, x)
191 s = getset(repo, subset, x)
192 if s:
192 if s:
193 m = max(s)
193 m = max(s)
194 if m in subset:
194 if m in subset:
195 return [m]
195 return [m]
196 return []
196 return []
197
197
198 def minrev(repo, subset, x):
198 def minrev(repo, subset, x):
199 s = getset(repo, subset, x)
199 s = getset(repo, subset, x)
200 if s:
200 if s:
201 m = min(s)
201 m = min(s)
202 if m in subset:
202 if m in subset:
203 return [m]
203 return [m]
204 return []
204 return []
205
205
206 def limit(repo, subset, x):
206 def limit(repo, subset, x):
207 l = getargs(x, 2, 2, _("limit wants two arguments"))
207 l = getargs(x, 2, 2, _("limit wants two arguments"))
208 try:
208 try:
209 lim = int(getstring(l[1], _("limit wants a number")))
209 lim = int(getstring(l[1], _("limit wants a number")))
210 except ValueError:
210 except ValueError:
211 raise error.ParseError(_("limit expects a number"))
211 raise error.ParseError(_("limit expects a number"))
212 return getset(repo, subset, l[0])[:lim]
212 return getset(repo, subset, l[0])[:lim]
213
213
214 def children(repo, subset, x):
214 def children(repo, subset, x):
215 cs = set()
215 cs = set()
216 cl = repo.changelog
216 cl = repo.changelog
217 s = set(getset(repo, subset, x))
217 s = set(getset(repo, subset, x))
218 for r in xrange(0, len(repo)):
218 for r in xrange(0, len(repo)):
219 for p in cl.parentrevs(r):
219 for p in cl.parentrevs(r):
220 if p in s:
220 if p in s:
221 cs.add(r)
221 cs.add(r)
222 return [r for r in subset if r in cs]
222 return [r for r in subset if r in cs]
223
223
224 def branch(repo, subset, x):
224 def branch(repo, subset, x):
225 s = getset(repo, range(len(repo)), x)
225 s = getset(repo, range(len(repo)), x)
226 b = set()
226 b = set()
227 for r in s:
227 for r in s:
228 b.add(repo[r].branch())
228 b.add(repo[r].branch())
229 s = set(s)
229 s = set(s)
230 return [r for r in subset if r in s or repo[r].branch() in b]
230 return [r for r in subset if r in s or repo[r].branch() in b]
231
231
232 def ancestor(repo, subset, x):
232 def ancestor(repo, subset, x):
233 l = getargs(x, 2, 2, _("ancestor wants two arguments"))
233 l = getargs(x, 2, 2, _("ancestor wants two arguments"))
234 r = range(len(repo))
234 r = range(len(repo))
235 a = getset(repo, r, l[0])
235 a = getset(repo, r, l[0])
236 b = getset(repo, r, l[1])
236 b = getset(repo, r, l[1])
237 if len(a) != 1 or len(b) != 1:
237 if len(a) != 1 or len(b) != 1:
238 raise error.ParseError(_("ancestor arguments must be single revisions"))
238 raise error.ParseError(_("ancestor arguments must be single revisions"))
239 an = [repo[a[0]].ancestor(repo[b[0]]).rev()]
239 an = [repo[a[0]].ancestor(repo[b[0]]).rev()]
240
240
241 return [r for r in an if r in subset]
241 return [r for r in an if r in subset]
242
242
243 def ancestors(repo, subset, x):
243 def ancestors(repo, subset, x):
244 args = getset(repo, range(len(repo)), x)
244 args = getset(repo, range(len(repo)), x)
245 if not args:
245 if not args:
246 return []
246 return []
247 s = set(repo.changelog.ancestors(*args)) | set(args)
247 s = set(repo.changelog.ancestors(*args)) | set(args)
248 return [r for r in subset if r in s]
248 return [r for r in subset if r in s]
249
249
250 def descendants(repo, subset, x):
250 def descendants(repo, subset, x):
251 args = getset(repo, range(len(repo)), x)
251 args = getset(repo, range(len(repo)), x)
252 if not args:
252 if not args:
253 return []
253 return []
254 s = set(repo.changelog.descendants(*args)) | set(args)
254 s = set(repo.changelog.descendants(*args)) | set(args)
255 return [r for r in subset if r in s]
255 return [r for r in subset if r in s]
256
256
257 def follow(repo, subset, x):
257 def follow(repo, subset, x):
258 getargs(x, 0, 0, _("follow takes no arguments"))
258 getargs(x, 0, 0, _("follow takes no arguments"))
259 p = repo['.'].rev()
259 p = repo['.'].rev()
260 s = set(repo.changelog.ancestors(p)) | set([p])
260 s = set(repo.changelog.ancestors(p)) | set([p])
261 return [r for r in subset if r in s]
261 return [r for r in subset if r in s]
262
262
263 def date(repo, subset, x):
263 def date(repo, subset, x):
264 ds = getstring(x, _("date wants a string"))
264 ds = getstring(x, _("date wants a string"))
265 dm = util.matchdate(ds)
265 dm = util.matchdate(ds)
266 return [r for r in subset if dm(repo[r].date()[0])]
266 return [r for r in subset if dm(repo[r].date()[0])]
267
267
268 def keyword(repo, subset, x):
268 def keyword(repo, subset, x):
269 kw = getstring(x, _("keyword wants a string")).lower()
269 kw = getstring(x, _("keyword wants a string")).lower()
270 l = []
270 l = []
271 for r in subset:
271 for r in subset:
272 c = repo[r]
272 c = repo[r]
273 t = " ".join(c.files() + [c.user(), c.description()])
273 t = " ".join(c.files() + [c.user(), c.description()])
274 if kw in t.lower():
274 if kw in t.lower():
275 l.append(r)
275 l.append(r)
276 return l
276 return l
277
277
278 def grep(repo, subset, x):
278 def grep(repo, subset, x):
279 try:
279 try:
280 gr = re.compile(getstring(x, _("grep wants a string")))
280 gr = re.compile(getstring(x, _("grep wants a string")))
281 except re.error, e:
281 except re.error, e:
282 raise error.ParseError(_('invalid match pattern: %s') % e)
282 raise error.ParseError(_('invalid match pattern: %s') % e)
283 l = []
283 l = []
284 for r in subset:
284 for r in subset:
285 c = repo[r]
285 c = repo[r]
286 for e in c.files() + [c.user(), c.description()]:
286 for e in c.files() + [c.user(), c.description()]:
287 if gr.search(e):
287 if gr.search(e):
288 l.append(r)
288 l.append(r)
289 continue
289 continue
290 return l
290 return l
291
291
292 def author(repo, subset, x):
292 def author(repo, subset, x):
293 n = getstring(x, _("author wants a string")).lower()
293 n = getstring(x, _("author wants a string")).lower()
294 return [r for r in subset if n in repo[r].user().lower()]
294 return [r for r in subset if n in repo[r].user().lower()]
295
295
296 def hasfile(repo, subset, x):
296 def hasfile(repo, subset, x):
297 pat = getstring(x, _("file wants a pattern"))
297 pat = getstring(x, _("file wants a pattern"))
298 m = matchmod.match(repo.root, repo.getcwd(), [pat])
298 m = matchmod.match(repo.root, repo.getcwd(), [pat])
299 s = []
299 s = []
300 for r in subset:
300 for r in subset:
301 for f in repo[r].files():
301 for f in repo[r].files():
302 if m(f):
302 if m(f):
303 s.append(r)
303 s.append(r)
304 continue
304 continue
305 return s
305 return s
306
306
307 def contains(repo, subset, x):
307 def contains(repo, subset, x):
308 pat = getstring(x, _("contains wants a pattern"))
308 pat = getstring(x, _("contains wants a pattern"))
309 m = matchmod.match(repo.root, repo.getcwd(), [pat])
309 m = matchmod.match(repo.root, repo.getcwd(), [pat])
310 s = []
310 s = []
311 if m.files() == [pat]:
311 if m.files() == [pat]:
312 for r in subset:
312 for r in subset:
313 if pat in repo[r]:
313 if pat in repo[r]:
314 s.append(r)
314 s.append(r)
315 continue
315 continue
316 else:
316 else:
317 for r in subset:
317 for r in subset:
318 for f in repo[r].manifest():
318 for f in repo[r].manifest():
319 if m(f):
319 if m(f):
320 s.append(r)
320 s.append(r)
321 continue
321 continue
322 return s
322 return s
323
323
324 def checkstatus(repo, subset, pat, field):
324 def checkstatus(repo, subset, pat, field):
325 m = matchmod.match(repo.root, repo.getcwd(), [pat])
325 m = matchmod.match(repo.root, repo.getcwd(), [pat])
326 s = []
326 s = []
327 fast = (m.files() == [pat])
327 fast = (m.files() == [pat])
328 for r in subset:
328 for r in subset:
329 c = repo[r]
329 c = repo[r]
330 if fast:
330 if fast:
331 if pat not in c.files():
331 if pat not in c.files():
332 continue
332 continue
333 else:
333 else:
334 for f in c.files():
334 for f in c.files():
335 if m(f):
335 if m(f):
336 break
336 break
337 else:
337 else:
338 continue
338 continue
339 files = repo.status(c.p1().node(), c.node())[field]
339 files = repo.status(c.p1().node(), c.node())[field]
340 if fast:
340 if fast:
341 if pat in files:
341 if pat in files:
342 s.append(r)
342 s.append(r)
343 continue
343 continue
344 else:
344 else:
345 for f in files:
345 for f in files:
346 if m(f):
346 if m(f):
347 s.append(r)
347 s.append(r)
348 continue
348 continue
349 return s
349 return s
350
350
351 def modifies(repo, subset, x):
351 def modifies(repo, subset, x):
352 pat = getstring(x, _("modifies wants a pattern"))
352 pat = getstring(x, _("modifies wants a pattern"))
353 return checkstatus(repo, subset, pat, 0)
353 return checkstatus(repo, subset, pat, 0)
354
354
355 def adds(repo, subset, x):
355 def adds(repo, subset, x):
356 pat = getstring(x, _("adds wants a pattern"))
356 pat = getstring(x, _("adds wants a pattern"))
357 return checkstatus(repo, subset, pat, 1)
357 return checkstatus(repo, subset, pat, 1)
358
358
359 def removes(repo, subset, x):
359 def removes(repo, subset, x):
360 pat = getstring(x, _("removes wants a pattern"))
360 pat = getstring(x, _("removes wants a pattern"))
361 return checkstatus(repo, subset, pat, 2)
361 return checkstatus(repo, subset, pat, 2)
362
362
363 def merge(repo, subset, x):
363 def merge(repo, subset, x):
364 getargs(x, 0, 0, _("merge takes no arguments"))
364 getargs(x, 0, 0, _("merge takes no arguments"))
365 cl = repo.changelog
365 cl = repo.changelog
366 return [r for r in subset if cl.parentrevs(r)[1] != -1]
366 return [r for r in subset if cl.parentrevs(r)[1] != -1]
367
367
368 def closed(repo, subset, x):
368 def closed(repo, subset, x):
369 getargs(x, 0, 0, _("closed takes no arguments"))
369 getargs(x, 0, 0, _("closed takes no arguments"))
370 return [r for r in subset if repo[r].extra().get('close')]
370 return [r for r in subset if repo[r].extra().get('close')]
371
371
372 def head(repo, subset, x):
372 def head(repo, subset, x):
373 getargs(x, 0, 0, _("head takes no arguments"))
373 getargs(x, 0, 0, _("head takes no arguments"))
374 hs = set()
374 hs = set()
375 for b, ls in repo.branchmap().iteritems():
375 for b, ls in repo.branchmap().iteritems():
376 hs.update(repo[h].rev() for h in ls)
376 hs.update(repo[h].rev() for h in ls)
377 return [r for r in subset if r in hs]
377 return [r for r in subset if r in hs]
378
378
379 def reverse(repo, subset, x):
379 def reverse(repo, subset, x):
380 l = getset(repo, subset, x)
380 l = getset(repo, subset, x)
381 l.reverse()
381 l.reverse()
382 return l
382 return l
383
383
384 def present(repo, subset, x):
384 def present(repo, subset, x):
385 try:
385 try:
386 return getset(repo, subset, x)
386 return getset(repo, subset, x)
387 except error.RepoLookupError:
387 except error.RepoLookupError:
388 return []
388 return []
389
389
390 def sort(repo, subset, x):
390 def sort(repo, subset, x):
391 l = getargs(x, 1, 2, _("sort wants one or two arguments"))
391 l = getargs(x, 1, 2, _("sort wants one or two arguments"))
392 keys = "rev"
392 keys = "rev"
393 if len(l) == 2:
393 if len(l) == 2:
394 keys = getstring(l[1], _("sort spec must be a string"))
394 keys = getstring(l[1], _("sort spec must be a string"))
395
395
396 s = l[0]
396 s = l[0]
397 keys = keys.split()
397 keys = keys.split()
398 l = []
398 l = []
399 def invert(s):
399 def invert(s):
400 return "".join(chr(255 - ord(c)) for c in s)
400 return "".join(chr(255 - ord(c)) for c in s)
401 for r in getset(repo, subset, s):
401 for r in getset(repo, subset, s):
402 c = repo[r]
402 c = repo[r]
403 e = []
403 e = []
404 for k in keys:
404 for k in keys:
405 if k == 'rev':
405 if k == 'rev':
406 e.append(r)
406 e.append(r)
407 elif k == '-rev':
407 elif k == '-rev':
408 e.append(-r)
408 e.append(-r)
409 elif k == 'branch':
409 elif k == 'branch':
410 e.append(c.branch())
410 e.append(c.branch())
411 elif k == '-branch':
411 elif k == '-branch':
412 e.append(invert(c.branch()))
412 e.append(invert(c.branch()))
413 elif k == 'desc':
413 elif k == 'desc':
414 e.append(c.description())
414 e.append(c.description())
415 elif k == '-desc':
415 elif k == '-desc':
416 e.append(invert(c.description()))
416 e.append(invert(c.description()))
417 elif k in 'user author':
417 elif k in 'user author':
418 e.append(c.user())
418 e.append(c.user())
419 elif k in '-user -author':
419 elif k in '-user -author':
420 e.append(invert(c.user()))
420 e.append(invert(c.user()))
421 elif k == 'date':
421 elif k == 'date':
422 e.append(c.date()[0])
422 e.append(c.date()[0])
423 elif k == '-date':
423 elif k == '-date':
424 e.append(-c.date()[0])
424 e.append(-c.date()[0])
425 else:
425 else:
426 raise error.ParseError(_("unknown sort key %r") % k)
426 raise error.ParseError(_("unknown sort key %r") % k)
427 e.append(r)
427 e.append(r)
428 l.append(e)
428 l.append(e)
429 l.sort()
429 l.sort()
430 return [e[-1] for e in l]
430 return [e[-1] for e in l]
431
431
432 def getall(repo, subset, x):
432 def getall(repo, subset, x):
433 getargs(x, 0, 0, _("all takes no arguments"))
433 getargs(x, 0, 0, _("all takes no arguments"))
434 return subset
434 return subset
435
435
436 def heads(repo, subset, x):
436 def heads(repo, subset, x):
437 s = getset(repo, subset, x)
437 s = getset(repo, subset, x)
438 ps = set(parents(repo, subset, x))
438 ps = set(parents(repo, subset, x))
439 return [r for r in s if r not in ps]
439 return [r for r in s if r not in ps]
440
440
441 def roots(repo, subset, x):
441 def roots(repo, subset, x):
442 s = getset(repo, subset, x)
442 s = getset(repo, subset, x)
443 cs = set(children(repo, subset, x))
443 cs = set(children(repo, subset, x))
444 return [r for r in s if r not in cs]
444 return [r for r in s if r not in cs]
445
445
446 def outgoing(repo, subset, x):
446 def outgoing(repo, subset, x):
447 import hg # avoid start-up nasties
447 import hg # avoid start-up nasties
448 l = getargs(x, 0, 1, _("outgoing wants a repository path"))
448 l = getargs(x, 0, 1, _("outgoing wants a repository path"))
449 dest = l and getstring(l[0], _("outgoing wants a repository path")) or ''
449 dest = l and getstring(l[0], _("outgoing wants a repository path")) or ''
450 dest = repo.ui.expandpath(dest or 'default-push', dest or 'default')
450 dest = repo.ui.expandpath(dest or 'default-push', dest or 'default')
451 dest, branches = hg.parseurl(dest)
451 dest, branches = hg.parseurl(dest)
452 other = hg.repository(hg.remoteui(repo, {}), dest)
452 other = hg.repository(hg.remoteui(repo, {}), dest)
453 repo.ui.pushbuffer()
453 repo.ui.pushbuffer()
454 o = discovery.findoutgoing(repo, other)
454 o = discovery.findoutgoing(repo, other)
455 repo.ui.popbuffer()
455 repo.ui.popbuffer()
456 cl = repo.changelog
456 cl = repo.changelog
457 o = set([cl.rev(r) for r in repo.changelog.nodesbetween(o, None)[0]])
457 o = set([cl.rev(r) for r in repo.changelog.nodesbetween(o, None)[0]])
458 return [r for r in subset if r in o]
458 return [r for r in subset if r in o]
459
459
460 def tagged(repo, subset, x):
460 def tagged(repo, subset, x):
461 getargs(x, 0, 0, _("tagged takes no arguments"))
461 getargs(x, 0, 0, _("tagged takes no arguments"))
462 cl = repo.changelog
462 cl = repo.changelog
463 s = set([cl.rev(n) for t, n in repo.tagslist() if t != 'tip'])
463 s = set([cl.rev(n) for t, n in repo.tagslist() if t != 'tip'])
464 return [r for r in subset if r in s]
464 return [r for r in subset if r in s]
465
465
466 symbols = {
466 symbols = {
467 "adds": adds,
467 "adds": adds,
468 "all": getall,
468 "all": getall,
469 "ancestor": ancestor,
469 "ancestor": ancestor,
470 "ancestors": ancestors,
470 "ancestors": ancestors,
471 "author": author,
471 "author": author,
472 "branch": branch,
472 "branch": branch,
473 "children": children,
473 "children": children,
474 "closed": closed,
474 "closed": closed,
475 "contains": contains,
475 "contains": contains,
476 "date": date,
476 "date": date,
477 "descendants": descendants,
477 "descendants": descendants,
478 "file": hasfile,
478 "file": hasfile,
479 "follow": follow,
479 "follow": follow,
480 "grep": grep,
480 "grep": grep,
481 "head": head,
481 "head": head,
482 "heads": heads,
482 "heads": heads,
483 "keyword": keyword,
483 "keyword": keyword,
484 "limit": limit,
484 "limit": limit,
485 "max": maxrev,
485 "max": maxrev,
486 "min": minrev,
486 "min": minrev,
487 "merge": merge,
487 "merge": merge,
488 "modifies": modifies,
488 "modifies": modifies,
489 "outgoing": outgoing,
489 "outgoing": outgoing,
490 "p1": p1,
490 "p1": p1,
491 "p2": p2,
491 "p2": p2,
492 "parents": parents,
492 "parents": parents,
493 "present": present,
493 "present": present,
494 "removes": removes,
494 "removes": removes,
495 "reverse": reverse,
495 "reverse": reverse,
496 "roots": roots,
496 "roots": roots,
497 "sort": sort,
497 "sort": sort,
498 "tagged": tagged,
498 "tagged": tagged,
499 "user": author,
499 "user": author,
500 }
500 }
501
501
502 methods = {
502 methods = {
503 "range": rangeset,
503 "range": rangeset,
504 "string": stringset,
504 "string": stringset,
505 "symbol": symbolset,
505 "symbol": symbolset,
506 "and": andset,
506 "and": andset,
507 "or": orset,
507 "or": orset,
508 "not": notset,
508 "not": notset,
509 "list": listset,
509 "list": listset,
510 "func": func,
510 "func": func,
511 }
511 }
512
512
513 def optimize(x, small):
513 def optimize(x, small):
514 if x == None:
514 if x == None:
515 return 0, x
515 return 0, x
516
516
517 smallbonus = 1
517 smallbonus = 1
518 if small:
518 if small:
519 smallbonus = .5
519 smallbonus = .5
520
520
521 op = x[0]
521 op = x[0]
522 if op == 'minus':
522 if op == 'minus':
523 return optimize(('and', x[1], ('not', x[2])), small)
523 return optimize(('and', x[1], ('not', x[2])), small)
524 elif op == 'dagrange':
524 elif op == 'dagrange':
525 return optimize(('and', ('func', ('symbol', 'descendants'), x[1]),
525 return optimize(('and', ('func', ('symbol', 'descendants'), x[1]),
526 ('func', ('symbol', 'ancestors'), x[2])), small)
526 ('func', ('symbol', 'ancestors'), x[2])), small)
527 elif op == 'dagrangepre':
527 elif op == 'dagrangepre':
528 return optimize(('func', ('symbol', 'ancestors'), x[1]), small)
528 return optimize(('func', ('symbol', 'ancestors'), x[1]), small)
529 elif op == 'dagrangepost':
529 elif op == 'dagrangepost':
530 return optimize(('func', ('symbol', 'descendants'), x[1]), small)
530 return optimize(('func', ('symbol', 'descendants'), x[1]), small)
531 elif op == 'rangepre':
531 elif op == 'rangepre':
532 return optimize(('range', ('string', '0'), x[1]), small)
532 return optimize(('range', ('string', '0'), x[1]), small)
533 elif op == 'rangepost':
533 elif op == 'rangepost':
534 return optimize(('range', x[1], ('string', 'tip')), small)
534 return optimize(('range', x[1], ('string', 'tip')), small)
535 elif op == 'negate':
535 elif op == 'negate':
536 return optimize(('string',
536 return optimize(('string',
537 '-' + getstring(x[1], _("can't negate that"))), small)
537 '-' + getstring(x[1], _("can't negate that"))), small)
538 elif op in 'string symbol negate':
538 elif op in 'string symbol negate':
539 return smallbonus, x # single revisions are small
539 return smallbonus, x # single revisions are small
540 elif op in ('and', 'dagrange'):
540 elif op == 'and' or op == 'dagrange':
541 wa, ta = optimize(x[1], True)
541 wa, ta = optimize(x[1], True)
542 wb, tb = optimize(x[2], True)
542 wb, tb = optimize(x[2], True)
543 w = min(wa, wb)
543 w = min(wa, wb)
544 if wa > wb:
544 if wa > wb:
545 return w, (op, tb, ta)
545 return w, (op, tb, ta)
546 return w, (op, ta, tb)
546 return w, (op, ta, tb)
547 elif op == 'or':
547 elif op == 'or':
548 wa, ta = optimize(x[1], False)
548 wa, ta = optimize(x[1], False)
549 wb, tb = optimize(x[2], False)
549 wb, tb = optimize(x[2], False)
550 if wb < wa:
550 if wb < wa:
551 wb, wa = wa, wb
551 wb, wa = wa, wb
552 return max(wa, wb), (op, ta, tb)
552 return max(wa, wb), (op, ta, tb)
553 elif op == 'not':
553 elif op == 'not':
554 o = optimize(x[1], not small)
554 o = optimize(x[1], not small)
555 return o[0], (op, o[1])
555 return o[0], (op, o[1])
556 elif op == 'group':
556 elif op == 'group':
557 return optimize(x[1], small)
557 return optimize(x[1], small)
558 elif op in 'range list':
558 elif op in 'range list':
559 wa, ta = optimize(x[1], small)
559 wa, ta = optimize(x[1], small)
560 wb, tb = optimize(x[2], small)
560 wb, tb = optimize(x[2], small)
561 return wa + wb, (op, ta, tb)
561 return wa + wb, (op, ta, tb)
562 elif op == 'func':
562 elif op == 'func':
563 f = getstring(x[1], _("not a symbol"))
563 f = getstring(x[1], _("not a symbol"))
564 wa, ta = optimize(x[2], small)
564 wa, ta = optimize(x[2], small)
565 if f in "grep date user author keyword branch file outgoing":
565 if f in "grep date user author keyword branch file outgoing":
566 w = 10 # slow
566 w = 10 # slow
567 elif f in "modifies adds removes":
567 elif f in "modifies adds removes":
568 w = 30 # slower
568 w = 30 # slower
569 elif f == "contains":
569 elif f == "contains":
570 w = 100 # very slow
570 w = 100 # very slow
571 elif f == "ancestor":
571 elif f == "ancestor":
572 w = 1 * smallbonus
572 w = 1 * smallbonus
573 elif f == "reverse limit":
573 elif f == "reverse limit":
574 w = 0
574 w = 0
575 elif f in "sort":
575 elif f in "sort":
576 w = 10 # assume most sorts look at changelog
576 w = 10 # assume most sorts look at changelog
577 else:
577 else:
578 w = 1
578 w = 1
579 return w + wa, (op, x[1], ta)
579 return w + wa, (op, x[1], ta)
580 return 1, x
580 return 1, x
581
581
582 parse = parser.parser(tokenize, elements).parse
582 parse = parser.parser(tokenize, elements).parse
583
583
584 def match(spec):
584 def match(spec):
585 if not spec:
585 if not spec:
586 raise error.ParseError(_("empty query"))
586 raise error.ParseError(_("empty query"))
587 tree = parse(spec)
587 tree = parse(spec)
588 weight, tree = optimize(tree, True)
588 weight, tree = optimize(tree, True)
589 def mfunc(repo, subset):
589 def mfunc(repo, subset):
590 return getset(repo, subset, tree)
590 return getset(repo, subset, tree)
591 return mfunc
591 return mfunc
@@ -1,450 +1,450
1 # Copyright (C) 2004, 2005 Canonical Ltd
1 # Copyright (C) 2004, 2005 Canonical Ltd
2 #
2 #
3 # This program is free software; you can redistribute it and/or modify
3 # This program is free software; you can redistribute it and/or modify
4 # it under the terms of the GNU General Public License as published by
4 # it under the terms of the GNU General Public License as published by
5 # the Free Software Foundation; either version 2 of the License, or
5 # the Free Software Foundation; either version 2 of the License, or
6 # (at your option) any later version.
6 # (at your option) any later version.
7 #
7 #
8 # This program is distributed in the hope that it will be useful,
8 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
11 # GNU General Public License for more details.
12 #
12 #
13 # You should have received a copy of the GNU General Public License
13 # You should have received a copy of the GNU General Public License
14 # along with this program; if not, write to the Free Software
14 # along with this program; if not, write to the Free Software
15 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
15 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
16
16
17 # mbp: "you know that thing where cvs gives you conflict markers?"
17 # mbp: "you know that thing where cvs gives you conflict markers?"
18 # s: "i hate that."
18 # s: "i hate that."
19
19
20 from i18n import _
20 from i18n import _
21 import util, mdiff
21 import util, mdiff
22 import sys, os
22 import sys, os
23
23
24 class CantReprocessAndShowBase(Exception):
24 class CantReprocessAndShowBase(Exception):
25 pass
25 pass
26
26
27 def intersect(ra, rb):
27 def intersect(ra, rb):
28 """Given two ranges return the range where they intersect or None.
28 """Given two ranges return the range where they intersect or None.
29
29
30 >>> intersect((0, 10), (0, 6))
30 >>> intersect((0, 10), (0, 6))
31 (0, 6)
31 (0, 6)
32 >>> intersect((0, 10), (5, 15))
32 >>> intersect((0, 10), (5, 15))
33 (5, 10)
33 (5, 10)
34 >>> intersect((0, 10), (10, 15))
34 >>> intersect((0, 10), (10, 15))
35 >>> intersect((0, 9), (10, 15))
35 >>> intersect((0, 9), (10, 15))
36 >>> intersect((0, 9), (7, 15))
36 >>> intersect((0, 9), (7, 15))
37 (7, 9)
37 (7, 9)
38 """
38 """
39 assert ra[0] <= ra[1]
39 assert ra[0] <= ra[1]
40 assert rb[0] <= rb[1]
40 assert rb[0] <= rb[1]
41
41
42 sa = max(ra[0], rb[0])
42 sa = max(ra[0], rb[0])
43 sb = min(ra[1], rb[1])
43 sb = min(ra[1], rb[1])
44 if sa < sb:
44 if sa < sb:
45 return sa, sb
45 return sa, sb
46 else:
46 else:
47 return None
47 return None
48
48
49 def compare_range(a, astart, aend, b, bstart, bend):
49 def compare_range(a, astart, aend, b, bstart, bend):
50 """Compare a[astart:aend] == b[bstart:bend], without slicing.
50 """Compare a[astart:aend] == b[bstart:bend], without slicing.
51 """
51 """
52 if (aend - astart) != (bend - bstart):
52 if (aend - astart) != (bend - bstart):
53 return False
53 return False
54 for ia, ib in zip(xrange(astart, aend), xrange(bstart, bend)):
54 for ia, ib in zip(xrange(astart, aend), xrange(bstart, bend)):
55 if a[ia] != b[ib]:
55 if a[ia] != b[ib]:
56 return False
56 return False
57 else:
57 else:
58 return True
58 return True
59
59
60 class Merge3Text(object):
60 class Merge3Text(object):
61 """3-way merge of texts.
61 """3-way merge of texts.
62
62
63 Given strings BASE, OTHER, THIS, tries to produce a combined text
63 Given strings BASE, OTHER, THIS, tries to produce a combined text
64 incorporating the changes from both BASE->OTHER and BASE->THIS."""
64 incorporating the changes from both BASE->OTHER and BASE->THIS."""
65 def __init__(self, basetext, atext, btext, base=None, a=None, b=None):
65 def __init__(self, basetext, atext, btext, base=None, a=None, b=None):
66 self.basetext = basetext
66 self.basetext = basetext
67 self.atext = atext
67 self.atext = atext
68 self.btext = btext
68 self.btext = btext
69 if base is None:
69 if base is None:
70 base = mdiff.splitnewlines(basetext)
70 base = mdiff.splitnewlines(basetext)
71 if a is None:
71 if a is None:
72 a = mdiff.splitnewlines(atext)
72 a = mdiff.splitnewlines(atext)
73 if b is None:
73 if b is None:
74 b = mdiff.splitnewlines(btext)
74 b = mdiff.splitnewlines(btext)
75 self.base = base
75 self.base = base
76 self.a = a
76 self.a = a
77 self.b = b
77 self.b = b
78
78
79 def merge_lines(self,
79 def merge_lines(self,
80 name_a=None,
80 name_a=None,
81 name_b=None,
81 name_b=None,
82 name_base=None,
82 name_base=None,
83 start_marker='<<<<<<<',
83 start_marker='<<<<<<<',
84 mid_marker='=======',
84 mid_marker='=======',
85 end_marker='>>>>>>>',
85 end_marker='>>>>>>>',
86 base_marker=None,
86 base_marker=None,
87 reprocess=False):
87 reprocess=False):
88 """Return merge in cvs-like form.
88 """Return merge in cvs-like form.
89 """
89 """
90 self.conflicts = False
90 self.conflicts = False
91 newline = '\n'
91 newline = '\n'
92 if len(self.a) > 0:
92 if len(self.a) > 0:
93 if self.a[0].endswith('\r\n'):
93 if self.a[0].endswith('\r\n'):
94 newline = '\r\n'
94 newline = '\r\n'
95 elif self.a[0].endswith('\r'):
95 elif self.a[0].endswith('\r'):
96 newline = '\r'
96 newline = '\r'
97 if base_marker and reprocess:
97 if base_marker and reprocess:
98 raise CantReprocessAndShowBase()
98 raise CantReprocessAndShowBase()
99 if name_a:
99 if name_a:
100 start_marker = start_marker + ' ' + name_a
100 start_marker = start_marker + ' ' + name_a
101 if name_b:
101 if name_b:
102 end_marker = end_marker + ' ' + name_b
102 end_marker = end_marker + ' ' + name_b
103 if name_base and base_marker:
103 if name_base and base_marker:
104 base_marker = base_marker + ' ' + name_base
104 base_marker = base_marker + ' ' + name_base
105 merge_regions = self.merge_regions()
105 merge_regions = self.merge_regions()
106 if reprocess is True:
106 if reprocess is True:
107 merge_regions = self.reprocess_merge_regions(merge_regions)
107 merge_regions = self.reprocess_merge_regions(merge_regions)
108 for t in merge_regions:
108 for t in merge_regions:
109 what = t[0]
109 what = t[0]
110 if what == 'unchanged':
110 if what == 'unchanged':
111 for i in range(t[1], t[2]):
111 for i in range(t[1], t[2]):
112 yield self.base[i]
112 yield self.base[i]
113 elif what in ('a', 'same'):
113 elif what == 'a' or what == 'same':
114 for i in range(t[1], t[2]):
114 for i in range(t[1], t[2]):
115 yield self.a[i]
115 yield self.a[i]
116 elif what == 'b':
116 elif what == 'b':
117 for i in range(t[1], t[2]):
117 for i in range(t[1], t[2]):
118 yield self.b[i]
118 yield self.b[i]
119 elif what == 'conflict':
119 elif what == 'conflict':
120 self.conflicts = True
120 self.conflicts = True
121 yield start_marker + newline
121 yield start_marker + newline
122 for i in range(t[3], t[4]):
122 for i in range(t[3], t[4]):
123 yield self.a[i]
123 yield self.a[i]
124 if base_marker is not None:
124 if base_marker is not None:
125 yield base_marker + newline
125 yield base_marker + newline
126 for i in range(t[1], t[2]):
126 for i in range(t[1], t[2]):
127 yield self.base[i]
127 yield self.base[i]
128 yield mid_marker + newline
128 yield mid_marker + newline
129 for i in range(t[5], t[6]):
129 for i in range(t[5], t[6]):
130 yield self.b[i]
130 yield self.b[i]
131 yield end_marker + newline
131 yield end_marker + newline
132 else:
132 else:
133 raise ValueError(what)
133 raise ValueError(what)
134
134
135 def merge_annotated(self):
135 def merge_annotated(self):
136 """Return merge with conflicts, showing origin of lines.
136 """Return merge with conflicts, showing origin of lines.
137
137
138 Most useful for debugging merge.
138 Most useful for debugging merge.
139 """
139 """
140 for t in self.merge_regions():
140 for t in self.merge_regions():
141 what = t[0]
141 what = t[0]
142 if what == 'unchanged':
142 if what == 'unchanged':
143 for i in range(t[1], t[2]):
143 for i in range(t[1], t[2]):
144 yield 'u | ' + self.base[i]
144 yield 'u | ' + self.base[i]
145 elif what in ('a', 'same'):
145 elif what == 'a' or what == 'same':
146 for i in range(t[1], t[2]):
146 for i in range(t[1], t[2]):
147 yield what[0] + ' | ' + self.a[i]
147 yield what[0] + ' | ' + self.a[i]
148 elif what == 'b':
148 elif what == 'b':
149 for i in range(t[1], t[2]):
149 for i in range(t[1], t[2]):
150 yield 'b | ' + self.b[i]
150 yield 'b | ' + self.b[i]
151 elif what == 'conflict':
151 elif what == 'conflict':
152 yield '<<<<\n'
152 yield '<<<<\n'
153 for i in range(t[3], t[4]):
153 for i in range(t[3], t[4]):
154 yield 'A | ' + self.a[i]
154 yield 'A | ' + self.a[i]
155 yield '----\n'
155 yield '----\n'
156 for i in range(t[5], t[6]):
156 for i in range(t[5], t[6]):
157 yield 'B | ' + self.b[i]
157 yield 'B | ' + self.b[i]
158 yield '>>>>\n'
158 yield '>>>>\n'
159 else:
159 else:
160 raise ValueError(what)
160 raise ValueError(what)
161
161
162 def merge_groups(self):
162 def merge_groups(self):
163 """Yield sequence of line groups. Each one is a tuple:
163 """Yield sequence of line groups. Each one is a tuple:
164
164
165 'unchanged', lines
165 'unchanged', lines
166 Lines unchanged from base
166 Lines unchanged from base
167
167
168 'a', lines
168 'a', lines
169 Lines taken from a
169 Lines taken from a
170
170
171 'same', lines
171 'same', lines
172 Lines taken from a (and equal to b)
172 Lines taken from a (and equal to b)
173
173
174 'b', lines
174 'b', lines
175 Lines taken from b
175 Lines taken from b
176
176
177 'conflict', base_lines, a_lines, b_lines
177 'conflict', base_lines, a_lines, b_lines
178 Lines from base were changed to either a or b and conflict.
178 Lines from base were changed to either a or b and conflict.
179 """
179 """
180 for t in self.merge_regions():
180 for t in self.merge_regions():
181 what = t[0]
181 what = t[0]
182 if what == 'unchanged':
182 if what == 'unchanged':
183 yield what, self.base[t[1]:t[2]]
183 yield what, self.base[t[1]:t[2]]
184 elif what in ('a', 'same'):
184 elif what == 'a' or what == 'same':
185 yield what, self.a[t[1]:t[2]]
185 yield what, self.a[t[1]:t[2]]
186 elif what == 'b':
186 elif what == 'b':
187 yield what, self.b[t[1]:t[2]]
187 yield what, self.b[t[1]:t[2]]
188 elif what == 'conflict':
188 elif what == 'conflict':
189 yield (what,
189 yield (what,
190 self.base[t[1]:t[2]],
190 self.base[t[1]:t[2]],
191 self.a[t[3]:t[4]],
191 self.a[t[3]:t[4]],
192 self.b[t[5]:t[6]])
192 self.b[t[5]:t[6]])
193 else:
193 else:
194 raise ValueError(what)
194 raise ValueError(what)
195
195
196 def merge_regions(self):
196 def merge_regions(self):
197 """Return sequences of matching and conflicting regions.
197 """Return sequences of matching and conflicting regions.
198
198
199 This returns tuples, where the first value says what kind we
199 This returns tuples, where the first value says what kind we
200 have:
200 have:
201
201
202 'unchanged', start, end
202 'unchanged', start, end
203 Take a region of base[start:end]
203 Take a region of base[start:end]
204
204
205 'same', astart, aend
205 'same', astart, aend
206 b and a are different from base but give the same result
206 b and a are different from base but give the same result
207
207
208 'a', start, end
208 'a', start, end
209 Non-clashing insertion from a[start:end]
209 Non-clashing insertion from a[start:end]
210
210
211 Method is as follows:
211 Method is as follows:
212
212
213 The two sequences align only on regions which match the base
213 The two sequences align only on regions which match the base
214 and both descendents. These are found by doing a two-way diff
214 and both descendents. These are found by doing a two-way diff
215 of each one against the base, and then finding the
215 of each one against the base, and then finding the
216 intersections between those regions. These "sync regions"
216 intersections between those regions. These "sync regions"
217 are by definition unchanged in both and easily dealt with.
217 are by definition unchanged in both and easily dealt with.
218
218
219 The regions in between can be in any of three cases:
219 The regions in between can be in any of three cases:
220 conflicted, or changed on only one side.
220 conflicted, or changed on only one side.
221 """
221 """
222
222
223 # section a[0:ia] has been disposed of, etc
223 # section a[0:ia] has been disposed of, etc
224 iz = ia = ib = 0
224 iz = ia = ib = 0
225
225
226 for zmatch, zend, amatch, aend, bmatch, bend in self.find_sync_regions():
226 for zmatch, zend, amatch, aend, bmatch, bend in self.find_sync_regions():
227 #print 'match base [%d:%d]' % (zmatch, zend)
227 #print 'match base [%d:%d]' % (zmatch, zend)
228
228
229 matchlen = zend - zmatch
229 matchlen = zend - zmatch
230 assert matchlen >= 0
230 assert matchlen >= 0
231 assert matchlen == (aend - amatch)
231 assert matchlen == (aend - amatch)
232 assert matchlen == (bend - bmatch)
232 assert matchlen == (bend - bmatch)
233
233
234 len_a = amatch - ia
234 len_a = amatch - ia
235 len_b = bmatch - ib
235 len_b = bmatch - ib
236 len_base = zmatch - iz
236 len_base = zmatch - iz
237 assert len_a >= 0
237 assert len_a >= 0
238 assert len_b >= 0
238 assert len_b >= 0
239 assert len_base >= 0
239 assert len_base >= 0
240
240
241 #print 'unmatched a=%d, b=%d' % (len_a, len_b)
241 #print 'unmatched a=%d, b=%d' % (len_a, len_b)
242
242
243 if len_a or len_b:
243 if len_a or len_b:
244 # try to avoid actually slicing the lists
244 # try to avoid actually slicing the lists
245 equal_a = compare_range(self.a, ia, amatch,
245 equal_a = compare_range(self.a, ia, amatch,
246 self.base, iz, zmatch)
246 self.base, iz, zmatch)
247 equal_b = compare_range(self.b, ib, bmatch,
247 equal_b = compare_range(self.b, ib, bmatch,
248 self.base, iz, zmatch)
248 self.base, iz, zmatch)
249 same = compare_range(self.a, ia, amatch,
249 same = compare_range(self.a, ia, amatch,
250 self.b, ib, bmatch)
250 self.b, ib, bmatch)
251
251
252 if same:
252 if same:
253 yield 'same', ia, amatch
253 yield 'same', ia, amatch
254 elif equal_a and not equal_b:
254 elif equal_a and not equal_b:
255 yield 'b', ib, bmatch
255 yield 'b', ib, bmatch
256 elif equal_b and not equal_a:
256 elif equal_b and not equal_a:
257 yield 'a', ia, amatch
257 yield 'a', ia, amatch
258 elif not equal_a and not equal_b:
258 elif not equal_a and not equal_b:
259 yield 'conflict', iz, zmatch, ia, amatch, ib, bmatch
259 yield 'conflict', iz, zmatch, ia, amatch, ib, bmatch
260 else:
260 else:
261 raise AssertionError("can't handle a=b=base but unmatched")
261 raise AssertionError("can't handle a=b=base but unmatched")
262
262
263 ia = amatch
263 ia = amatch
264 ib = bmatch
264 ib = bmatch
265 iz = zmatch
265 iz = zmatch
266
266
267 # if the same part of the base was deleted on both sides
267 # if the same part of the base was deleted on both sides
268 # that's OK, we can just skip it.
268 # that's OK, we can just skip it.
269
269
270
270
271 if matchlen > 0:
271 if matchlen > 0:
272 assert ia == amatch
272 assert ia == amatch
273 assert ib == bmatch
273 assert ib == bmatch
274 assert iz == zmatch
274 assert iz == zmatch
275
275
276 yield 'unchanged', zmatch, zend
276 yield 'unchanged', zmatch, zend
277 iz = zend
277 iz = zend
278 ia = aend
278 ia = aend
279 ib = bend
279 ib = bend
280
280
281 def reprocess_merge_regions(self, merge_regions):
281 def reprocess_merge_regions(self, merge_regions):
282 """Where there are conflict regions, remove the agreed lines.
282 """Where there are conflict regions, remove the agreed lines.
283
283
284 Lines where both A and B have made the same changes are
284 Lines where both A and B have made the same changes are
285 eliminated.
285 eliminated.
286 """
286 """
287 for region in merge_regions:
287 for region in merge_regions:
288 if region[0] != "conflict":
288 if region[0] != "conflict":
289 yield region
289 yield region
290 continue
290 continue
291 type, iz, zmatch, ia, amatch, ib, bmatch = region
291 type, iz, zmatch, ia, amatch, ib, bmatch = region
292 a_region = self.a[ia:amatch]
292 a_region = self.a[ia:amatch]
293 b_region = self.b[ib:bmatch]
293 b_region = self.b[ib:bmatch]
294 matches = mdiff.get_matching_blocks(''.join(a_region),
294 matches = mdiff.get_matching_blocks(''.join(a_region),
295 ''.join(b_region))
295 ''.join(b_region))
296 next_a = ia
296 next_a = ia
297 next_b = ib
297 next_b = ib
298 for region_ia, region_ib, region_len in matches[:-1]:
298 for region_ia, region_ib, region_len in matches[:-1]:
299 region_ia += ia
299 region_ia += ia
300 region_ib += ib
300 region_ib += ib
301 reg = self.mismatch_region(next_a, region_ia, next_b,
301 reg = self.mismatch_region(next_a, region_ia, next_b,
302 region_ib)
302 region_ib)
303 if reg is not None:
303 if reg is not None:
304 yield reg
304 yield reg
305 yield 'same', region_ia, region_len + region_ia
305 yield 'same', region_ia, region_len + region_ia
306 next_a = region_ia + region_len
306 next_a = region_ia + region_len
307 next_b = region_ib + region_len
307 next_b = region_ib + region_len
308 reg = self.mismatch_region(next_a, amatch, next_b, bmatch)
308 reg = self.mismatch_region(next_a, amatch, next_b, bmatch)
309 if reg is not None:
309 if reg is not None:
310 yield reg
310 yield reg
311
311
312 def mismatch_region(next_a, region_ia, next_b, region_ib):
312 def mismatch_region(next_a, region_ia, next_b, region_ib):
313 if next_a < region_ia or next_b < region_ib:
313 if next_a < region_ia or next_b < region_ib:
314 return 'conflict', None, None, next_a, region_ia, next_b, region_ib
314 return 'conflict', None, None, next_a, region_ia, next_b, region_ib
315 mismatch_region = staticmethod(mismatch_region)
315 mismatch_region = staticmethod(mismatch_region)
316
316
317 def find_sync_regions(self):
317 def find_sync_regions(self):
318 """Return a list of sync regions, where both descendents match the base.
318 """Return a list of sync regions, where both descendents match the base.
319
319
320 Generates a list of (base1, base2, a1, a2, b1, b2). There is
320 Generates a list of (base1, base2, a1, a2, b1, b2). There is
321 always a zero-length sync region at the end of all the files.
321 always a zero-length sync region at the end of all the files.
322 """
322 """
323
323
324 ia = ib = 0
324 ia = ib = 0
325 amatches = mdiff.get_matching_blocks(self.basetext, self.atext)
325 amatches = mdiff.get_matching_blocks(self.basetext, self.atext)
326 bmatches = mdiff.get_matching_blocks(self.basetext, self.btext)
326 bmatches = mdiff.get_matching_blocks(self.basetext, self.btext)
327 len_a = len(amatches)
327 len_a = len(amatches)
328 len_b = len(bmatches)
328 len_b = len(bmatches)
329
329
330 sl = []
330 sl = []
331
331
332 while ia < len_a and ib < len_b:
332 while ia < len_a and ib < len_b:
333 abase, amatch, alen = amatches[ia]
333 abase, amatch, alen = amatches[ia]
334 bbase, bmatch, blen = bmatches[ib]
334 bbase, bmatch, blen = bmatches[ib]
335
335
336 # there is an unconflicted block at i; how long does it
336 # there is an unconflicted block at i; how long does it
337 # extend? until whichever one ends earlier.
337 # extend? until whichever one ends earlier.
338 i = intersect((abase, abase + alen), (bbase, bbase + blen))
338 i = intersect((abase, abase + alen), (bbase, bbase + blen))
339 if i:
339 if i:
340 intbase = i[0]
340 intbase = i[0]
341 intend = i[1]
341 intend = i[1]
342 intlen = intend - intbase
342 intlen = intend - intbase
343
343
344 # found a match of base[i[0], i[1]]; this may be less than
344 # found a match of base[i[0], i[1]]; this may be less than
345 # the region that matches in either one
345 # the region that matches in either one
346 assert intlen <= alen
346 assert intlen <= alen
347 assert intlen <= blen
347 assert intlen <= blen
348 assert abase <= intbase
348 assert abase <= intbase
349 assert bbase <= intbase
349 assert bbase <= intbase
350
350
351 asub = amatch + (intbase - abase)
351 asub = amatch + (intbase - abase)
352 bsub = bmatch + (intbase - bbase)
352 bsub = bmatch + (intbase - bbase)
353 aend = asub + intlen
353 aend = asub + intlen
354 bend = bsub + intlen
354 bend = bsub + intlen
355
355
356 assert self.base[intbase:intend] == self.a[asub:aend], \
356 assert self.base[intbase:intend] == self.a[asub:aend], \
357 (self.base[intbase:intend], self.a[asub:aend])
357 (self.base[intbase:intend], self.a[asub:aend])
358
358
359 assert self.base[intbase:intend] == self.b[bsub:bend]
359 assert self.base[intbase:intend] == self.b[bsub:bend]
360
360
361 sl.append((intbase, intend,
361 sl.append((intbase, intend,
362 asub, aend,
362 asub, aend,
363 bsub, bend))
363 bsub, bend))
364
364
365 # advance whichever one ends first in the base text
365 # advance whichever one ends first in the base text
366 if (abase + alen) < (bbase + blen):
366 if (abase + alen) < (bbase + blen):
367 ia += 1
367 ia += 1
368 else:
368 else:
369 ib += 1
369 ib += 1
370
370
371 intbase = len(self.base)
371 intbase = len(self.base)
372 abase = len(self.a)
372 abase = len(self.a)
373 bbase = len(self.b)
373 bbase = len(self.b)
374 sl.append((intbase, intbase, abase, abase, bbase, bbase))
374 sl.append((intbase, intbase, abase, abase, bbase, bbase))
375
375
376 return sl
376 return sl
377
377
378 def find_unconflicted(self):
378 def find_unconflicted(self):
379 """Return a list of ranges in base that are not conflicted."""
379 """Return a list of ranges in base that are not conflicted."""
380 am = mdiff.get_matching_blocks(self.basetext, self.atext)
380 am = mdiff.get_matching_blocks(self.basetext, self.atext)
381 bm = mdiff.get_matching_blocks(self.basetext, self.btext)
381 bm = mdiff.get_matching_blocks(self.basetext, self.btext)
382
382
383 unc = []
383 unc = []
384
384
385 while am and bm:
385 while am and bm:
386 # there is an unconflicted block at i; how long does it
386 # there is an unconflicted block at i; how long does it
387 # extend? until whichever one ends earlier.
387 # extend? until whichever one ends earlier.
388 a1 = am[0][0]
388 a1 = am[0][0]
389 a2 = a1 + am[0][2]
389 a2 = a1 + am[0][2]
390 b1 = bm[0][0]
390 b1 = bm[0][0]
391 b2 = b1 + bm[0][2]
391 b2 = b1 + bm[0][2]
392 i = intersect((a1, a2), (b1, b2))
392 i = intersect((a1, a2), (b1, b2))
393 if i:
393 if i:
394 unc.append(i)
394 unc.append(i)
395
395
396 if a2 < b2:
396 if a2 < b2:
397 del am[0]
397 del am[0]
398 else:
398 else:
399 del bm[0]
399 del bm[0]
400
400
401 return unc
401 return unc
402
402
403 def simplemerge(ui, local, base, other, **opts):
403 def simplemerge(ui, local, base, other, **opts):
404 def readfile(filename):
404 def readfile(filename):
405 f = open(filename, "rb")
405 f = open(filename, "rb")
406 text = f.read()
406 text = f.read()
407 f.close()
407 f.close()
408 if util.binary(text):
408 if util.binary(text):
409 msg = _("%s looks like a binary file.") % filename
409 msg = _("%s looks like a binary file.") % filename
410 if not opts.get('text'):
410 if not opts.get('text'):
411 raise util.Abort(msg)
411 raise util.Abort(msg)
412 elif not opts.get('quiet'):
412 elif not opts.get('quiet'):
413 ui.warn(_('warning: %s\n') % msg)
413 ui.warn(_('warning: %s\n') % msg)
414 return text
414 return text
415
415
416 name_a = local
416 name_a = local
417 name_b = other
417 name_b = other
418 labels = opts.get('label', [])
418 labels = opts.get('label', [])
419 if labels:
419 if labels:
420 name_a = labels.pop(0)
420 name_a = labels.pop(0)
421 if labels:
421 if labels:
422 name_b = labels.pop(0)
422 name_b = labels.pop(0)
423 if labels:
423 if labels:
424 raise util.Abort(_("can only specify two labels."))
424 raise util.Abort(_("can only specify two labels."))
425
425
426 localtext = readfile(local)
426 localtext = readfile(local)
427 basetext = readfile(base)
427 basetext = readfile(base)
428 othertext = readfile(other)
428 othertext = readfile(other)
429
429
430 local = os.path.realpath(local)
430 local = os.path.realpath(local)
431 if not opts.get('print'):
431 if not opts.get('print'):
432 opener = util.opener(os.path.dirname(local))
432 opener = util.opener(os.path.dirname(local))
433 out = opener(os.path.basename(local), "w", atomictemp=True)
433 out = opener(os.path.basename(local), "w", atomictemp=True)
434 else:
434 else:
435 out = sys.stdout
435 out = sys.stdout
436
436
437 reprocess = not opts.get('no_minimal')
437 reprocess = not opts.get('no_minimal')
438
438
439 m3 = Merge3Text(basetext, localtext, othertext)
439 m3 = Merge3Text(basetext, localtext, othertext)
440 for line in m3.merge_lines(name_a=name_a, name_b=name_b,
440 for line in m3.merge_lines(name_a=name_a, name_b=name_b,
441 reprocess=reprocess):
441 reprocess=reprocess):
442 out.write(line)
442 out.write(line)
443
443
444 if not opts.get('print'):
444 if not opts.get('print'):
445 out.rename()
445 out.rename()
446
446
447 if m3.conflicts:
447 if m3.conflicts:
448 if not opts.get('quiet'):
448 if not opts.get('quiet'):
449 ui.warn(_("warning: conflicts during merge.\n"))
449 ui.warn(_("warning: conflicts during merge.\n"))
450 return 1
450 return 1
@@ -1,1448 +1,1448
1 # util.py - Mercurial utility functions and platform specfic implementations
1 # util.py - Mercurial utility functions and platform specfic implementations
2 #
2 #
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
6 #
6 #
7 # This software may be used and distributed according to the terms of the
7 # This software may be used and distributed according to the terms of the
8 # GNU General Public License version 2 or any later version.
8 # GNU General Public License version 2 or any later version.
9
9
10 """Mercurial utility functions and platform specfic implementations.
10 """Mercurial utility functions and platform specfic implementations.
11
11
12 This contains helper routines that are independent of the SCM core and
12 This contains helper routines that are independent of the SCM core and
13 hide platform-specific details from the core.
13 hide platform-specific details from the core.
14 """
14 """
15
15
16 from i18n import _
16 from i18n import _
17 import error, osutil, encoding
17 import error, osutil, encoding
18 import errno, re, shutil, sys, tempfile, traceback
18 import errno, re, shutil, sys, tempfile, traceback
19 import os, stat, time, calendar, textwrap, unicodedata, signal
19 import os, stat, time, calendar, textwrap, unicodedata, signal
20 import imp, socket
20 import imp, socket
21
21
22 # Python compatibility
22 # Python compatibility
23
23
24 def sha1(s):
24 def sha1(s):
25 return _fastsha1(s)
25 return _fastsha1(s)
26
26
27 def _fastsha1(s):
27 def _fastsha1(s):
28 # This function will import sha1 from hashlib or sha (whichever is
28 # This function will import sha1 from hashlib or sha (whichever is
29 # available) and overwrite itself with it on the first call.
29 # available) and overwrite itself with it on the first call.
30 # Subsequent calls will go directly to the imported function.
30 # Subsequent calls will go directly to the imported function.
31 if sys.version_info >= (2, 5):
31 if sys.version_info >= (2, 5):
32 from hashlib import sha1 as _sha1
32 from hashlib import sha1 as _sha1
33 else:
33 else:
34 from sha import sha as _sha1
34 from sha import sha as _sha1
35 global _fastsha1, sha1
35 global _fastsha1, sha1
36 _fastsha1 = sha1 = _sha1
36 _fastsha1 = sha1 = _sha1
37 return _sha1(s)
37 return _sha1(s)
38
38
39 import __builtin__
39 import __builtin__
40
40
41 if sys.version_info[0] < 3:
41 if sys.version_info[0] < 3:
42 def fakebuffer(sliceable, offset=0):
42 def fakebuffer(sliceable, offset=0):
43 return sliceable[offset:]
43 return sliceable[offset:]
44 else:
44 else:
45 def fakebuffer(sliceable, offset=0):
45 def fakebuffer(sliceable, offset=0):
46 return memoryview(sliceable)[offset:]
46 return memoryview(sliceable)[offset:]
47 try:
47 try:
48 buffer
48 buffer
49 except NameError:
49 except NameError:
50 __builtin__.buffer = fakebuffer
50 __builtin__.buffer = fakebuffer
51
51
52 import subprocess
52 import subprocess
53 closefds = os.name == 'posix'
53 closefds = os.name == 'posix'
54
54
55 def popen2(cmd, env=None, newlines=False):
55 def popen2(cmd, env=None, newlines=False):
56 # Setting bufsize to -1 lets the system decide the buffer size.
56 # Setting bufsize to -1 lets the system decide the buffer size.
57 # The default for bufsize is 0, meaning unbuffered. This leads to
57 # The default for bufsize is 0, meaning unbuffered. This leads to
58 # poor performance on Mac OS X: http://bugs.python.org/issue4194
58 # poor performance on Mac OS X: http://bugs.python.org/issue4194
59 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
59 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
60 close_fds=closefds,
60 close_fds=closefds,
61 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
61 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
62 universal_newlines=newlines,
62 universal_newlines=newlines,
63 env=env)
63 env=env)
64 return p.stdin, p.stdout
64 return p.stdin, p.stdout
65
65
66 def popen3(cmd, env=None, newlines=False):
66 def popen3(cmd, env=None, newlines=False):
67 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
67 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
68 close_fds=closefds,
68 close_fds=closefds,
69 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
69 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
70 stderr=subprocess.PIPE,
70 stderr=subprocess.PIPE,
71 universal_newlines=newlines,
71 universal_newlines=newlines,
72 env=env)
72 env=env)
73 return p.stdin, p.stdout, p.stderr
73 return p.stdin, p.stdout, p.stderr
74
74
75 def version():
75 def version():
76 """Return version information if available."""
76 """Return version information if available."""
77 try:
77 try:
78 import __version__
78 import __version__
79 return __version__.version
79 return __version__.version
80 except ImportError:
80 except ImportError:
81 return 'unknown'
81 return 'unknown'
82
82
83 # used by parsedate
83 # used by parsedate
84 defaultdateformats = (
84 defaultdateformats = (
85 '%Y-%m-%d %H:%M:%S',
85 '%Y-%m-%d %H:%M:%S',
86 '%Y-%m-%d %I:%M:%S%p',
86 '%Y-%m-%d %I:%M:%S%p',
87 '%Y-%m-%d %H:%M',
87 '%Y-%m-%d %H:%M',
88 '%Y-%m-%d %I:%M%p',
88 '%Y-%m-%d %I:%M%p',
89 '%Y-%m-%d',
89 '%Y-%m-%d',
90 '%m-%d',
90 '%m-%d',
91 '%m/%d',
91 '%m/%d',
92 '%m/%d/%y',
92 '%m/%d/%y',
93 '%m/%d/%Y',
93 '%m/%d/%Y',
94 '%a %b %d %H:%M:%S %Y',
94 '%a %b %d %H:%M:%S %Y',
95 '%a %b %d %I:%M:%S%p %Y',
95 '%a %b %d %I:%M:%S%p %Y',
96 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
96 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
97 '%b %d %H:%M:%S %Y',
97 '%b %d %H:%M:%S %Y',
98 '%b %d %I:%M:%S%p %Y',
98 '%b %d %I:%M:%S%p %Y',
99 '%b %d %H:%M:%S',
99 '%b %d %H:%M:%S',
100 '%b %d %I:%M:%S%p',
100 '%b %d %I:%M:%S%p',
101 '%b %d %H:%M',
101 '%b %d %H:%M',
102 '%b %d %I:%M%p',
102 '%b %d %I:%M%p',
103 '%b %d %Y',
103 '%b %d %Y',
104 '%b %d',
104 '%b %d',
105 '%H:%M:%S',
105 '%H:%M:%S',
106 '%I:%M:%S%p',
106 '%I:%M:%S%p',
107 '%H:%M',
107 '%H:%M',
108 '%I:%M%p',
108 '%I:%M%p',
109 )
109 )
110
110
111 extendeddateformats = defaultdateformats + (
111 extendeddateformats = defaultdateformats + (
112 "%Y",
112 "%Y",
113 "%Y-%m",
113 "%Y-%m",
114 "%b",
114 "%b",
115 "%b %Y",
115 "%b %Y",
116 )
116 )
117
117
def cachefunc(func):
    '''cache the result of function calls'''
    # XXX doesn't handle keywords args
    cache = {}
    if func.func_code.co_argcount == 1:
        # we gain a small amount of time because
        # we don't need to pack/unpack the list
        def f(arg):
            if arg not in cache:
                cache[arg] = func(arg)
            return cache[arg]
    else:
        def f(*args):
            if args not in cache:
                cache[args] = func(*args)
            return cache[args]

    return f
136
136
def lrucachefunc(func):
    '''cache most recent results of function calls'''
    # keeps at most 21 entries; `order` tracks recency, oldest first
    cache = {}
    order = []
    if func.func_code.co_argcount == 1:
        def f(arg):
            if arg not in cache:
                if len(cache) > 20:
                    del cache[order.pop(0)]
                cache[arg] = func(arg)
            else:
                order.remove(arg)
            order.append(arg)
            return cache[arg]
    else:
        def f(*args):
            if args not in cache:
                if len(cache) > 20:
                    del cache[order.pop(0)]
                cache[args] = func(*args)
            else:
                order.remove(args)
            order.append(args)
            return cache[args]

    return f
163
163
class propertycache(object):
    """Descriptor caching the wrapped method's result on first access.

    The computed value is stored on the instance under the same name,
    shadowing the descriptor, so later accesses skip the function call.
    """
    def __init__(self, func):
        self.func = func
        self.name = func.__name__
    def __get__(self, obj, type=None):
        result = self.func(obj)
        # replace the descriptor on the instance with the computed value
        setattr(obj, self.name, result)
        return result
172
172
def pipefilter(s, cmd):
    '''filter string S through command CMD, returning its output'''
    # closefds is a platform-dependent module global (posix/windows)
    p = subprocess.Popen(cmd, shell=True, close_fds=closefds,
                         stdin=subprocess.PIPE, stdout=subprocess.PIPE)
    pout, perr = p.communicate(s)
    return pout
179
179
def tempfilter(s, cmd):
    '''filter string S through a pair of temporary files with CMD.
    CMD is used as a template to create the real command to be run,
    with the strings INFILE and OUTFILE replaced by the real names of
    the temporary files generated.'''
    inname, outname = None, None
    try:
        infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
        fp = os.fdopen(infd, 'wb')
        fp.write(s)
        fp.close()
        outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
        os.close(outfd)
        cmd = cmd.replace('INFILE', inname)
        cmd = cmd.replace('OUTFILE', outname)
        code = os.system(cmd)
        if sys.platform == 'OpenVMS' and code & 1:
            # on OpenVMS an odd status means success
            code = 0
        if code:
            raise Abort(_("command '%s' failed: %s") %
                        (cmd, explain_exit(code)))
        return open(outname, 'rb').read()
    finally:
        # best-effort cleanup of both temp files
        try:
            if inname:
                os.unlink(inname)
        except OSError:
            pass
        try:
            if outname:
                os.unlink(outname)
        except OSError:
            pass
213
213
# maps filter-spec prefixes to their implementations; see filter()
filtertable = {
    'tempfile:': tempfilter,
    'pipe:': pipefilter,
    }
218
218
def filter(s, cmd):
    "filter a string through a command that transforms its input to its output"
    for name, fn in filtertable.iteritems():
        if cmd.startswith(name):
            return fn(s, cmd[len(name):].lstrip())
    # no explicit prefix: default to a shell pipe
    return pipefilter(s, cmd)
225
225
def binary(s):
    """return true if a string is binary data"""
    # a NUL byte is the conventional heuristic for non-text content
    return bool(s and '\0' in s)
229
229
def increasingchunks(source, min=1024, max=65536):
    '''return no less than min bytes per chunk while data remains,
    doubling min after each chunk until it reaches max'''
    def log2(x):
        if not x:
            return 0
        i = 0
        while x:
            x >>= 1
            i += 1
        return i - 1

    buf = []
    blen = 0
    for chunk in source:
        buf.append(chunk)
        blen += len(chunk)
        if blen >= min:
            if min < max:
                # double the threshold, but never less than the
                # largest power of two not exceeding what we just saw
                min = min << 1
                nmin = 1 << log2(blen)
                if nmin > min:
                    min = nmin
                if min > max:
                    min = max
            yield ''.join(buf)
            blen = 0
            buf = []
    if buf:
        # flush whatever is left, even if below the threshold
        yield ''.join(buf)
260
260
# convenience alias; Abort is defined in mercurial.error
Abort = error.Abort
262
262
def always(fn):
    """match predicate that accepts every file name"""
    return True
265
265
def never(fn):
    """match predicate that rejects every file name"""
    return False
268
268
def pathto(root, n1, n2):
    '''return the relative path from one place to another.
    root should use os.sep to separate directories
    n1 should use os.sep to separate directories
    n2 should use "/" to separate directories
    returns an os.sep-separated path.

    If n1 is a relative path, it's assumed it's
    relative to root.
    n2 should always be relative to root.
    '''
    if not n1:
        return localpath(n2)
    if os.path.isabs(n1):
        if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
            # different drives (Windows): no relative path exists
            return os.path.join(root, localpath(n2))
        n2 = '/'.join((pconvert(root), n2))
    # strip the common prefix, then climb out of what remains of n1
    a, b = splitpath(n1), n2.split('/')
    a.reverse()
    b.reverse()
    while a and b and a[-1] == b[-1]:
        a.pop()
        b.pop()
    b.reverse()
    return os.sep.join((['..'] * len(a)) + b) or '.'
294
294
def canonpath(root, cwd, myname, auditor=None):
    """return the canonical path of myname, given cwd and root"""
    if endswithsep(root):
        rootsep = root
    else:
        rootsep = root + os.sep
    name = myname
    if not os.path.isabs(name):
        name = os.path.join(root, cwd, name)
    name = os.path.normpath(name)
    if auditor is None:
        auditor = path_auditor(root)
    if name != rootsep and name.startswith(rootsep):
        # fast path: name is textually inside root
        name = name[len(rootsep):]
        auditor(name)
        return pconvert(name)
    elif name == root:
        return ''
    else:
        # Determine whether `name' is in the hierarchy at or beneath `root',
        # by iterating name=dirname(name) until that causes no change (can't
        # check name == '/', because that doesn't work on windows). For each
        # `name', compare dev/inode numbers. If they match, the list `rel'
        # holds the reversed list of components making up the relative file
        # name we want.
        root_st = os.stat(root)
        rel = []
        while True:
            try:
                name_st = os.stat(name)
            except OSError:
                break
            if samestat(name_st, root_st):
                if not rel:
                    # name was actually the same as root (maybe a symlink)
                    return ''
                rel.reverse()
                name = os.path.join(*rel)
                auditor(name)
                return pconvert(name)
            dirname, basename = os.path.split(name)
            rel.append(basename)
            if dirname == name:
                break
            name = dirname

        raise Abort('%s not under root' % myname)
342
342
343 _hgexecutable = None
343 _hgexecutable = None
344
344
def main_is_frozen():
    """return True if we are a frozen executable.

    The code supports py2exe (most common, Windows only) and tools/freeze
    (portable, not much used).
    """
    return (hasattr(sys, "frozen") or # new py2exe
            hasattr(sys, "importers") or # old py2exe
            imp.is_frozen("__main__")) # tools/freeze
354
354
def hgexecutable():
    """return location of the 'hg' executable.

    Defaults to $HG or 'hg' in the search path.
    """
    if _hgexecutable is None:
        hg = os.environ.get('HG')
        if hg:
            set_hgexecutable(hg)
        elif main_is_frozen():
            # frozen binaries are themselves the hg executable
            set_hgexecutable(sys.executable)
        else:
            exe = find_exe('hg') or os.path.basename(sys.argv[0])
            set_hgexecutable(exe)
    return _hgexecutable
370
370
def set_hgexecutable(path):
    """set location of the 'hg' executable"""
    global _hgexecutable
    _hgexecutable = path
375
375
def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None, out=None):
    '''enhanced shell command execution.
    run with environment maybe modified, maybe in different dir.

    if command fails and onerr is None, return status.  if ui object,
    print error message and return status, else raise onerr object as
    exception.

    if out is specified, it is assumed to be a file-like object that has a
    write() method. stdout and stderr will be redirected to out.'''
    # NOTE: environ default is mutable but only read, never mutated here
    def py2shell(val):
        'convert python object into string that is useful to shell'
        if val is None or val is False:
            return '0'
        if val is True:
            return '1'
        return str(val)
    origcmd = cmd
    if os.name == 'nt':
        # cmd.exe needs the whole command quoted once more
        cmd = '"%s"' % cmd
    env = dict(os.environ)
    env.update((k, py2shell(v)) for k, v in environ.iteritems())
    env['HG'] = hgexecutable()
    if out is None:
        rc = subprocess.call(cmd, shell=True, close_fds=closefds,
                             env=env, cwd=cwd)
    else:
        proc = subprocess.Popen(cmd, shell=True, close_fds=closefds,
                                env=env, cwd=cwd, stdout=subprocess.PIPE,
                                stderr=subprocess.STDOUT)
        for line in proc.stdout:
            out.write(line)
        proc.wait()
        rc = proc.returncode
    if sys.platform == 'OpenVMS' and rc & 1:
        # on OpenVMS an odd status means success
        rc = 0
    if rc and onerr:
        errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
                            explain_exit(rc)[0])
        if errprefix:
            errmsg = '%s: %s' % (errprefix, errmsg)
        try:
            onerr.warn(errmsg + '\n')
        except AttributeError:
            raise onerr(errmsg)
    return rc
422
422
def checksignature(func):
    '''wrap a function with code to check for calling errors'''
    def check(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except TypeError:
            # a one-frame traceback means the TypeError came from the
            # call itself (bad signature), not from inside func
            if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
                raise error.SignatureError
            raise

    return check
434
434
def unlink(f):
    """unlink and remove the directory if it is empty"""
    os.unlink(f)
    # try removing directories that might now be empty
    try:
        os.removedirs(os.path.dirname(f))
    except OSError:
        # directory not empty (or already gone): leave it alone
        pass
443
443
444 def copyfile(src, dest):
444 def copyfile(src, dest):
445 "copy a file, preserving mode and atime/mtime"
445 "copy a file, preserving mode and atime/mtime"
446 if os.path.islink(src):
446 if os.path.islink(src):
447 try:
447 try:
448 os.unlink(dest)
448 os.unlink(dest)
449 except:
449 except:
450 pass
450 pass
451 os.symlink(os.readlink(src), dest)
451 os.symlink(os.readlink(src), dest)
452 else:
452 else:
453 try:
453 try:
454 shutil.copyfile(src, dest)
454 shutil.copyfile(src, dest)
455 shutil.copystat(src, dest)
455 shutil.copystat(src, dest)
456 except shutil.Error, inst:
456 except shutil.Error, inst:
457 raise Abort(str(inst))
457 raise Abort(str(inst))
458
458
def copyfiles(src, dst, hardlink=None):
    """Copy a directory tree using hardlinks if possible"""

    if hardlink is None:
        # only attempt hardlinks when src and dst share a device
        hardlink = (os.stat(src).st_dev ==
                    os.stat(os.path.dirname(dst)).st_dev)

    num = 0
    if os.path.isdir(src):
        os.mkdir(dst)
        for name, kind in osutil.listdir(src):
            srcname = os.path.join(src, name)
            dstname = os.path.join(dst, name)
            hardlink, n = copyfiles(srcname, dstname, hardlink)
            num += n
    else:
        if hardlink:
            try:
                os_link(src, dst)
            except (IOError, OSError):
                # fall back to plain copies for the rest of the tree
                hardlink = False
                shutil.copy(src, dst)
        else:
            shutil.copy(src, dst)
        num += 1

    return hardlink, num
486
486
487 class path_auditor(object):
487 class path_auditor(object):
488 '''ensure that a filesystem path contains no banned components.
488 '''ensure that a filesystem path contains no banned components.
489 the following properties of a path are checked:
489 the following properties of a path are checked:
490
490
491 - under top-level .hg
491 - under top-level .hg
492 - starts at the root of a windows drive
492 - starts at the root of a windows drive
493 - contains ".."
493 - contains ".."
494 - traverses a symlink (e.g. a/symlink_here/b)
494 - traverses a symlink (e.g. a/symlink_here/b)
495 - inside a nested repository (a callback can be used to approve
495 - inside a nested repository (a callback can be used to approve
496 some nested repositories, e.g., subrepositories)
496 some nested repositories, e.g., subrepositories)
497 '''
497 '''
498
498
499 def __init__(self, root, callback=None):
499 def __init__(self, root, callback=None):
500 self.audited = set()
500 self.audited = set()
501 self.auditeddir = set()
501 self.auditeddir = set()
502 self.root = root
502 self.root = root
503 self.callback = callback
503 self.callback = callback
504
504
505 def __call__(self, path):
505 def __call__(self, path):
506 if path in self.audited:
506 if path in self.audited:
507 return
507 return
508 normpath = os.path.normcase(path)
508 normpath = os.path.normcase(path)
509 parts = splitpath(normpath)
509 parts = splitpath(normpath)
510 if (os.path.splitdrive(path)[0]
510 if (os.path.splitdrive(path)[0]
511 or parts[0].lower() in ('.hg', '.hg.', '')
511 or parts[0].lower() in ('.hg', '.hg.', '')
512 or os.pardir in parts):
512 or os.pardir in parts):
513 raise Abort(_("path contains illegal component: %s") % path)
513 raise Abort(_("path contains illegal component: %s") % path)
514 if '.hg' in path.lower():
514 if '.hg' in path.lower():
515 lparts = [p.lower() for p in parts]
515 lparts = [p.lower() for p in parts]
516 for p in '.hg', '.hg.':
516 for p in '.hg', '.hg.':
517 if p in lparts[1:]:
517 if p in lparts[1:]:
518 pos = lparts.index(p)
518 pos = lparts.index(p)
519 base = os.path.join(*parts[:pos])
519 base = os.path.join(*parts[:pos])
520 raise Abort(_('path %r is inside repo %r') % (path, base))
520 raise Abort(_('path %r is inside repo %r') % (path, base))
521 def check(prefix):
521 def check(prefix):
522 curpath = os.path.join(self.root, prefix)
522 curpath = os.path.join(self.root, prefix)
523 try:
523 try:
524 st = os.lstat(curpath)
524 st = os.lstat(curpath)
525 except OSError, err:
525 except OSError, err:
526 # EINVAL can be raised as invalid path syntax under win32.
526 # EINVAL can be raised as invalid path syntax under win32.
527 # They must be ignored for patterns can be checked too.
527 # They must be ignored for patterns can be checked too.
528 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
528 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
529 raise
529 raise
530 else:
530 else:
531 if stat.S_ISLNK(st.st_mode):
531 if stat.S_ISLNK(st.st_mode):
532 raise Abort(_('path %r traverses symbolic link %r') %
532 raise Abort(_('path %r traverses symbolic link %r') %
533 (path, prefix))
533 (path, prefix))
534 elif (stat.S_ISDIR(st.st_mode) and
534 elif (stat.S_ISDIR(st.st_mode) and
535 os.path.isdir(os.path.join(curpath, '.hg'))):
535 os.path.isdir(os.path.join(curpath, '.hg'))):
536 if not self.callback or not self.callback(curpath):
536 if not self.callback or not self.callback(curpath):
537 raise Abort(_('path %r is inside repo %r') %
537 raise Abort(_('path %r is inside repo %r') %
538 (path, prefix))
538 (path, prefix))
539 parts.pop()
539 parts.pop()
540 prefixes = []
540 prefixes = []
541 while parts:
541 while parts:
542 prefix = os.sep.join(parts)
542 prefix = os.sep.join(parts)
543 if prefix in self.auditeddir:
543 if prefix in self.auditeddir:
544 break
544 break
545 check(prefix)
545 check(prefix)
546 prefixes.append(prefix)
546 prefixes.append(prefix)
547 parts.pop()
547 parts.pop()
548
548
549 self.audited.add(path)
549 self.audited.add(path)
550 # only add prefixes to the cache after checking everything: we don't
550 # only add prefixes to the cache after checking everything: we don't
551 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
551 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
552 self.auditeddir.update(prefixes)
552 self.auditeddir.update(prefixes)
553
553
def nlinks(pathname):
    """Return number of hardlinks for the given file."""
    return os.lstat(pathname).st_nlink
557
557
# hardlink support is platform-dependent; fall back to a stub that raises
if hasattr(os, 'link'):
    os_link = os.link
else:
    def os_link(src, dst):
        raise OSError(0, _("Hardlinks not supported"))
563
563
def lookup_reg(key, name=None, scope=None):
    """Stub for Windows registry lookup; non-Windows platforms have none."""
    return None
566
566
def hidewindow():
    """Hide current shell window.

    Used to hide the window opened when starting asynchronous
    child process under Windows, unneeded on other systems.
    """
    pass
574
574
# pull in the platform-specific implementations; these may override
# some of the generic definitions above
if os.name == 'nt':
    from windows import *
else:
    from posix import *
579
579
580 def makelock(info, pathname):
580 def makelock(info, pathname):
581 try:
581 try:
582 return os.symlink(info, pathname)
582 return os.symlink(info, pathname)
583 except OSError, why:
583 except OSError, why:
584 if why.errno == errno.EEXIST:
584 if why.errno == errno.EEXIST:
585 raise
585 raise
586 except AttributeError: # no symlink in os
586 except AttributeError: # no symlink in os
587 pass
587 pass
588
588
589 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
589 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
590 os.write(ld, info)
590 os.write(ld, info)
591 os.close(ld)
591 os.close(ld)
592
592
593 def readlock(pathname):
593 def readlock(pathname):
594 try:
594 try:
595 return os.readlink(pathname)
595 return os.readlink(pathname)
596 except OSError, why:
596 except OSError, why:
597 if why.errno not in (errno.EINVAL, errno.ENOSYS):
597 if why.errno not in (errno.EINVAL, errno.ENOSYS):
598 raise
598 raise
599 except AttributeError: # no symlink in os
599 except AttributeError: # no symlink in os
600 pass
600 pass
601 return posixfile(pathname).read()
601 return posixfile(pathname).read()
602
602
def fstat(fp):
    '''stat file object that may not have fileno method.'''
    try:
        return os.fstat(fp.fileno())
    except AttributeError:
        # no fileno: fall back to stat'ing by name
        return os.stat(fp.name)
609
609
# File system features
def checkcase(path):
    """
    Check whether the given path is on a case-sensitive filesystem

    Requires a path (like /foo/.hg) ending with a foldable final
    directory component.
    """
    s1 = os.stat(path)
    d, b = os.path.split(path)
    p2 = os.path.join(d, b.upper())
    if path == p2:
        p2 = os.path.join(d, b.lower())
    try:
        s2 = os.stat(p2)
        if s2 == s1:
            # case-flipped name resolves to the same file: insensitive
            return False
        return True
    except OSError:
        # case-flipped name doesn't exist: sensitive
        return True
631
631
632 _fspathcache = {}
632 _fspathcache = {}
633 def fspath(name, root):
633 def fspath(name, root):
634 '''Get name in the case stored in the filesystem
634 '''Get name in the case stored in the filesystem
635
635
636 The name is either relative to root, or it is an absolute path starting
636 The name is either relative to root, or it is an absolute path starting
637 with root. Note that this function is unnecessary, and should not be
637 with root. Note that this function is unnecessary, and should not be
638 called, for case-sensitive filesystems (simply because it's expensive).
638 called, for case-sensitive filesystems (simply because it's expensive).
639 '''
639 '''
640 # If name is absolute, make it relative
640 # If name is absolute, make it relative
641 if name.lower().startswith(root.lower()):
641 if name.lower().startswith(root.lower()):
642 l = len(root)
642 l = len(root)
643 if name[l] in (os.sep, os.altsep):
643 if name[l] == os.sep or name[l] == os.altsep:
644 l = l + 1
644 l = l + 1
645 name = name[l:]
645 name = name[l:]
646
646
647 if not os.path.lexists(os.path.join(root, name)):
647 if not os.path.lexists(os.path.join(root, name)):
648 return None
648 return None
649
649
650 seps = os.sep
650 seps = os.sep
651 if os.altsep:
651 if os.altsep:
652 seps = seps + os.altsep
652 seps = seps + os.altsep
653 # Protect backslashes. This gets silly very quickly.
653 # Protect backslashes. This gets silly very quickly.
654 seps.replace('\\','\\\\')
654 seps.replace('\\','\\\\')
655 pattern = re.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
655 pattern = re.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
656 dir = os.path.normcase(os.path.normpath(root))
656 dir = os.path.normcase(os.path.normpath(root))
657 result = []
657 result = []
658 for part, sep in pattern.findall(name):
658 for part, sep in pattern.findall(name):
659 if sep:
659 if sep:
660 result.append(sep)
660 result.append(sep)
661 continue
661 continue
662
662
663 if dir not in _fspathcache:
663 if dir not in _fspathcache:
664 _fspathcache[dir] = os.listdir(dir)
664 _fspathcache[dir] = os.listdir(dir)
665 contents = _fspathcache[dir]
665 contents = _fspathcache[dir]
666
666
667 lpart = part.lower()
667 lpart = part.lower()
668 lenp = len(part)
668 lenp = len(part)
669 for n in contents:
669 for n in contents:
670 if lenp == len(n) and n.lower() == lpart:
670 if lenp == len(n) and n.lower() == lpart:
671 result.append(n)
671 result.append(n)
672 break
672 break
673 else:
673 else:
674 # Cannot happen, as the file exists!
674 # Cannot happen, as the file exists!
675 result.append(part)
675 result.append(part)
676 dir = os.path.join(dir, lpart)
676 dir = os.path.join(dir, lpart)
677
677
678 return ''.join(result)
678 return ''.join(result)
679
679
def checkexec(path):
    """
    Check whether the given path is on a filesystem with UNIX-like exec flags

    Requires a directory (like /foo/.hg)
    """

    # VFAT on some Linux versions can flip mode but it doesn't persist
    # a FS remount. Frequently we can detect it if files are created
    # with exec bit on.

    try:
        EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
        # probe with a throwaway file inside the given directory
        fh, fn = tempfile.mkstemp(dir=path, prefix='hg-checkexec-')
        try:
            os.close(fh)
            # remember the fresh file's permission bits ...
            m = os.stat(fn).st_mode & 0777
            new_file_has_exec = m & EXECFLAGS
            # ... then toggle the exec bits and check whether the change
            # actually took effect on this filesystem
            os.chmod(fn, m ^ EXECFLAGS)
            exec_flags_cannot_flip = ((os.stat(fn).st_mode & 0777) == m)
        finally:
            os.unlink(fn)
    except (IOError, OSError):
        # we don't care, the user probably won't be able to commit anyway
        return False
    # exec bits are only trustworthy if new files start without them and
    # the bits can actually be flipped
    return not (new_file_has_exec or exec_flags_cannot_flip)
706
706
def checklink(path):
    """check whether the given path is on a symlink-capable filesystem"""
    # mktemp is not racy here: if the name springs into existence before
    # os.symlink runs, symlink creation simply fails and we answer False
    probe = tempfile.mktemp(dir=path, prefix='hg-checklink-')
    try:
        os.symlink(".", probe)
        os.unlink(probe)
    except (OSError, AttributeError):
        # OSError: the filesystem refused the symlink;
        # AttributeError: os.symlink does not exist on this platform
        return False
    return True
718
718
def needbinarypatch():
    """return True if patches should be applied in binary mode by default."""
    # only Windows ('nt') needs this
    return os.name == 'nt'
722
722
def endswithsep(path):
    '''Check path ends with os.sep or os.altsep.'''
    # NB: when os.altsep is unset this can yield None instead of False;
    # callers rely on truthiness only, so the short-circuit form is kept
    if path.endswith(os.sep):
        return True
    return os.altsep and path.endswith(os.altsep)
726
726
def splitpath(path):
    """Split path on os.sep only.

    Deliberately ignores os.altsep -- this is just a named form of
    ``path.split(os.sep)``.  Run os.path.normpath() first if the input
    may contain redundant separators.
    """
    return path.split(os.sep)
734
734
def gui():
    '''Are we running in a GUI?'''
    # Windows and classic MacOS always count as graphical; elsewhere the
    # presence of $DISPLAY decides.  Note: may return the display string
    # itself rather than True -- callers use truthiness only.
    return os.name in ("nt", "mac") or os.environ.get("DISPLAY")
738
738
def mktempcopy(name, emptyok=False, createmode=None):
    """Create a temporary file with the same contents from name

    The permission bits are copied from the original file.

    If the temporary file is going to be truncated immediately, you
    can use emptyok=True as an optimization.

    Returns the name of the temporary file.
    """
    d, fn = os.path.split(name)
    # place the temp file in the same directory so the later rename()
    # stays on one filesystem
    fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
    os.close(fd)
    # Temporary files are created with mode 0600, which is usually not
    # what we want. If the original file already exists, just copy
    # its mode. Otherwise, manually obey umask.
    try:
        st_mode = os.lstat(name).st_mode & 0777
    except OSError, inst:
        if inst.errno != errno.ENOENT:
            raise
        # original does not exist yet: derive mode from createmode,
        # falling back to the process umask, never with exec bits
        st_mode = createmode
        if st_mode is None:
            st_mode = ~umask
        st_mode &= 0666
    os.chmod(temp, st_mode)
    if emptyok:
        # caller will truncate/overwrite anyway; skip the copy
        return temp
    try:
        try:
            ifp = posixfile(name, "rb")
        except IOError, inst:
            if inst.errno == errno.ENOENT:
                # source vanished between lstat and open: empty temp is fine
                return temp
            if not getattr(inst, 'filename', None):
                inst.filename = name
            raise
        ofp = posixfile(temp, "wb")
        for chunk in filechunkiter(ifp):
            ofp.write(chunk)
        ifp.close()
        ofp.close()
    except:
        # best-effort cleanup of the temp file, then re-raise
        try: os.unlink(temp)
        except: pass
        raise
    return temp
786
786
class atomictempfile(object):
    """file-like object that atomically updates a file

    Writes are redirected to a temporary copy of the target.  Calling
    rename() moves that copy over the original name in one step, so the
    new contents become visible atomically.  If the object is dropped
    without rename(), the temporary copy is discarded.
    """
    def __init__(self, name, mode='w+b', createmode=None):
        self.__name = name
        # set _fp before anything that can raise, so __del__ stays safe
        self._fp = None
        self.temp = mktempcopy(name, emptyok=('w' in mode),
                               createmode=createmode)
        self._fp = posixfile(self.temp, mode)

    def __getattr__(self, name):
        # delegate file operations (write, read, ...) to the real handle
        return getattr(self._fp, name)

    def rename(self):
        # flush/close first, then atomically move over the target
        if not self._fp.closed:
            self._fp.close()
        rename(self.temp, localpath(self.__name))

    def __del__(self):
        fp = self._fp
        if not fp:
            return
        if not fp.closed:
            # never renamed: drop the temporary copy, best effort
            try:
                os.unlink(self.temp)
            except:
                pass
            fp.close()
817
817
def makedirs(name, mode=None):
    """recursive directory creation with parent mode inheritance"""
    # Try the leaf directly; only recurse upward when its parent is
    # missing (ENOENT).
    try:
        os.mkdir(name)
        if mode is not None:
            os.chmod(name, mode)
        return
    except OSError, err:
        if err.errno == errno.EEXIST:
            # already there -- the mode of an existing directory is
            # deliberately left untouched
            return
        if err.errno != errno.ENOENT:
            raise
    # parent missing: create the whole chain, then retry the leaf
    parent = os.path.abspath(os.path.dirname(name))
    makedirs(parent, mode)
    makedirs(name, mode)
833
833
class opener(object):
    """Open files relative to a base directory

    This class is used to hide the details of COW semantics and
    remote file access from higher level code.
    """
    def __init__(self, base, audit=True):
        # base: directory that all relative paths are resolved against
        # audit: when True, paths are checked by path_auditor(base)
        # (presumably rejecting paths escaping base -- defined elsewhere)
        self.base = base
        if audit:
            self.auditor = path_auditor(base)
        else:
            self.auditor = always
        self.createmode = None

    @propertycache
    def _can_symlink(self):
        # probed once per instance: does base's filesystem do symlinks?
        return checklink(self.base)

    def _fixfilemode(self, name):
        # apply createmode (without exec bits) to a freshly created file
        if self.createmode is None:
            return
        os.chmod(name, self.createmode & 0666)

    def __call__(self, path, mode="r", text=False, atomictemp=False):
        # Open base-relative path.  For writes: create missing parent
        # directories, break hardlinks (copy-on-write), and optionally
        # hand back an atomictempfile so changes appear only on rename().
        self.auditor(path)
        f = os.path.join(self.base, path)

        if not text and "b" not in mode:
            mode += "b" # for that other OS

        # nlink: -1 unknown (read mode), 0 freshly created, >1 hardlinked
        nlink = -1
        if mode not in ("r", "rb"):
            try:
                nlink = nlinks(f)
            except OSError:
                nlink = 0
                d = os.path.dirname(f)
                if not os.path.isdir(d):
                    makedirs(d, self.createmode)
            if atomictemp:
                return atomictempfile(f, mode, self.createmode)
            if nlink > 1:
                # break the hardlink by replacing f with a private copy
                rename(mktempcopy(f), f)
        fp = posixfile(f, mode)
        if nlink == 0:
            self._fixfilemode(f)
        return fp

    def symlink(self, src, dst):
        # Create dst (base-relative) pointing at src; on filesystems
        # without symlink support, fall back to a plain file whose
        # contents are src.
        self.auditor(dst)
        linkname = os.path.join(self.base, dst)
        try:
            os.unlink(linkname)
        except OSError:
            pass

        dirname = os.path.dirname(linkname)
        if not os.path.exists(dirname):
            makedirs(dirname, self.createmode)

        if self._can_symlink:
            try:
                os.symlink(src, linkname)
            except OSError, err:
                raise OSError(err.errno, _('could not symlink to %r: %s') %
                              (src, err.strerror), linkname)
        else:
            f = self(dst, "w")
            f.write(src)
            f.close()
            self._fixfilemode(dst)
905
905
class chunkbuffer(object):
    """Present an iterator of arbitrarily sized chunks through a
    byte-count based read() interface."""

    def __init__(self, in_iter):
        """in_iter yields the raw input chunks.  Chunks larger than 1M
        are pre-split into 256k slices before buffering."""
        def _bounded(source):
            for piece in source:
                if len(piece) <= 2 ** 20:
                    yield piece
                    continue
                offset = 0
                while offset < len(piece):
                    yield piece[offset:offset + 2 ** 18]
                    offset += 2 ** 18
        self.iter = _bounded(in_iter)
        self._queue = []

    def read(self, l):
        """Read l bytes of data from the iterator of chunks of data.
        Returns less than l bytes if the iterator runs dry."""
        queue = self._queue
        parts = []
        remaining = l
        while remaining > 0:
            if not queue:
                # refill: pull roughly 256k ahead from the source
                budget = 2 ** 18
                for piece in self.iter:
                    queue.append(piece)
                    budget -= len(piece)
                    if budget <= 0:
                        break
                if not queue:
                    break

            piece = queue.pop(0)
            remaining -= len(piece)
            if remaining < 0:
                # keep the unread tail for the next read() call
                queue.insert(0, piece[remaining:])
                parts.append(piece[:remaining])
            else:
                parts.append(piece)

        return ''.join(parts)
953
953
def filechunkiter(f, size=65536, limit=None):
    """Generate the data of f in chunks of size bytes (default 65536),
    up to an optional limit (default: read everything).  A chunk may be
    shorter than size at end of file, or when f is a socket-like object
    that returns short reads."""
    assert size >= 0
    assert limit is None or limit >= 0
    while True:
        if limit is None:
            nbytes = size
        else:
            nbytes = min(limit, size)
        if not nbytes:
            break
        s = f.read(nbytes)
        if not s:
            break
        if limit:
            limit -= len(s)
        yield s
974
974
def makedate():
    '''return the current local time as a (unixtime, tzoffset) pair'''
    lt = time.localtime()
    # use the DST-adjusted offset when DST is currently in effect
    if lt.tm_isdst == 1 and time.daylight:
        tz = time.altzone
    else:
        tz = time.timezone
    return time.mktime(lt), tz
982
982
def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
    """render a (unixtime, offset) pair as localized time.

    unixtime is seconds since the epoch; offset is the time zone's
    distance from UTC in seconds.  %1 and %2 in format expand to the
    signed hour and minute parts of the offset; omit them to leave the
    time zone out."""
    t, tz = date or makedate()
    if "%1" in format or "%2" in format:
        # offsets east of UTC are stored negative, hence the flipped sign
        sign = (tz > 0) and "-" or "+"
        minutes = abs(tz) // 60
        format = format.replace("%1", "%c%02d" % (sign, minutes // 60))
        format = format.replace("%2", "%02d" % (minutes % 60))
    return time.strftime(format, time.gmtime(float(t) - tz))
996
996
def shortdate(date=None):
    """render a (timestamp, tzoff) pair as an ISO 8601 date (YYYY-MM-DD)."""
    return datestr(date, format='%Y-%m-%d')
1000
1000
def strdate(string, format, defaults=[]):
    """parse a localized time string and return a (unixtime, offset) tuple.

    format is a time.strptime() format.  defaults maps strptime field
    groups (e.g. "HI", "M") to fallback values that are appended when
    the corresponding directive is missing from format; it is only
    read, never mutated.  Raises ValueError if the string cannot be
    parsed.
    """
    def timezone(string):
        # peel a trailing numeric "+HHMM"/"-HHMM" zone, or GMT/UTC;
        # returns the offset in seconds, or None when no zone is present
        tz = string.split()[-1]
        if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
            sign = (tz[0] == "+") and 1 or -1
            hours = int(tz[1:3])
            minutes = int(tz[3:5])
            return -sign * (hours * 60 + minutes) * 60
        if tz in ("GMT", "UTC"):
            return 0
        return None

    # NOTE: unixtime = localunixtime + offset
    offset, date = timezone(string), string
    if offset is not None:
        # drop the zone token we already consumed
        date = " ".join(string.split()[:-1])

    # add missing elements from defaults
    for part in defaults:
        found = [True for p in part if ("%" + p) in format]
        if not found:
            date += "@" + defaults[part]
            format += "@%" + part[0]

    timetuple = time.strptime(date, format)
    localunixtime = int(calendar.timegm(timetuple))
    if offset is None:
        # no explicit zone: interpret the string in the local timezone
        unixtime = int(time.mktime(timetuple))
        offset = unixtime - localunixtime
    else:
        unixtime = localunixtime + offset
    return unixtime, offset
1036
1036
def parsedate(date, formats=None, defaults=None):
    """parse a localized date/time string and return a (unixtime, offset) tuple.

    The date may be a "unixtime offset" string or in one of the specified
    formats. If the date already is a (unixtime, offset) tuple, it is returned.
    """
    if not date:
        return 0, 0
    if isinstance(date, tuple) and len(date) == 2:
        return date
    if not formats:
        formats = defaultdateformats
    date = date.strip()
    try:
        when, offset = map(int, date.split(' '))
    except ValueError:
        # not a raw "unixtime offset" pair: try the strptime formats,
        # filling unspecified fields from defaults / the current time
        if not defaults:
            defaults = {}
        now = makedate()
        for part in "d mb yY HI M S".split():
            if part in defaults:
                continue
            if part[0] in "HMS":
                defaults[part] = "00"
            else:
                defaults[part] = datestr(now, "%" + part[0])

        for format in formats:
            try:
                when, offset = strdate(date, format, defaults)
            except (ValueError, OverflowError):
                pass
            else:
                break
        else:
            raise Abort(_('invalid date: %r') % date)
    # validate explicit (probably user-specified) date and
    # time zone offset. values must fit in signed 32 bits for
    # current 32-bit linux runtimes. timezones go from UTC-12
    # to UTC+14
    if abs(when) > 0x7fffffff:
        raise Abort(_('date exceeds 32 bits: %d') % when)
    if offset < -50400 or offset > 43200:
        raise Abort(_('impossible time zone offset: %d') % offset)
    return when, offset
1082
1082
def matchdate(date):
    """Return a predicate over unixtimes for a date match specifier.

    Specifiers:

    '{date}'      match the given date to the accuracy provided

    '<{date}'     on or before the given date

    '>{date}'     on or after the given date

    '-{days}'     within the last {days} days

    '{a} to {b}'  inclusive range
    """

    def lower(date):
        # earliest instant compatible with the (possibly partial) date
        d = dict(mb="1", d="1")
        return parsedate(date, extendeddateformats, d)[0]

    def upper(date):
        # latest instant: try the longest month lengths first
        d = dict(mb="12", HI="23", M="59", S="59")
        for days in "31 30 29".split():
            try:
                d["d"] = days
                return parsedate(date, extendeddateformats, d)[0]
            except:
                pass
        d["d"] = "28"
        return parsedate(date, extendeddateformats, d)[0]

    date = date.strip()
    head = date[0]
    if head == "<":
        when = upper(date[1:])
        return lambda x: x <= when
    if head == ">":
        when = lower(date[1:])
        return lambda x: x >= when
    if head == "-":
        try:
            days = int(date[1:])
        except ValueError:
            raise Abort(_("invalid day spec: %s") % date[1:])
        when = makedate()[0] - days * 3600 * 24
        return lambda x: x >= when
    if " to " in date:
        a, b = date.split(" to ")
        start, stop = lower(a), upper(b)
        return lambda x: start <= x <= stop
    start, stop = lower(date), upper(date)
    return lambda x: start <= x <= stop
1132
1132
def shortuser(user):
    """Return a short representation of a user name or email address."""
    # strip the mail domain, then a leading "Name <" part, then anything
    # after the first space or dot
    at = user.find('@')
    if at >= 0:
        user = user[:at]
    lt = user.find('<')
    if lt >= 0:
        user = user[lt + 1:]
    for sep in (' ', '.'):
        cut = user.find(sep)
        if cut >= 0:
            user = user[:cut]
    return user
1148
1148
def email(author):
    '''get email of author.'''
    # everything between '<' and '>' when present, else the whole string
    close = author.find('>')
    if close == -1:
        close = None
    return author[author.find('<') + 1:close]
1155
1155
def ellipsis(text, maxlength=400):
    """Trim string to at most maxlength (default: 400) characters."""
    if len(text) > maxlength:
        # keep maxlength-3 characters and spend 3 on the ellipsis
        return "%s..." % (text[:maxlength - 3])
    return text
1162
1162
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, recursively.

    followsym: also descend into symlinked directories (honored only
    when os.path.samestat exists, so cycles can be detected).
    seen_dirs: stat records of already-visited directories; used by the
    internal recursive calls.
    recurse: keep looking for nested repositories below a found one.
    '''
    def errhandler(err):
        # only walk errors on the root path itself are fatal
        if err.filename == path:
            raise err
    if followsym and hasattr(os.path, 'samestat'):
        def _add_dir_if_not_there(dirlst, dirname):
            # return True (and record dirname's stat) when the directory
            # has not been visited yet -- symlink-cycle protection
            match = False
            samestat = os.path.samestat
            dirstat = os.stat(dirname)
            for lstdirstat in dirlst:
                if samestat(dirstat, lstdirstat):
                    match = True
                    break
            if not match:
                dirlst.append(dirstat)
            return not match
    else:
        # without samestat cycles cannot be detected: disable followsym
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        _add_dir_if_not_there(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        dirs.sort()
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = [] # don't descend further
        elif followsym:
            # os.walk does not follow symlinks: recurse into them by
            # hand, pruning directories already seen
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if _add_dir_if_not_there(seen_dirs, fname):
                    if os.path.islink(fname):
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            dirs[:] = newdirs
1209
1209
# memoized result of rcpath(); None until the first call computes it
_rcpath = None
1211
1211
def os_rcpath():
    '''return default os-specific hgrc search path'''
    # system-wide entries first, then the per-user ones, all normalized
    entries = system_rcpath()
    entries.extend(user_rcpath())
    return [os.path.normpath(p) for p in entries]
1218
1218
def rcpath():
    '''return hgrc search path. if env var HGRCPATH is set, use it.
    for each item in path, if directory, use files ending in .rc,
    else use item.
    make HGRCPATH empty to only look in .hg/hgrc of current repo.
    if no HGRCPATH, use default os-specific path.'''
    global _rcpath
    if _rcpath is not None:
        # already computed on a previous call
        return _rcpath
    if 'HGRCPATH' not in os.environ:
        _rcpath = os_rcpath()
        return _rcpath
    _rcpath = []
    for entry in os.environ['HGRCPATH'].split(os.pathsep):
        if not entry:
            continue
        entry = expandpath(entry)
        if not os.path.isdir(entry):
            _rcpath.append(entry)
            continue
        # a directory entry contributes every *.rc file inside it
        for f, kind in osutil.listdir(entry):
            if f.endswith('.rc'):
                _rcpath.append(os.path.join(entry, f))
    return _rcpath
1242
1242
def bytecount(nbytes):
    '''return byte count formatted as readable string, with units'''

    # checked largest-unit first; precision grows as the value shrinks
    # relative to the unit (>=100 units: 0 decimals, >=10: 1, else 2)
    units = (
        (100, 1 << 30, _('%.0f GB')),
        (10, 1 << 30, _('%.1f GB')),
        (1, 1 << 30, _('%.2f GB')),
        (100, 1 << 20, _('%.0f MB')),
        (10, 1 << 20, _('%.1f MB')),
        (1, 1 << 20, _('%.2f MB')),
        (100, 1 << 10, _('%.0f KB')),
        (10, 1 << 10, _('%.1f KB')),
        (1, 1 << 10, _('%.2f KB')),
        (1, 1, _('%.0f bytes')),
        )

    for multiplier, divisor, format in units:
        if nbytes < divisor * multiplier:
            continue
        return format % (nbytes / float(divisor))
    # smaller than one byte (0 or negative): fall back to the bytes form
    return units[-1][2] % nbytes
1263
1263
def drop_scheme(scheme, path):
    """Strip a leading 'scheme:' (and '//' authority marker) from path."""
    marker = scheme + ':'
    if not path.startswith(marker):
        return path
    path = path[len(marker):]
    if not path.startswith('//'):
        return path
    if scheme != 'file':
        return path[2:]
    i = path.find('/', 2)
    if i == -1:
        return ''
    # On Windows, absolute paths are rooted at the current drive
    # root. On POSIX they are rooted at the file system root.
    if os.name == 'nt':
        droot = os.path.splitdrive(os.getcwd())[0] + '/'
        return os.path.join(droot, path[i + 1:])
    return path[i:]
1283
1283
def uirepr(s):
    """repr() variant that collapses doubled backslashes (Windows paths)."""
    r = repr(s)
    return r.replace('\\\\', '\\')
1287
1287
1288 #### naming convention of below implementation follows 'textwrap' module
1288 #### naming convention of below implementation follows 'textwrap' module
1289
1289
class MBTextWrapper(textwrap.TextWrapper):
    """textwrap.TextWrapper that accounts for east-asian wide characters.

    Characters whose east_asian_width is W/F/A are counted as two
    display columns, so lines containing them are not over-filled.
    """
    def __init__(self, **kwargs):
        textwrap.TextWrapper.__init__(self, **kwargs)

    def _cutdown(self, str, space_left):
        # Split byte string 'str' so the first part occupies at most
        # 'space_left' display columns; returns (head, tail) byte strings.
        l = 0
        ucstr = unicode(str, encoding.encoding)
        w = unicodedata.east_asian_width
        for i in xrange(len(ucstr)):
            # wide (W), full-width (F) and ambiguous (A) characters
            # take two columns, everything else one
            l += w(ucstr[i]) in 'WFA' and 2 or 1
            if space_left < l:
                return (ucstr[:i].encode(encoding.encoding),
                        ucstr[i:].encode(encoding.encoding))
        # the whole string fits: no tail
        return str, ''

    # ----------------------------------------
    # overriding of base class

    def _handle_long_word(self, reversed_chunks, cur_line, cur_len, width):
        # Called by TextWrapper when the next chunk is longer than the
        # space remaining on the current line.
        space_left = max(width - cur_len, 1)

        if self.break_long_words:
            cut, res = self._cutdown(reversed_chunks[-1], space_left)
            cur_line.append(cut)
            reversed_chunks[-1] = res
        elif not cur_line:
            # cannot break: put the whole chunk on a line of its own
            cur_line.append(reversed_chunks.pop())
1317
1317
1318 #### naming convention of above implementation follows 'textwrap' module
1318 #### naming convention of above implementation follows 'textwrap' module
1319
1319
def wrap(line, width=None, initindent='', hangindent=''):
    """Word-wrap line to width columns, honoring initial/hanging indents."""
    if width is None:
        width = termwidth() - 2
    indentwidth = max(len(initindent), len(hangindent))
    if width <= indentwidth:
        # terminal too narrow for the indent: fall back to a sane width
        width = max(78, indentwidth + 1)
    return MBTextWrapper(width=width,
                         initial_indent=initindent,
                         subsequent_indent=hangindent).fill(line)
1331
1331
def iterlines(iterator):
    """Flatten an iterator of text chunks into individual lines."""
    for block in iterator:
        for textline in block.splitlines():
            yield textline
1336
1336
def expandpath(path):
    """Expand environment variables, then ~user constructs, in path."""
    path = os.path.expandvars(path)
    return os.path.expanduser(path)
1339
1339
def hgcmd():
    """Return the argument list used to invoke the running hg.

    Unlike hgexecutable(), this avoids launchers (e.g. batch files on
    Windows) that would open new shell windows: the result is either
    the frozen executable itself or the python invocation.
    """
    if not main_is_frozen():
        return gethgcmd()
    return [sys.executable]
1350
1350
def rundetached(args, condfn):
    """Execute the argument list in a detached process.

    condfn is a callable which is called repeatedly and should return
    True once the child process is known to have started successfully.
    At this point, the child process PID is returned. If the child
    process fails to start or finishes before condfn() evaluates to
    True, return -1.
    """
    # Windows case is easier because the child process is either
    # successfully starting and validating the condition or exiting
    # on failure. We just poll on its PID. On Unix, if the child
    # process fails to start, it will be left in a zombie state until
    # the parent wait on it, which we cannot do since we expect a long
    # running process on success. Instead we listen for SIGCHLD telling
    # us our child process terminated.
    terminated = set()
    def handler(signum, frame):
        # reap the dead child so it does not linger as a zombie
        # NOTE(review): os.wait() returns a (pid, status) tuple, so the
        # 'pid in terminated' test below can never match an int pid;
        # child death is effectively detected via testpid() — confirm.
        terminated.add(os.wait())
    prevhandler = None
    if hasattr(signal, 'SIGCHLD'):
        prevhandler = signal.signal(signal.SIGCHLD, handler)
    try:
        pid = spawndetached(args)
        while not condfn():
            if ((pid in terminated or not testpid(pid))
                and not condfn()):
                # child died; condfn() is re-checked to close the race
                # where the condition was met just before it exited
                return -1
            time.sleep(0.1)
        return pid
    finally:
        # restore whatever SIGCHLD handler was installed before
        if prevhandler is not None:
            signal.signal(signal.SIGCHLD, prevhandler)
1384
1384
# any() and all() are builtins only from Python 2.5 on; on older
# interpreters the rebinding below raises NameError and we install
# pure-Python fallbacks with the same semantics.
try:
    any, all = any, all
except NameError:
    def any(iterable):
        # True as soon as one element is truthy
        for i in iterable:
            if i:
                return True
        return False

    def all(iterable):
        # False as soon as one element is falsy
        for i in iterable:
            if not i:
                return False
        return True
1399
1399
def termwidth():
    """Return the terminal width, honoring a numeric COLUMNS override."""
    cols = os.environ.get('COLUMNS')
    if cols:
        try:
            return int(cols)
        except ValueError:
            # non-numeric COLUMNS: fall through to the OS query
            pass
    return termwidth_()
1407
1407
def interpolate(prefix, mapping, s, fn=None):
    """Return the result of interpolating items in the mapping into string s.

    prefix is a single character string, or a two character string with
    a backslash as the first character if the prefix needs to be escaped in
    a regular expression.

    fn is an optional function that will be applied to the replacement text
    just before replacement.
    """
    fn = fn or (lambda s: s)
    # escape the keys so that mapping keys containing regexp
    # metacharacters ('.', '+', ...) are matched literally instead of
    # being interpreted as patterns (which could match unrelated text
    # and then KeyError in the substitution callback)
    patterns = '|'.join(map(re.escape, mapping.keys()))
    r = re.compile(r'%s(%s)' % (prefix, patterns))
    # x.group() is prefix+key; strip the (one-character) prefix
    return r.sub(lambda x: fn(mapping[x.group()[1:]]), s)
1421
1421
def getport(port):
    """Return the port for a given network service.

    If port is an integer, it's returned as is. If it's a string, it's
    looked up using socket.getservbyname(). If there's no matching
    service, util.Abort is raised.
    """
    try:
        return int(port)
    except ValueError:
        # not numeric: treat it as a service name
        try:
            return socket.getservbyname(port)
        except socket.error:
            raise Abort(_("no port number associated with service '%s'")
                        % port)
1438
1438
# words accepted as booleans in configuration values
_booleans = {
    '1': True, 'yes': True, 'true': True, 'on': True, 'always': True,
    '0': False, 'no': False, 'false': False, 'off': False, 'never': False,
}

def parsebool(s):
    """Return the boolean value of s, or None if s is not a boolean word.

    Matching is case-insensitive.
    """
    return _booleans.get(s.lower())
@@ -1,195 +1,195
1 # win32.py - utility functions that use win32 API
1 # win32.py - utility functions that use win32 API
2 #
2 #
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 """Utility functions that use win32 API.
8 """Utility functions that use win32 API.
9
9
10 Mark Hammond's win32all package allows better functionality on
10 Mark Hammond's win32all package allows better functionality on
11 Windows. This module overrides definitions in util.py. If not
11 Windows. This module overrides definitions in util.py. If not
12 available, import of this module will fail, and generic code will be
12 available, import of this module will fail, and generic code will be
13 used.
13 used.
14 """
14 """
15
15
16 import win32api
16 import win32api
17
17
18 import errno, os, sys, pywintypes, win32con, win32file, win32process
18 import errno, os, sys, pywintypes, win32con, win32file, win32process
19 import winerror, win32gui, win32console
19 import winerror, win32gui, win32console
20 import osutil, encoding
20 import osutil, encoding
21 from win32com.shell import shell, shellcon
21 from win32com.shell import shell, shellcon
22
22
def os_link(src, dst):
    """Create a hardlink dst pointing at src.

    win32 failures are translated into the OSError(EINVAL) that the
    generic calling code expects.
    """
    try:
        win32file.CreateHardLink(dst, src)
    except pywintypes.error:
        raise OSError(errno.EINVAL, 'target implements hardlinks improperly')
    except NotImplementedError: # Another fake error win Win98
        raise OSError(errno.EINVAL, 'Hardlinking not supported')
30
30
def _getfileinfo(pathname):
    """Return the win32 file-information tuple for pathname.

    Raises OSError(ENOENT) when the file cannot be opened.
    """
    try:
        handle = win32file.CreateFile(
            pathname,
            win32file.GENERIC_READ, win32file.FILE_SHARE_READ,
            None, win32file.OPEN_EXISTING, 0, None)
    except pywintypes.error:
        raise OSError(errno.ENOENT, 'The system cannot find the file specified')
    try:
        return win32file.GetFileInformationByHandle(handle)
    finally:
        handle.Close()
43
43
def nlinks(pathname):
    """Return the number of hardlinks for the given file."""
    links = _getfileinfo(pathname)[7]
    if links >= 2:
        return links
    # Known to be wrong for most network drives
    dirname = os.path.dirname(pathname) or '.'
    dt = win32file.GetDriveType(dirname + '\\')
    # drive types 4 and 1 — presumably remote / unknown-root mounts
    if dt == 4 or dt == 1:
        # Fake hardlink to force COW for network drives
        links = 2
    return links
57
57
def samefile(fpath1, fpath2):
    """True when fpath1 and fpath2 name the very same file.

    Only guaranteed to work for files, not directories.
    """
    info1 = _getfileinfo(fpath1)
    info2 = _getfileinfo(fpath2)
    # the volume serial number (index 4) plus the two file-index words
    # (indexes 8 and 9) uniquely identify a file
    return ((info1[4], info1[8], info1[9])
            == (info2[4], info2[8], info2[9]))
65
65
def samedevice(fpath1, fpath2):
    """True when fpath1 and fpath2 live on the same volume.

    Only guaranteed to work for files, not directories.
    """
    # index 4 of the file-information tuple is the volume serial number
    return _getfileinfo(fpath1)[4] == _getfileinfo(fpath2)[4]
72
72
def testpid(pid):
    '''return True if pid is still running or unable to
    determine, False otherwise'''
    try:
        handle = win32api.OpenProcess(
            win32con.PROCESS_QUERY_INFORMATION, False, pid)
        if handle:
            # a live process reports the pseudo exit code STILL_ACTIVE
            status = win32process.GetExitCodeProcess(handle)
            return status == win32con.STILL_ACTIVE
    except pywintypes.error, details:
        # ERROR_INVALID_PARAMETER means no such pid; any other error
        # leaves the state unknown, so report "running"
        return details[0] != winerror.ERROR_INVALID_PARAMETER
    # OpenProcess returned a false handle without raising: assume alive
    return True
85
85
def lookup_reg(key, valname=None, scope=None):
    ''' Look up a key/value name in the Windows registry.

    valname: value name. If unspecified, the default value for the key
    is used.
    scope: optionally specify scope for registry lookup, this can be
    a sequence of scopes to look up in order. Default (CURRENT_USER,
    LOCAL_MACHINE).

    Returns the value as a local-encoding byte string, or None when the
    key/value is not found (or the registry is unavailable).
    '''
    try:
        from _winreg import HKEY_CURRENT_USER, HKEY_LOCAL_MACHINE, \
            QueryValueEx, OpenKey
    except ImportError:
        # _winreg unavailable (non-Windows python): behave as "not found"
        return None

    if scope is None:
        scope = (HKEY_CURRENT_USER, HKEY_LOCAL_MACHINE)
    elif not isinstance(scope, (list, tuple)):
        # a single scope was passed; normalize to a sequence
        scope = (scope,)
    for s in scope:
        try:
            val = QueryValueEx(OpenKey(s, key), valname)[0]
            # never let a Unicode string escape into the wild
            return encoding.tolocal(val.encode('UTF-8'))
        except EnvironmentError:
            # missing key/value in this scope: try the next one
            pass
112
112
def system_rcpath_win32():
    '''return default os-specific hgrc search path

    Checks, in order: mercurial.ini next to hg.exe, an hgrc.d directory
    next to hg.exe, and finally the SOFTWARE\\Mercurial registry value.
    '''
    proc = win32api.GetCurrentProcess()
    try:
        # This will fail on windows < NT
        filename = win32process.GetModuleFileNameEx(proc, 0)
    except Exception:
        # was a bare except:, which also swallowed KeyboardInterrupt and
        # SystemExit; Exception is wide enough for the win9x fallback
        filename = win32api.GetModuleFileName(0)
    # Use mercurial.ini found in directory with hg.exe
    progrc = os.path.join(os.path.dirname(filename), 'mercurial.ini')
    if os.path.isfile(progrc):
        return [progrc]
    # Use hgrc.d found in directory with hg.exe
    progrcd = os.path.join(os.path.dirname(filename), 'hgrc.d')
    if os.path.isdir(progrcd):
        return [os.path.join(progrcd, f)
                for f, kind in osutil.listdir(progrcd)
                if f.endswith('.rc')]
    # else look for a system rcpath in the registry
    try:
        value = win32api.RegQueryValue(
            win32con.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Mercurial')
        rcpath = []
        for p in value.split(os.pathsep):
            if p.lower().endswith('mercurial.ini'):
                rcpath.append(p)
            elif os.path.isdir(p):
                for f, kind in osutil.listdir(p):
                    if f.endswith('.rc'):
                        rcpath.append(os.path.join(p, f))
        return rcpath
    except pywintypes.error:
        # no registry entry: no system rcpath
        return []
148
148
def user_rcpath_win32():
    '''return os-specific hgrc search path to the user dir'''
    home = os.path.expanduser('~')
    if home == '~' and sys.getwindowsversion()[3] != 2:
        # win < NT: '~' did not expand; derive the home directory from
        # the parent of the APPDATA shell folder instead
        appdir = shell.SHGetPathFromIDList(
            shell.SHGetSpecialFolderLocation(0, shellcon.CSIDL_APPDATA))
        home = os.path.dirname(appdir)
    return [os.path.join(home, 'mercurial.ini'),
            os.path.join(home, '.hgrc')]
160
160
def getuser():
    '''return name of current user'''
    # thin wrapper over the win32 API; errors from the call are not
    # caught here — NOTE(review): presumably pywintypes.error, confirm
    return win32api.GetUserName()
164
164
def set_signal_handler_win32():
    """Register a termination handler for console events including
    CTRL+C. python signal handlers do not work well with socket
    operations.
    """
    def handler(event):
        # exit the whole process on any console control event
        win32process.ExitProcess(1)
    win32api.SetConsoleCtrlHandler(handler)
173
173
def hidewindow():
    """Hide every top-level window owned by the current process."""
    def callback(hwnd, pid):
        # pid is the lParam we pass to EnumWindows below
        owner = win32process.GetWindowThreadProcessId(hwnd)[1]
        if owner == pid:
            win32gui.ShowWindow(hwnd, win32con.SW_HIDE)

    mypid = win32process.GetCurrentProcessId()
    win32gui.EnumWindows(callback, mypid)
183
183
def termwidth_():
    """Return the console width, or 79 when it cannot be determined."""
    try:
        # Query stderr to avoid problems with redirections
        screenbuf = win32console.GetStdHandle(win32console.STD_ERROR_HANDLE)
        try:
            window = screenbuf.GetConsoleScreenBufferInfo()['Window']
            return window.Right - window.Left
        finally:
            screenbuf.Detach()
    except pywintypes.error:
        return 79
General Comments 0
You need to be logged in to leave comments. Login now