Merge with stable
Matt Mackall
r10420:41d0ed2c merge default
@@ -1,228 +1,233 @@
1 # changelog.py - changelog class for mercurial
1 # changelog.py - changelog class for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from node import bin, hex, nullid
8 from node import bin, hex, nullid
9 from i18n import _
9 from i18n import _
10 import util, error, revlog, encoding
10 import util, error, revlog, encoding
11
11
12 def _string_escape(text):
12 def _string_escape(text):
13 """
13 """
14 >>> d = {'nl': chr(10), 'bs': chr(92), 'cr': chr(13), 'nul': chr(0)}
14 >>> d = {'nl': chr(10), 'bs': chr(92), 'cr': chr(13), 'nul': chr(0)}
15 >>> s = "ab%(nl)scd%(bs)s%(bs)sn%(nul)sab%(cr)scd%(bs)s%(nl)s" % d
15 >>> s = "ab%(nl)scd%(bs)s%(bs)sn%(nul)sab%(cr)scd%(bs)s%(nl)s" % d
16 >>> s
16 >>> s
17 'ab\\ncd\\\\\\\\n\\x00ab\\rcd\\\\\\n'
17 'ab\\ncd\\\\\\\\n\\x00ab\\rcd\\\\\\n'
18 >>> res = _string_escape(s)
18 >>> res = _string_escape(s)
19 >>> s == res.decode('string_escape')
19 >>> s == res.decode('string_escape')
20 True
20 True
21 """
21 """
22 # subset of the string_escape codec
22 # subset of the string_escape codec
23 text = text.replace('\\', '\\\\').replace('\n', '\\n').replace('\r', '\\r')
23 text = text.replace('\\', '\\\\').replace('\n', '\\n').replace('\r', '\\r')
24 return text.replace('\0', '\\0')
24 return text.replace('\0', '\\0')
25
25
26 def decodeextra(text):
26 def decodeextra(text):
27 extra = {}
27 extra = {}
28 for l in text.split('\0'):
28 for l in text.split('\0'):
29 if l:
29 if l:
30 k, v = l.decode('string_escape').split(':', 1)
30 k, v = l.decode('string_escape').split(':', 1)
31 extra[k] = v
31 extra[k] = v
32 return extra
32 return extra
33
33
34 def encodeextra(d):
34 def encodeextra(d):
35 # keys must be sorted to produce a deterministic changelog entry
35 # keys must be sorted to produce a deterministic changelog entry
36 items = [_string_escape('%s:%s' % (k, d[k])) for k in sorted(d)]
36 items = [_string_escape('%s:%s' % (k, d[k])) for k in sorted(d)]
37 return "\0".join(items)
37 return "\0".join(items)
38
38
39 class appender(object):
39 class appender(object):
40 '''the changelog index must be updated last on disk, so we use this class
40 '''the changelog index must be updated last on disk, so we use this class
41 to delay writes to it'''
41 to delay writes to it'''
42 def __init__(self, fp, buf):
42 def __init__(self, fp, buf):
43 self.data = buf
43 self.data = buf
44 self.fp = fp
44 self.fp = fp
45 self.offset = fp.tell()
45 self.offset = fp.tell()
46 self.size = util.fstat(fp).st_size
46 self.size = util.fstat(fp).st_size
47
47
48 def end(self):
48 def end(self):
49 return self.size + len("".join(self.data))
49 return self.size + len("".join(self.data))
50 def tell(self):
50 def tell(self):
51 return self.offset
51 return self.offset
52 def flush(self):
52 def flush(self):
53 pass
53 pass
54 def close(self):
54 def close(self):
55 self.fp.close()
55 self.fp.close()
56
56
57 def seek(self, offset, whence=0):
57 def seek(self, offset, whence=0):
58 '''virtual file offset spans real file and data'''
58 '''virtual file offset spans real file and data'''
59 if whence == 0:
59 if whence == 0:
60 self.offset = offset
60 self.offset = offset
61 elif whence == 1:
61 elif whence == 1:
62 self.offset += offset
62 self.offset += offset
63 elif whence == 2:
63 elif whence == 2:
64 self.offset = self.end() + offset
64 self.offset = self.end() + offset
65 if self.offset < self.size:
65 if self.offset < self.size:
66 self.fp.seek(self.offset)
66 self.fp.seek(self.offset)
67
67
68 def read(self, count=-1):
68 def read(self, count=-1):
69 '''only trick here is reads that span real file and data'''
69 '''only trick here is reads that span real file and data'''
70 ret = ""
70 ret = ""
71 if self.offset < self.size:
71 if self.offset < self.size:
72 s = self.fp.read(count)
72 s = self.fp.read(count)
73 ret = s
73 ret = s
74 self.offset += len(s)
74 self.offset += len(s)
75 if count > 0:
75 if count > 0:
76 count -= len(s)
76 count -= len(s)
77 if count != 0:
77 if count != 0:
78 doff = self.offset - self.size
78 doff = self.offset - self.size
79 self.data.insert(0, "".join(self.data))
79 self.data.insert(0, "".join(self.data))
80 del self.data[1:]
80 del self.data[1:]
81 s = self.data[0][doff:doff + count]
81 s = self.data[0][doff:doff + count]
82 self.offset += len(s)
82 self.offset += len(s)
83 ret += s
83 ret += s
84 return ret
84 return ret
85
85
86 def write(self, s):
86 def write(self, s):
87 self.data.append(str(s))
87 self.data.append(str(s))
88 self.offset += len(s)
88 self.offset += len(s)
89
89
90 def delayopener(opener, target, divert, buf):
90 def delayopener(opener, target, divert, buf):
91 def o(name, mode='r'):
91 def o(name, mode='r'):
92 if name != target:
92 if name != target:
93 return opener(name, mode)
93 return opener(name, mode)
94 if divert:
94 if divert:
95 return opener(name + ".a", mode.replace('a', 'w'))
95 return opener(name + ".a", mode.replace('a', 'w'))
96 # otherwise, divert to memory
96 # otherwise, divert to memory
97 return appender(opener(name, mode), buf)
97 return appender(opener(name, mode), buf)
98 return o
98 return o
99
99
100 class changelog(revlog.revlog):
100 class changelog(revlog.revlog):
101 def __init__(self, opener):
101 def __init__(self, opener):
102 revlog.revlog.__init__(self, opener, "00changelog.i")
102 revlog.revlog.__init__(self, opener, "00changelog.i")
103 self._realopener = opener
103 self._realopener = opener
104 self._delayed = False
104 self._delayed = False
105 self._divert = False
105 self._divert = False
106
106
107 def delayupdate(self):
107 def delayupdate(self):
108 "delay visibility of index updates to other readers"
108 "delay visibility of index updates to other readers"
109 self._delayed = True
109 self._delayed = True
110 self._divert = (len(self) == 0)
110 self._divert = (len(self) == 0)
111 self._delaybuf = []
111 self._delaybuf = []
112 self.opener = delayopener(self._realopener, self.indexfile,
112 self.opener = delayopener(self._realopener, self.indexfile,
113 self._divert, self._delaybuf)
113 self._divert, self._delaybuf)
114
114
115 def finalize(self, tr):
115 def finalize(self, tr):
116 "finalize index updates"
116 "finalize index updates"
117 self._delayed = False
117 self._delayed = False
118 self.opener = self._realopener
118 self.opener = self._realopener
119 # move redirected index data back into place
119 # move redirected index data back into place
120 if self._divert:
120 if self._divert:
121 n = self.opener(self.indexfile + ".a").name
121 n = self.opener(self.indexfile + ".a").name
122 util.rename(n, n[:-2])
122 util.rename(n, n[:-2])
123 elif self._delaybuf:
123 elif self._delaybuf:
124 fp = self.opener(self.indexfile, 'a')
124 fp = self.opener(self.indexfile, 'a')
125 fp.write("".join(self._delaybuf))
125 fp.write("".join(self._delaybuf))
126 fp.close()
126 fp.close()
127 self._delaybuf = []
127 self._delaybuf = []
128 # split when we're done
128 # split when we're done
129 self.checkinlinesize(tr)
129 self.checkinlinesize(tr)
130
130
131 def readpending(self, file):
131 def readpending(self, file):
132 r = revlog.revlog(self.opener, file)
132 r = revlog.revlog(self.opener, file)
133 self.index = r.index
133 self.index = r.index
134 self.nodemap = r.nodemap
134 self.nodemap = r.nodemap
135 self._chunkcache = r._chunkcache
135 self._chunkcache = r._chunkcache
136
136
137 def writepending(self):
137 def writepending(self):
138 "create a file containing the unfinalized state for pretxnchangegroup"
138 "create a file containing the unfinalized state for pretxnchangegroup"
139 if self._delaybuf:
139 if self._delaybuf:
140 # make a temporary copy of the index
140 # make a temporary copy of the index
141 fp1 = self._realopener(self.indexfile)
141 fp1 = self._realopener(self.indexfile)
142 fp2 = self._realopener(self.indexfile + ".a", "w")
142 fp2 = self._realopener(self.indexfile + ".a", "w")
143 fp2.write(fp1.read())
143 fp2.write(fp1.read())
144 # add pending data
144 # add pending data
145 fp2.write("".join(self._delaybuf))
145 fp2.write("".join(self._delaybuf))
146 fp2.close()
146 fp2.close()
147 # switch modes so finalize can simply rename
147 # switch modes so finalize can simply rename
148 self._delaybuf = []
148 self._delaybuf = []
149 self._divert = True
149 self._divert = True
150
150
151 if self._divert:
151 if self._divert:
152 return True
152 return True
153
153
154 return False
154 return False
155
155
156 def checkinlinesize(self, tr, fp=None):
156 def checkinlinesize(self, tr, fp=None):
157 if not self._delayed:
157 if not self._delayed:
158 revlog.revlog.checkinlinesize(self, tr, fp)
158 revlog.revlog.checkinlinesize(self, tr, fp)
159
159
160 def read(self, node):
160 def read(self, node):
161 """
161 """
162 format used:
162 format used:
163 nodeid\n : manifest node in ascii
163 nodeid\n : manifest node in ascii
164 user\n : user, no \n or \r allowed
164 user\n : user, no \n or \r allowed
165 time tz extra\n : date (time is int or float, timezone is int)
165 time tz extra\n : date (time is int or float, timezone is int)
166 : extra is metadatas, encoded and separated by '\0'
166 : extra is metadatas, encoded and separated by '\0'
167 : older versions ignore it
167 : older versions ignore it
168 files\n\n : files modified by the cset, no \n or \r allowed
168 files\n\n : files modified by the cset, no \n or \r allowed
169 (.*) : comment (free text, ideally utf-8)
169 (.*) : comment (free text, ideally utf-8)
170
170
171 changelog v0 doesn't use extra
171 changelog v0 doesn't use extra
172 """
172 """
173 text = self.revision(node)
173 text = self.revision(node)
174 if not text:
174 if not text:
175 return (nullid, "", (0, 0), [], "", {'branch': 'default'})
175 return (nullid, "", (0, 0), [], "", {'branch': 'default'})
176 last = text.index("\n\n")
176 last = text.index("\n\n")
177 desc = encoding.tolocal(text[last + 2:])
177 desc = encoding.tolocal(text[last + 2:])
178 l = text[:last].split('\n')
178 l = text[:last].split('\n')
179 manifest = bin(l[0])
179 manifest = bin(l[0])
180 user = encoding.tolocal(l[1])
180 user = encoding.tolocal(l[1])
181
181
182 extra_data = l[2].split(' ', 2)
182 extra_data = l[2].split(' ', 2)
183 if len(extra_data) != 3:
183 if len(extra_data) != 3:
184 time = float(extra_data.pop(0))
184 time = float(extra_data.pop(0))
185 try:
185 try:
186 # various tools did silly things with the time zone field.
186 # various tools did silly things with the time zone field.
187 timezone = int(extra_data[0])
187 timezone = int(extra_data[0])
188 except:
188 except:
189 timezone = 0
189 timezone = 0
190 extra = {}
190 extra = {}
191 else:
191 else:
192 time, timezone, extra = extra_data
192 time, timezone, extra = extra_data
193 time, timezone = float(time), int(timezone)
193 time, timezone = float(time), int(timezone)
194 extra = decodeextra(extra)
194 extra = decodeextra(extra)
195 if not extra.get('branch'):
195 if not extra.get('branch'):
196 extra['branch'] = 'default'
196 extra['branch'] = 'default'
197 files = l[3:]
197 files = l[3:]
198 return (manifest, user, (time, timezone), files, desc, extra)
198 return (manifest, user, (time, timezone), files, desc, extra)
199
199
200 def add(self, manifest, files, desc, transaction, p1, p2,
200 def add(self, manifest, files, desc, transaction, p1, p2,
201 user, date=None, extra=None):
201 user, date=None, extra=None):
202 user = user.strip()
202 user = user.strip()
203 # An empty username or a username with a "\n" will make the
203 # An empty username or a username with a "\n" will make the
204 # revision text contain two "\n\n" sequences -> corrupt
204 # revision text contain two "\n\n" sequences -> corrupt
205 # repository since read cannot unpack the revision.
205 # repository since read cannot unpack the revision.
206 if not user:
206 if not user:
207 raise error.RevlogError(_("empty username"))
207 raise error.RevlogError(_("empty username"))
208 if "\n" in user:
208 if "\n" in user:
209 raise error.RevlogError(_("username %s contains a newline")
209 raise error.RevlogError(_("username %s contains a newline")
210 % repr(user))
210 % repr(user))
211
211
212 # strip trailing whitespace and leading and trailing empty lines
212 # strip trailing whitespace and leading and trailing empty lines
213 desc = '\n'.join([l.rstrip() for l in desc.splitlines()]).strip('\n')
213 desc = '\n'.join([l.rstrip() for l in desc.splitlines()]).strip('\n')
214
214
215 user, desc = encoding.fromlocal(user), encoding.fromlocal(desc)
215 user, desc = encoding.fromlocal(user), encoding.fromlocal(desc)
216
216
217 if date:
217 if date:
218 parseddate = "%d %d" % util.parsedate(date)
218 parseddate = "%d %d" % util.parsedate(date)
219 else:
219 else:
220 parseddate = "%d %d" % util.makedate()
220 parseddate = "%d %d" % util.makedate()
221 if extra and extra.get("branch") in ("default", ""):
221 if extra:
222 del extra["branch"]
222 branch = extra.get("branch")
223 if branch in ("default", ""):
224 del extra["branch"]
225 elif branch in (".", "null", "tip"):
226 raise error.RevlogError(_('the name \'%s\' is reserved')
227 % branch)
223 if extra:
228 if extra:
224 extra = encodeextra(extra)
229 extra = encodeextra(extra)
225 parseddate = "%s %s" % (parseddate, extra)
230 parseddate = "%s %s" % (parseddate, extra)
226 l = [hex(manifest), user, parseddate] + sorted(files) + ["", desc]
231 l = [hex(manifest), user, parseddate] + sorted(files) + ["", desc]
227 text = "\n".join(l)
232 text = "\n".join(l)
228 return self.addrevision(text, transaction, len(self), p1, p2)
233 return self.addrevision(text, transaction, len(self), p1, p2)
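The substantive change in the changelog.py hunk above is stricter handling of the 'branch' entry in a commit's extra metadata: an empty or 'default' branch is dropped, while the reserved names '.', 'null', and 'tip' now abort the commit with a RevlogError. Below is a minimal sketch of that validation; checkbranch is a hypothetical standalone helper (not Mercurial's API), and it raises ValueError where changelog.add raises error.RevlogError.

# Hypothetical helper mirroring the check added to changelog.add above.
RESERVED_NAMES = ('.', 'null', 'tip')

def checkbranch(extra):
    """Drop a default/empty branch from extra, reject reserved names."""
    if extra:
        branch = extra.get("branch")
        if branch in ("default", ""):
            del extra["branch"]   # 'default' is never recorded explicitly
        elif branch in RESERVED_NAMES:
            raise ValueError("the name '%s' is reserved" % branch)
    return extra

# checkbranch({'branch': 'stable'}) keeps the key,
# checkbranch({'branch': 'default'}) drops it,
# checkbranch({'branch': 'tip'}) raises.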
@@ -1,655 +1,657 @@
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from node import nullid
8 from node import nullid
9 from i18n import _
9 from i18n import _
10 import util, ignore, osutil, parsers
10 import util, ignore, osutil, parsers
11 import struct, os, stat, errno
11 import struct, os, stat, errno
12 import cStringIO
12 import cStringIO
13
13
14 _unknown = ('?', 0, 0, 0)
14 _unknown = ('?', 0, 0, 0)
15 _format = ">cllll"
15 _format = ">cllll"
16 propertycache = util.propertycache
16 propertycache = util.propertycache
17
17
18 def _finddirs(path):
18 def _finddirs(path):
19 pos = path.rfind('/')
19 pos = path.rfind('/')
20 while pos != -1:
20 while pos != -1:
21 yield path[:pos]
21 yield path[:pos]
22 pos = path.rfind('/', 0, pos)
22 pos = path.rfind('/', 0, pos)
23
23
24 def _incdirs(dirs, path):
24 def _incdirs(dirs, path):
25 for base in _finddirs(path):
25 for base in _finddirs(path):
26 if base in dirs:
26 if base in dirs:
27 dirs[base] += 1
27 dirs[base] += 1
28 return
28 return
29 dirs[base] = 1
29 dirs[base] = 1
30
30
31 def _decdirs(dirs, path):
31 def _decdirs(dirs, path):
32 for base in _finddirs(path):
32 for base in _finddirs(path):
33 if dirs[base] > 1:
33 if dirs[base] > 1:
34 dirs[base] -= 1
34 dirs[base] -= 1
35 return
35 return
36 del dirs[base]
36 del dirs[base]
37
37
38 class dirstate(object):
38 class dirstate(object):
39
39
40 def __init__(self, opener, ui, root):
40 def __init__(self, opener, ui, root):
41 '''Create a new dirstate object.
41 '''Create a new dirstate object.
42
42
43 opener is an open()-like callable that can be used to open the
43 opener is an open()-like callable that can be used to open the
44 dirstate file; root is the root of the directory tracked by
44 dirstate file; root is the root of the directory tracked by
45 the dirstate.
45 the dirstate.
46 '''
46 '''
47 self._opener = opener
47 self._opener = opener
48 self._root = root
48 self._root = root
49 self._rootdir = os.path.join(root, '')
49 self._rootdir = os.path.join(root, '')
50 self._dirty = False
50 self._dirty = False
51 self._dirtypl = False
51 self._dirtypl = False
52 self._ui = ui
52 self._ui = ui
53
53
54 @propertycache
54 @propertycache
55 def _map(self):
55 def _map(self):
56 '''Return the dirstate contents as a map from filename to
56 '''Return the dirstate contents as a map from filename to
57 (state, mode, size, time).'''
57 (state, mode, size, time).'''
58 self._read()
58 self._read()
59 return self._map
59 return self._map
60
60
61 @propertycache
61 @propertycache
62 def _copymap(self):
62 def _copymap(self):
63 self._read()
63 self._read()
64 return self._copymap
64 return self._copymap
65
65
66 @propertycache
66 @propertycache
67 def _foldmap(self):
67 def _foldmap(self):
68 f = {}
68 f = {}
69 for name in self._map:
69 for name in self._map:
70 f[os.path.normcase(name)] = name
70 f[os.path.normcase(name)] = name
71 return f
71 return f
72
72
73 @propertycache
73 @propertycache
74 def _branch(self):
74 def _branch(self):
75 try:
75 try:
76 return self._opener("branch").read().strip() or "default"
76 return self._opener("branch").read().strip() or "default"
77 except IOError:
77 except IOError:
78 return "default"
78 return "default"
79
79
80 @propertycache
80 @propertycache
81 def _pl(self):
81 def _pl(self):
82 try:
82 try:
83 st = self._opener("dirstate").read(40)
83 st = self._opener("dirstate").read(40)
84 l = len(st)
84 l = len(st)
85 if l == 40:
85 if l == 40:
86 return st[:20], st[20:40]
86 return st[:20], st[20:40]
87 elif l > 0 and l < 40:
87 elif l > 0 and l < 40:
88 raise util.Abort(_('working directory state appears damaged!'))
88 raise util.Abort(_('working directory state appears damaged!'))
89 except IOError, err:
89 except IOError, err:
90 if err.errno != errno.ENOENT:
90 if err.errno != errno.ENOENT:
91 raise
91 raise
92 return [nullid, nullid]
92 return [nullid, nullid]
93
93
94 @propertycache
94 @propertycache
95 def _dirs(self):
95 def _dirs(self):
96 dirs = {}
96 dirs = {}
97 for f, s in self._map.iteritems():
97 for f, s in self._map.iteritems():
98 if s[0] != 'r':
98 if s[0] != 'r':
99 _incdirs(dirs, f)
99 _incdirs(dirs, f)
100 return dirs
100 return dirs
101
101
102 @propertycache
102 @propertycache
103 def _ignore(self):
103 def _ignore(self):
104 files = [self._join('.hgignore')]
104 files = [self._join('.hgignore')]
105 for name, path in self._ui.configitems("ui"):
105 for name, path in self._ui.configitems("ui"):
106 if name == 'ignore' or name.startswith('ignore.'):
106 if name == 'ignore' or name.startswith('ignore.'):
107 files.append(util.expandpath(path))
107 files.append(util.expandpath(path))
108 return ignore.ignore(self._root, files, self._ui.warn)
108 return ignore.ignore(self._root, files, self._ui.warn)
109
109
110 @propertycache
110 @propertycache
111 def _slash(self):
111 def _slash(self):
112 return self._ui.configbool('ui', 'slash') and os.sep != '/'
112 return self._ui.configbool('ui', 'slash') and os.sep != '/'
113
113
114 @propertycache
114 @propertycache
115 def _checklink(self):
115 def _checklink(self):
116 return util.checklink(self._root)
116 return util.checklink(self._root)
117
117
118 @propertycache
118 @propertycache
119 def _checkexec(self):
119 def _checkexec(self):
120 return util.checkexec(self._root)
120 return util.checkexec(self._root)
121
121
122 @propertycache
122 @propertycache
123 def _checkcase(self):
123 def _checkcase(self):
124 return not util.checkcase(self._join('.hg'))
124 return not util.checkcase(self._join('.hg'))
125
125
126 def _join(self, f):
126 def _join(self, f):
127 # much faster than os.path.join()
127 # much faster than os.path.join()
128 # it's safe because f is always a relative path
128 # it's safe because f is always a relative path
129 return self._rootdir + f
129 return self._rootdir + f
130
130
131 def flagfunc(self, fallback):
131 def flagfunc(self, fallback):
132 if self._checklink:
132 if self._checklink:
133 if self._checkexec:
133 if self._checkexec:
134 def f(x):
134 def f(x):
135 p = self._join(x)
135 p = self._join(x)
136 if os.path.islink(p):
136 if os.path.islink(p):
137 return 'l'
137 return 'l'
138 if util.is_exec(p):
138 if util.is_exec(p):
139 return 'x'
139 return 'x'
140 return ''
140 return ''
141 return f
141 return f
142 def f(x):
142 def f(x):
143 if os.path.islink(self._join(x)):
143 if os.path.islink(self._join(x)):
144 return 'l'
144 return 'l'
145 if 'x' in fallback(x):
145 if 'x' in fallback(x):
146 return 'x'
146 return 'x'
147 return ''
147 return ''
148 return f
148 return f
149 if self._checkexec:
149 if self._checkexec:
150 def f(x):
150 def f(x):
151 if 'l' in fallback(x):
151 if 'l' in fallback(x):
152 return 'l'
152 return 'l'
153 if util.is_exec(self._join(x)):
153 if util.is_exec(self._join(x)):
154 return 'x'
154 return 'x'
155 return ''
155 return ''
156 return f
156 return f
157 return fallback
157 return fallback
158
158
159 def getcwd(self):
159 def getcwd(self):
160 cwd = os.getcwd()
160 cwd = os.getcwd()
161 if cwd == self._root:
161 if cwd == self._root:
162 return ''
162 return ''
163 # self._root ends with a path separator if self._root is '/' or 'C:\'
163 # self._root ends with a path separator if self._root is '/' or 'C:\'
164 rootsep = self._root
164 rootsep = self._root
165 if not util.endswithsep(rootsep):
165 if not util.endswithsep(rootsep):
166 rootsep += os.sep
166 rootsep += os.sep
167 if cwd.startswith(rootsep):
167 if cwd.startswith(rootsep):
168 return cwd[len(rootsep):]
168 return cwd[len(rootsep):]
169 else:
169 else:
170 # we're outside the repo. return an absolute path.
170 # we're outside the repo. return an absolute path.
171 return cwd
171 return cwd
172
172
173 def pathto(self, f, cwd=None):
173 def pathto(self, f, cwd=None):
174 if cwd is None:
174 if cwd is None:
175 cwd = self.getcwd()
175 cwd = self.getcwd()
176 path = util.pathto(self._root, cwd, f)
176 path = util.pathto(self._root, cwd, f)
177 if self._slash:
177 if self._slash:
178 return util.normpath(path)
178 return util.normpath(path)
179 return path
179 return path
180
180
181 def __getitem__(self, key):
181 def __getitem__(self, key):
182 '''Return the current state of key (a filename) in the dirstate.
182 '''Return the current state of key (a filename) in the dirstate.
183
183
184 States are:
184 States are:
185 n normal
185 n normal
186 m needs merging
186 m needs merging
187 r marked for removal
187 r marked for removal
188 a marked for addition
188 a marked for addition
189 ? not tracked
189 ? not tracked
190 '''
190 '''
191 return self._map.get(key, ("?",))[0]
191 return self._map.get(key, ("?",))[0]
192
192
193 def __contains__(self, key):
193 def __contains__(self, key):
194 return key in self._map
194 return key in self._map
195
195
196 def __iter__(self):
196 def __iter__(self):
197 for x in sorted(self._map):
197 for x in sorted(self._map):
198 yield x
198 yield x
199
199
200 def parents(self):
200 def parents(self):
201 return self._pl
201 return self._pl
202
202
203 def branch(self):
203 def branch(self):
204 return self._branch
204 return self._branch
205
205
206 def setparents(self, p1, p2=nullid):
206 def setparents(self, p1, p2=nullid):
207 self._dirty = self._dirtypl = True
207 self._dirty = self._dirtypl = True
208 self._pl = p1, p2
208 self._pl = p1, p2
209
209
210 def setbranch(self, branch):
210 def setbranch(self, branch):
211 if branch in ['tip', '.', 'null']:
212 raise util.Abort(_('the name \'%s\' is reserved') % branch)
211 self._branch = branch
213 self._branch = branch
212 self._opener("branch", "w").write(branch + '\n')
214 self._opener("branch", "w").write(branch + '\n')
213
215
214 def _read(self):
216 def _read(self):
215 self._map = {}
217 self._map = {}
216 self._copymap = {}
218 self._copymap = {}
217 try:
219 try:
218 st = self._opener("dirstate").read()
220 st = self._opener("dirstate").read()
219 except IOError, err:
221 except IOError, err:
220 if err.errno != errno.ENOENT:
222 if err.errno != errno.ENOENT:
221 raise
223 raise
222 return
224 return
223 if not st:
225 if not st:
224 return
226 return
225
227
226 p = parsers.parse_dirstate(self._map, self._copymap, st)
228 p = parsers.parse_dirstate(self._map, self._copymap, st)
227 if not self._dirtypl:
229 if not self._dirtypl:
228 self._pl = p
230 self._pl = p
229
231
230 def invalidate(self):
232 def invalidate(self):
231 for a in "_map _copymap _foldmap _branch _pl _dirs _ignore".split():
233 for a in "_map _copymap _foldmap _branch _pl _dirs _ignore".split():
232 if a in self.__dict__:
234 if a in self.__dict__:
233 delattr(self, a)
235 delattr(self, a)
234 self._dirty = False
236 self._dirty = False
235
237
236 def copy(self, source, dest):
238 def copy(self, source, dest):
237 """Mark dest as a copy of source. Unmark dest if source is None."""
239 """Mark dest as a copy of source. Unmark dest if source is None."""
238 if source == dest:
240 if source == dest:
239 return
241 return
240 self._dirty = True
242 self._dirty = True
241 if source is not None:
243 if source is not None:
242 self._copymap[dest] = source
244 self._copymap[dest] = source
243 elif dest in self._copymap:
245 elif dest in self._copymap:
244 del self._copymap[dest]
246 del self._copymap[dest]
245
247
246 def copied(self, file):
248 def copied(self, file):
247 return self._copymap.get(file, None)
249 return self._copymap.get(file, None)
248
250
249 def copies(self):
251 def copies(self):
250 return self._copymap
252 return self._copymap
251
253
252 def _droppath(self, f):
254 def _droppath(self, f):
253 if self[f] not in "?r" and "_dirs" in self.__dict__:
255 if self[f] not in "?r" and "_dirs" in self.__dict__:
254 _decdirs(self._dirs, f)
256 _decdirs(self._dirs, f)
255
257
256 def _addpath(self, f, check=False):
258 def _addpath(self, f, check=False):
257 oldstate = self[f]
259 oldstate = self[f]
258 if check or oldstate == "r":
260 if check or oldstate == "r":
259 if '\r' in f or '\n' in f:
261 if '\r' in f or '\n' in f:
260 raise util.Abort(
262 raise util.Abort(
261 _("'\\n' and '\\r' disallowed in filenames: %r") % f)
263 _("'\\n' and '\\r' disallowed in filenames: %r") % f)
262 if f in self._dirs:
264 if f in self._dirs:
263 raise util.Abort(_('directory %r already in dirstate') % f)
265 raise util.Abort(_('directory %r already in dirstate') % f)
264 # shadows
266 # shadows
265 for d in _finddirs(f):
267 for d in _finddirs(f):
266 if d in self._dirs:
268 if d in self._dirs:
267 break
269 break
268 if d in self._map and self[d] != 'r':
270 if d in self._map and self[d] != 'r':
269 raise util.Abort(
271 raise util.Abort(
270 _('file %r in dirstate clashes with %r') % (d, f))
272 _('file %r in dirstate clashes with %r') % (d, f))
271 if oldstate in "?r" and "_dirs" in self.__dict__:
273 if oldstate in "?r" and "_dirs" in self.__dict__:
272 _incdirs(self._dirs, f)
274 _incdirs(self._dirs, f)
273
275
274 def normal(self, f):
276 def normal(self, f):
275 '''Mark a file normal and clean.'''
277 '''Mark a file normal and clean.'''
276 self._dirty = True
278 self._dirty = True
277 self._addpath(f)
279 self._addpath(f)
278 s = os.lstat(self._join(f))
280 s = os.lstat(self._join(f))
279 self._map[f] = ('n', s.st_mode, s.st_size, int(s.st_mtime))
281 self._map[f] = ('n', s.st_mode, s.st_size, int(s.st_mtime))
280 if f in self._copymap:
282 if f in self._copymap:
281 del self._copymap[f]
283 del self._copymap[f]
282
284
283 def normallookup(self, f):
285 def normallookup(self, f):
284 '''Mark a file normal, but possibly dirty.'''
286 '''Mark a file normal, but possibly dirty.'''
285 if self._pl[1] != nullid and f in self._map:
287 if self._pl[1] != nullid and f in self._map:
286 # if there is a merge going on and the file was either
288 # if there is a merge going on and the file was either
287 # in state 'm' or dirty before being removed, restore that state.
289 # in state 'm' or dirty before being removed, restore that state.
288 entry = self._map[f]
290 entry = self._map[f]
289 if entry[0] == 'r' and entry[2] in (-1, -2):
291 if entry[0] == 'r' and entry[2] in (-1, -2):
290 source = self._copymap.get(f)
292 source = self._copymap.get(f)
291 if entry[2] == -1:
293 if entry[2] == -1:
292 self.merge(f)
294 self.merge(f)
293 elif entry[2] == -2:
295 elif entry[2] == -2:
294 self.normaldirty(f)
296 self.normaldirty(f)
295 if source:
297 if source:
296 self.copy(source, f)
298 self.copy(source, f)
297 return
299 return
298 if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2:
300 if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2:
299 return
301 return
300 self._dirty = True
302 self._dirty = True
301 self._addpath(f)
303 self._addpath(f)
302 self._map[f] = ('n', 0, -1, -1)
304 self._map[f] = ('n', 0, -1, -1)
303 if f in self._copymap:
305 if f in self._copymap:
304 del self._copymap[f]
306 del self._copymap[f]
305
307
306 def normaldirty(self, f):
308 def normaldirty(self, f):
307 '''Mark a file normal, but dirty.'''
309 '''Mark a file normal, but dirty.'''
308 self._dirty = True
310 self._dirty = True
309 self._addpath(f)
311 self._addpath(f)
310 self._map[f] = ('n', 0, -2, -1)
312 self._map[f] = ('n', 0, -2, -1)
311 if f in self._copymap:
313 if f in self._copymap:
312 del self._copymap[f]
314 del self._copymap[f]
313
315
314 def add(self, f):
316 def add(self, f):
315 '''Mark a file added.'''
317 '''Mark a file added.'''
316 self._dirty = True
318 self._dirty = True
317 self._addpath(f, True)
319 self._addpath(f, True)
318 self._map[f] = ('a', 0, -1, -1)
320 self._map[f] = ('a', 0, -1, -1)
319 if f in self._copymap:
321 if f in self._copymap:
320 del self._copymap[f]
322 del self._copymap[f]
321
323
322 def remove(self, f):
324 def remove(self, f):
323 '''Mark a file removed.'''
325 '''Mark a file removed.'''
324 self._dirty = True
326 self._dirty = True
325 self._droppath(f)
327 self._droppath(f)
326 size = 0
328 size = 0
327 if self._pl[1] != nullid and f in self._map:
329 if self._pl[1] != nullid and f in self._map:
328 entry = self._map[f]
330 entry = self._map[f]
329 if entry[0] == 'm':
331 if entry[0] == 'm':
330 size = -1
332 size = -1
331 elif entry[0] == 'n' and entry[2] == -2:
333 elif entry[0] == 'n' and entry[2] == -2:
332 size = -2
334 size = -2
333 self._map[f] = ('r', 0, size, 0)
335 self._map[f] = ('r', 0, size, 0)
334 if size == 0 and f in self._copymap:
336 if size == 0 and f in self._copymap:
335 del self._copymap[f]
337 del self._copymap[f]
336
338
337 def merge(self, f):
339 def merge(self, f):
338 '''Mark a file merged.'''
340 '''Mark a file merged.'''
339 self._dirty = True
341 self._dirty = True
340 s = os.lstat(self._join(f))
342 s = os.lstat(self._join(f))
341 self._addpath(f)
343 self._addpath(f)
342 self._map[f] = ('m', s.st_mode, s.st_size, int(s.st_mtime))
344 self._map[f] = ('m', s.st_mode, s.st_size, int(s.st_mtime))
343 if f in self._copymap:
345 if f in self._copymap:
344 del self._copymap[f]
346 del self._copymap[f]
345
347
346 def forget(self, f):
348 def forget(self, f):
347 '''Forget a file.'''
349 '''Forget a file.'''
348 self._dirty = True
350 self._dirty = True
349 try:
351 try:
350 self._droppath(f)
352 self._droppath(f)
351 del self._map[f]
353 del self._map[f]
352 except KeyError:
354 except KeyError:
353 self._ui.warn(_("not in dirstate: %s\n") % f)
355 self._ui.warn(_("not in dirstate: %s\n") % f)
354
356
355 def _normalize(self, path, knownpath):
357 def _normalize(self, path, knownpath):
356 norm_path = os.path.normcase(path)
358 norm_path = os.path.normcase(path)
357 fold_path = self._foldmap.get(norm_path, None)
359 fold_path = self._foldmap.get(norm_path, None)
358 if fold_path is None:
360 if fold_path is None:
359 if knownpath or not os.path.exists(os.path.join(self._root, path)):
361 if knownpath or not os.path.exists(os.path.join(self._root, path)):
360 fold_path = path
362 fold_path = path
361 else:
363 else:
362 fold_path = self._foldmap.setdefault(norm_path,
364 fold_path = self._foldmap.setdefault(norm_path,
363 util.fspath(path, self._root))
365 util.fspath(path, self._root))
364 return fold_path
366 return fold_path
365
367
366 def clear(self):
368 def clear(self):
367 self._map = {}
369 self._map = {}
368 if "_dirs" in self.__dict__:
370 if "_dirs" in self.__dict__:
369 delattr(self, "_dirs")
371 delattr(self, "_dirs")
370 self._copymap = {}
372 self._copymap = {}
371 self._pl = [nullid, nullid]
373 self._pl = [nullid, nullid]
372 self._dirty = True
374 self._dirty = True
373
375
374 def rebuild(self, parent, files):
376 def rebuild(self, parent, files):
375 self.clear()
377 self.clear()
376 for f in files:
378 for f in files:
377 if 'x' in files.flags(f):
379 if 'x' in files.flags(f):
378 self._map[f] = ('n', 0777, -1, 0)
380 self._map[f] = ('n', 0777, -1, 0)
379 else:
381 else:
380 self._map[f] = ('n', 0666, -1, 0)
382 self._map[f] = ('n', 0666, -1, 0)
381 self._pl = (parent, nullid)
383 self._pl = (parent, nullid)
382 self._dirty = True
384 self._dirty = True
383
385
384 def write(self):
386 def write(self):
385 if not self._dirty:
387 if not self._dirty:
386 return
388 return
387 st = self._opener("dirstate", "w", atomictemp=True)
389 st = self._opener("dirstate", "w", atomictemp=True)
388
390
389 # use the modification time of the newly created temporary file as the
391 # use the modification time of the newly created temporary file as the
390 # filesystem's notion of 'now'
392 # filesystem's notion of 'now'
391 now = int(util.fstat(st).st_mtime)
393 now = int(util.fstat(st).st_mtime)
392
394
393 cs = cStringIO.StringIO()
395 cs = cStringIO.StringIO()
394 copymap = self._copymap
396 copymap = self._copymap
395 pack = struct.pack
397 pack = struct.pack
396 write = cs.write
398 write = cs.write
397 write("".join(self._pl))
399 write("".join(self._pl))
398 for f, e in self._map.iteritems():
400 for f, e in self._map.iteritems():
399 if f in copymap:
401 if f in copymap:
400 f = "%s\0%s" % (f, copymap[f])
402 f = "%s\0%s" % (f, copymap[f])
401
403
402 if e[0] == 'n' and e[3] == now:
404 if e[0] == 'n' and e[3] == now:
403 # The file was last modified "simultaneously" with the current
405 # The file was last modified "simultaneously" with the current
404 # write to dirstate (i.e. within the same second for file-
406 # write to dirstate (i.e. within the same second for file-
405 # systems with a granularity of 1 sec). This commonly happens
407 # systems with a granularity of 1 sec). This commonly happens
406 # for at least a couple of files on 'update'.
408 # for at least a couple of files on 'update'.
407 # The user could change the file without changing its size
409 # The user could change the file without changing its size
408 # within the same second. Invalidate the file's stat data in
410 # within the same second. Invalidate the file's stat data in
409 # dirstate, forcing future 'status' calls to compare the
411 # dirstate, forcing future 'status' calls to compare the
410 # contents of the file. This prevents mistakenly treating such
412 # contents of the file. This prevents mistakenly treating such
411 # files as clean.
413 # files as clean.
412 e = (e[0], 0, -1, -1) # mark entry as 'unset'
414 e = (e[0], 0, -1, -1) # mark entry as 'unset'
413
415
414 e = pack(_format, e[0], e[1], e[2], e[3], len(f))
416 e = pack(_format, e[0], e[1], e[2], e[3], len(f))
415 write(e)
417 write(e)
416 write(f)
418 write(f)
417 st.write(cs.getvalue())
419 st.write(cs.getvalue())
418 st.rename()
420 st.rename()
419 self._dirty = self._dirtypl = False
421 self._dirty = self._dirtypl = False
420
422
421 def _dirignore(self, f):
423 def _dirignore(self, f):
422 if f == '.':
424 if f == '.':
423 return False
425 return False
424 if self._ignore(f):
426 if self._ignore(f):
425 return True
427 return True
426 for p in _finddirs(f):
428 for p in _finddirs(f):
427 if self._ignore(p):
429 if self._ignore(p):
428 return True
430 return True
429 return False
431 return False
430
432
431 def walk(self, match, subrepos, unknown, ignored):
433 def walk(self, match, subrepos, unknown, ignored):
432 '''
434 '''
433 Walk recursively through the directory tree, finding all files
435 Walk recursively through the directory tree, finding all files
434 matched by match.
436 matched by match.
435
437
436 Return a dict mapping filename to stat-like object (either
438 Return a dict mapping filename to stat-like object (either
437 mercurial.osutil.stat instance or return value of os.stat()).
439 mercurial.osutil.stat instance or return value of os.stat()).
438 '''
440 '''
439
441
440 def fwarn(f, msg):
442 def fwarn(f, msg):
441 self._ui.warn('%s: %s\n' % (self.pathto(f), msg))
443 self._ui.warn('%s: %s\n' % (self.pathto(f), msg))
442 return False
444 return False
443
445
444 def badtype(mode):
446 def badtype(mode):
445 kind = _('unknown')
447 kind = _('unknown')
446 if stat.S_ISCHR(mode):
448 if stat.S_ISCHR(mode):
447 kind = _('character device')
449 kind = _('character device')
448 elif stat.S_ISBLK(mode):
450 elif stat.S_ISBLK(mode):
449 kind = _('block device')
451 kind = _('block device')
450 elif stat.S_ISFIFO(mode):
452 elif stat.S_ISFIFO(mode):
451 kind = _('fifo')
453 kind = _('fifo')
452 elif stat.S_ISSOCK(mode):
454 elif stat.S_ISSOCK(mode):
453 kind = _('socket')
455 kind = _('socket')
454 elif stat.S_ISDIR(mode):
456 elif stat.S_ISDIR(mode):
455 kind = _('directory')
457 kind = _('directory')
456 return _('unsupported file type (type is %s)') % kind
458 return _('unsupported file type (type is %s)') % kind
457
459
458 ignore = self._ignore
460 ignore = self._ignore
459 dirignore = self._dirignore
461 dirignore = self._dirignore
460 if ignored:
462 if ignored:
461 ignore = util.never
463 ignore = util.never
462 dirignore = util.never
464 dirignore = util.never
463 elif not unknown:
465 elif not unknown:
464 # if unknown and ignored are False, skip step 2
466 # if unknown and ignored are False, skip step 2
465 ignore = util.always
467 ignore = util.always
466 dirignore = util.always
468 dirignore = util.always
467
469
468 matchfn = match.matchfn
470 matchfn = match.matchfn
469 badfn = match.bad
471 badfn = match.bad
470 dmap = self._map
472 dmap = self._map
471 normpath = util.normpath
473 normpath = util.normpath
472 listdir = osutil.listdir
474 listdir = osutil.listdir
473 lstat = os.lstat
475 lstat = os.lstat
474 getkind = stat.S_IFMT
476 getkind = stat.S_IFMT
475 dirkind = stat.S_IFDIR
477 dirkind = stat.S_IFDIR
476 regkind = stat.S_IFREG
478 regkind = stat.S_IFREG
477 lnkkind = stat.S_IFLNK
479 lnkkind = stat.S_IFLNK
478 join = self._join
480 join = self._join
479 work = []
481 work = []
480 wadd = work.append
482 wadd = work.append
481
483
482 if self._checkcase:
484 if self._checkcase:
483 normalize = self._normalize
485 normalize = self._normalize
484 else:
486 else:
485 normalize = lambda x, y: x
487 normalize = lambda x, y: x
486
488
487 exact = skipstep3 = False
489 exact = skipstep3 = False
488 if matchfn == match.exact: # match.exact
490 if matchfn == match.exact: # match.exact
489 exact = True
491 exact = True
490 dirignore = util.always # skip step 2
492 dirignore = util.always # skip step 2
491 elif match.files() and not match.anypats(): # match.match, no patterns
493 elif match.files() and not match.anypats(): # match.match, no patterns
492 skipstep3 = True
494 skipstep3 = True
493
495
494 files = set(match.files())
496 files = set(match.files())
495 if not files or '.' in files:
497 if not files or '.' in files:
496 files = ['']
498 files = ['']
497 results = dict.fromkeys(subrepos)
499 results = dict.fromkeys(subrepos)
498 results['.hg'] = None
500 results['.hg'] = None
499
501
500 # step 1: find all explicit files
502 # step 1: find all explicit files
501 for ff in sorted(files):
503 for ff in sorted(files):
502 nf = normalize(normpath(ff), False)
504 nf = normalize(normpath(ff), False)
503 if nf in results:
505 if nf in results:
504 continue
506 continue
505
507
506 try:
508 try:
507 st = lstat(join(nf))
509 st = lstat(join(nf))
508 kind = getkind(st.st_mode)
510 kind = getkind(st.st_mode)
509 if kind == dirkind:
511 if kind == dirkind:
510 skipstep3 = False
512 skipstep3 = False
511 if nf in dmap:
513 if nf in dmap:
512 #file deleted on disk but still in dirstate
514 #file deleted on disk but still in dirstate
513 results[nf] = None
515 results[nf] = None
514 match.dir(nf)
516 match.dir(nf)
515 if not dirignore(nf):
517 if not dirignore(nf):
516 wadd(nf)
518 wadd(nf)
517 elif kind == regkind or kind == lnkkind:
519 elif kind == regkind or kind == lnkkind:
518 results[nf] = st
520 results[nf] = st
519 else:
521 else:
520 badfn(ff, badtype(kind))
522 badfn(ff, badtype(kind))
521 if nf in dmap:
523 if nf in dmap:
522 results[nf] = None
524 results[nf] = None
523 except OSError, inst:
525 except OSError, inst:
524 if nf in dmap: # does it exactly match a file?
526 if nf in dmap: # does it exactly match a file?
525 results[nf] = None
527 results[nf] = None
526 else: # does it match a directory?
528 else: # does it match a directory?
527 prefix = nf + "/"
529 prefix = nf + "/"
528 for fn in dmap:
530 for fn in dmap:
529 if fn.startswith(prefix):
531 if fn.startswith(prefix):
530 match.dir(nf)
532 match.dir(nf)
531 skipstep3 = False
533 skipstep3 = False
532 break
534 break
533 else:
535 else:
534 badfn(ff, inst.strerror)
536 badfn(ff, inst.strerror)
535
537
536 # step 2: visit subdirectories
538 # step 2: visit subdirectories
537 while work:
539 while work:
538 nd = work.pop()
540 nd = work.pop()
539 skip = None
541 skip = None
540 if nd == '.':
542 if nd == '.':
541 nd = ''
543 nd = ''
542 else:
544 else:
543 skip = '.hg'
545 skip = '.hg'
544 try:
546 try:
545 entries = listdir(join(nd), stat=True, skip=skip)
547 entries = listdir(join(nd), stat=True, skip=skip)
546 except OSError, inst:
548 except OSError, inst:
547 if inst.errno == errno.EACCES:
549 if inst.errno == errno.EACCES:
548 fwarn(nd, inst.strerror)
550 fwarn(nd, inst.strerror)
549 continue
551 continue
550 raise
552 raise
551 for f, kind, st in entries:
553 for f, kind, st in entries:
552 nf = normalize(nd and (nd + "/" + f) or f, True)
554 nf = normalize(nd and (nd + "/" + f) or f, True)
553 if nf not in results:
555 if nf not in results:
554 if kind == dirkind:
556 if kind == dirkind:
555 if not ignore(nf):
557 if not ignore(nf):
556 match.dir(nf)
558 match.dir(nf)
557 wadd(nf)
559 wadd(nf)
558 if nf in dmap and matchfn(nf):
560 if nf in dmap and matchfn(nf):
559 results[nf] = None
561 results[nf] = None
560 elif kind == regkind or kind == lnkkind:
562 elif kind == regkind or kind == lnkkind:
561 if nf in dmap:
563 if nf in dmap:
562 if matchfn(nf):
564 if matchfn(nf):
563 results[nf] = st
565 results[nf] = st
564 elif matchfn(nf) and not ignore(nf):
566 elif matchfn(nf) and not ignore(nf):
565 results[nf] = st
567 results[nf] = st
566 elif nf in dmap and matchfn(nf):
568 elif nf in dmap and matchfn(nf):
567 results[nf] = None
569 results[nf] = None
568
570
569 # step 3: report unseen items in the dmap hash
571 # step 3: report unseen items in the dmap hash
570 if not skipstep3 and not exact:
572 if not skipstep3 and not exact:
571 visit = sorted([f for f in dmap if f not in results and matchfn(f)])
573 visit = sorted([f for f in dmap if f not in results and matchfn(f)])
572 for nf, st in zip(visit, util.statfiles([join(i) for i in visit])):
574 for nf, st in zip(visit, util.statfiles([join(i) for i in visit])):
573 if not st is None and not getkind(st.st_mode) in (regkind, lnkkind):
575 if not st is None and not getkind(st.st_mode) in (regkind, lnkkind):
574 st = None
576 st = None
575 results[nf] = st
577 results[nf] = st
576 for s in subrepos:
578 for s in subrepos:
577 del results[s]
579 del results[s]
578 del results['.hg']
580 del results['.hg']
579 return results
581 return results
580
582
581 def status(self, match, subrepos, ignored, clean, unknown):
583 def status(self, match, subrepos, ignored, clean, unknown):
582 '''Determine the status of the working copy relative to the
584 '''Determine the status of the working copy relative to the
583 dirstate and return a tuple of lists (unsure, modified, added,
585 dirstate and return a tuple of lists (unsure, modified, added,
584 removed, deleted, unknown, ignored, clean), where:
586 removed, deleted, unknown, ignored, clean), where:
585
587
586 unsure:
588 unsure:
587 files that might have been modified since the dirstate was
589 files that might have been modified since the dirstate was
588 written, but need to be read to be sure (size is the same
590 written, but need to be read to be sure (size is the same
589 but mtime differs)
591 but mtime differs)
590 modified:
592 modified:
591 files that have definitely been modified since the dirstate
593 files that have definitely been modified since the dirstate
592 was written (different size or mode)
594 was written (different size or mode)
593 added:
595 added:
594 files that have been explicitly added with hg add
596 files that have been explicitly added with hg add
595 removed:
597 removed:
596 files that have been explicitly removed with hg remove
598 files that have been explicitly removed with hg remove
597 deleted:
599 deleted:
598 files that have been deleted through other means ("missing")
600 files that have been deleted through other means ("missing")
599 unknown:
601 unknown:
600 files not in the dirstate that are not ignored
602 files not in the dirstate that are not ignored
601 ignored:
603 ignored:
602 files not in the dirstate that are ignored
604 files not in the dirstate that are ignored
603 (by _dirignore())
605 (by _dirignore())
604 clean:
606 clean:
605 files that have definitely not been modified since the
607 files that have definitely not been modified since the
606 dirstate was written
608 dirstate was written
607 '''
609 '''
608 listignored, listclean, listunknown = ignored, clean, unknown
610 listignored, listclean, listunknown = ignored, clean, unknown
609 lookup, modified, added, unknown, ignored = [], [], [], [], []
611 lookup, modified, added, unknown, ignored = [], [], [], [], []
610 removed, deleted, clean = [], [], []
612 removed, deleted, clean = [], [], []
611
613
612 dmap = self._map
614 dmap = self._map
613 ladd = lookup.append # aka "unsure"
615 ladd = lookup.append # aka "unsure"
614 madd = modified.append
616 madd = modified.append
615 aadd = added.append
617 aadd = added.append
616 uadd = unknown.append
618 uadd = unknown.append
617 iadd = ignored.append
619 iadd = ignored.append
618 radd = removed.append
620 radd = removed.append
619 dadd = deleted.append
621 dadd = deleted.append
620 cadd = clean.append
622 cadd = clean.append
621
623
622 for fn, st in self.walk(match, subrepos, listunknown,
624 for fn, st in self.walk(match, subrepos, listunknown,
623 listignored).iteritems():
625 listignored).iteritems():
624 if fn not in dmap:
626 if fn not in dmap:
625 if (listignored or match.exact(fn)) and self._dirignore(fn):
627 if (listignored or match.exact(fn)) and self._dirignore(fn):
626 if listignored:
628 if listignored:
627 iadd(fn)
629 iadd(fn)
628 elif listunknown:
630 elif listunknown:
629 uadd(fn)
631 uadd(fn)
630 continue
632 continue
631
633
632 state, mode, size, time = dmap[fn]
634 state, mode, size, time = dmap[fn]
633
635
634 if not st and state in "nma":
636 if not st and state in "nma":
635 dadd(fn)
637 dadd(fn)
636 elif state == 'n':
638 elif state == 'n':
637 if (size >= 0 and
639 if (size >= 0 and
638 (size != st.st_size
640 (size != st.st_size
639 or ((mode ^ st.st_mode) & 0100 and self._checkexec))
641 or ((mode ^ st.st_mode) & 0100 and self._checkexec))
640 or size == -2
642 or size == -2
641 or fn in self._copymap):
643 or fn in self._copymap):
642 madd(fn)
644 madd(fn)
643 elif time != int(st.st_mtime):
645 elif time != int(st.st_mtime):
644 ladd(fn)
646 ladd(fn)
645 elif listclean:
647 elif listclean:
646 cadd(fn)
648 cadd(fn)
647 elif state == 'm':
649 elif state == 'm':
648 madd(fn)
650 madd(fn)
649 elif state == 'a':
651 elif state == 'a':
650 aadd(fn)
652 aadd(fn)
651 elif state == 'r':
653 elif state == 'r':
652 radd(fn)
654 radd(fn)
653
655
654 return (lookup, modified, added, removed, deleted, unknown, ignored,
656 return (lookup, modified, added, removed, deleted, unknown, ignored,
655 clean)
657 clean)
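The dirstate.py hunk above adds the same reserved-name guard to setbranch (via util.Abort). Elsewhere in the hunk, write() serializes each tracked file with the module's _format = ">cllll" layout: state byte, mode, size, mtime, and the length of the name field, followed by the name itself (a copy record stores "dest\0source" in that field). The sketch below illustrates that packing under two assumptions: pack_entry is a hypothetical helper, and Python 2 struct semantics apply ('c' packs a one-character str), matching the module.

# Hypothetical helper illustrating how write() above packs one entry;
# Python 2 style, not Mercurial API.
import struct

_format = ">cllll"   # state, mode, size, mtime, length of the name field

def pack_entry(state, mode, size, mtime, name, copysource=None):
    if copysource:
        # write() stores copies as "dest\0source" in the name field
        name = "%s\0%s" % (name, copysource)
    return struct.pack(_format, state, mode, size, mtime, len(name)) + name

# e.g. pack_entry('n', 0644, 12, 0, 'a.txt') -> 17-byte header followed by 'a.txt'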
@@ -1,90 +1,94 @@
1 #!/bin/sh
1 #!/bin/sh
2
2
3 hg init a
3 hg init a
4 cd a
4 cd a
5 echo 'root' >root
5 echo 'root' >root
6 hg add root
6 hg add root
7 hg commit -d '0 0' -m "Adding root node"
7 hg commit -d '0 0' -m "Adding root node"
8
8
9 echo 'a' >a
9 echo 'a' >a
10 hg add a
10 hg add a
11 hg branch a
11 hg branch a
12 hg commit -d '1 0' -m "Adding a branch"
12 hg commit -d '1 0' -m "Adding a branch"
13
13
14 hg branch q
14 hg branch q
15 echo 'aa' >a
15 echo 'aa' >a
16 hg branch -C
16 hg branch -C
17 hg commit -d '2 0' -m "Adding to a branch"
17 hg commit -d '2 0' -m "Adding to a branch"
18
18
19 hg update -C 0
19 hg update -C 0
20 echo 'b' >b
20 echo 'b' >b
21 hg add b
21 hg add b
22 hg branch b
22 hg branch b
23 hg commit -d '2 0' -m "Adding b branch"
23 hg commit -d '2 0' -m "Adding b branch"
24
24
25 echo 'bh1' >bh1
25 echo 'bh1' >bh1
26 hg add bh1
26 hg add bh1
27 hg commit -d '3 0' -m "Adding b branch head 1"
27 hg commit -d '3 0' -m "Adding b branch head 1"
28
28
29 hg update -C 2
29 hg update -C 2
30 echo 'bh2' >bh2
30 echo 'bh2' >bh2
31 hg add bh2
31 hg add bh2
32 hg commit -d '4 0' -m "Adding b branch head 2"
32 hg commit -d '4 0' -m "Adding b branch head 2"
33
33
34 echo 'c' >c
34 echo 'c' >c
35 hg add c
35 hg add c
36 hg branch c
36 hg branch c
37 hg commit -d '5 0' -m "Adding c branch"
37 hg commit -d '5 0' -m "Adding c branch"
38
38
39 hg branch tip
40 hg branch null
41 hg branch .
42
39 echo 'd' >d
43 echo 'd' >d
40 hg add d
44 hg add d
41 hg branch 'a branch name much longer than the default justification used by branches'
45 hg branch 'a branch name much longer than the default justification used by branches'
42 hg commit -d '6 0' -m "Adding d branch"
46 hg commit -d '6 0' -m "Adding d branch"
43
47
44 hg branches
48 hg branches
45 echo '-------'
49 echo '-------'
46 hg branches -a
50 hg branches -a
47
51
48 echo "--- Branch a"
52 echo "--- Branch a"
49 hg log -b a
53 hg log -b a
50
54
51 echo "---- Branch b"
55 echo "---- Branch b"
52 hg log -b b
56 hg log -b b
53
57
54 echo "---- going to test branch closing"
58 echo "---- going to test branch closing"
55 hg branches
59 hg branches
56 hg up -C b
60 hg up -C b
57 echo 'xxx1' >> b
61 echo 'xxx1' >> b
58 hg commit -d '7 0' -m 'adding cset to branch b'
62 hg commit -d '7 0' -m 'adding cset to branch b'
59 hg up -C aee39cd168d0
63 hg up -C aee39cd168d0
60 echo 'xxx2' >> b
64 echo 'xxx2' >> b
61 hg commit -d '8 0' -m 'adding head to branch b'
65 hg commit -d '8 0' -m 'adding head to branch b'
62 echo 'xxx3' >> b
66 echo 'xxx3' >> b
63 hg commit -d '9 0' -m 'adding another cset to branch b'
67 hg commit -d '9 0' -m 'adding another cset to branch b'
64 hg branches
68 hg branches
65 hg heads --closed
69 hg heads --closed
66 hg heads
70 hg heads
67 hg commit -d '9 0' --close-branch -m 'prune bad branch'
71 hg commit -d '9 0' --close-branch -m 'prune bad branch'
68 hg branches -a
72 hg branches -a
69 hg up -C b
73 hg up -C b
70 hg commit -d '9 0' --close-branch -m 'close this part branch too'
74 hg commit -d '9 0' --close-branch -m 'close this part branch too'
71 echo '--- b branch should be inactive'
75 echo '--- b branch should be inactive'
72 hg branches
76 hg branches
73 hg branches -c
77 hg branches -c
74 hg branches -a
78 hg branches -a
75 hg heads b
79 hg heads b
76 hg heads --closed b
80 hg heads --closed b
77 echo 'xxx4' >> b
81 echo 'xxx4' >> b
78 hg commit -d '9 0' -m 'reopen branch with a change'
82 hg commit -d '9 0' -m 'reopen branch with a change'
79 echo '--- branch b is back in action'
83 echo '--- branch b is back in action'
80 hg branches -a
84 hg branches -a
81 echo '---- test heads listings'
85 echo '---- test heads listings'
82 hg heads
86 hg heads
83 echo '% branch default'
87 echo '% branch default'
84 hg heads default
88 hg heads default
85 echo '% branch a'
89 echo '% branch a'
86 hg heads a
90 hg heads a
87 hg heads --active a
91 hg heads --active a
88 echo '% branch b'
92 echo '% branch b'
89 hg heads b
93 hg heads b
90 hg heads --closed b
94 hg heads --closed b
@@ -1,241 +1,244 @@
1 marked working directory as branch a
1 marked working directory as branch a
2 marked working directory as branch q
2 marked working directory as branch q
3 reset working directory to branch a
3 reset working directory to branch a
4 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
4 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
5 marked working directory as branch b
5 marked working directory as branch b
6 created new head
6 created new head
7 1 files updated, 0 files merged, 2 files removed, 0 files unresolved
1 files updated, 0 files merged, 2 files removed, 0 files unresolved
marked working directory as branch c
abort: the name 'tip' is reserved
abort: the name 'null' is reserved
abort: the name '.' is reserved
marked working directory as branch a branch name much longer than the default justification used by branches
a branch name much longer than the default justification used by branches 7:10ff5895aa57
b 4:aee39cd168d0
c 6:589736a22561 (inactive)
a 5:d8cbc61dbaa6 (inactive)
default 0:19709c5a4e75 (inactive)
-------
a branch name much longer than the default justification used by branches 7:10ff5895aa57
b 4:aee39cd168d0
--- Branch a
changeset: 5:d8cbc61dbaa6
branch: a
parent: 2:881fe2b92ad0
user: test
date: Thu Jan 01 00:00:04 1970 +0000
summary: Adding b branch head 2

changeset: 2:881fe2b92ad0
branch: a
user: test
date: Thu Jan 01 00:00:02 1970 +0000
summary: Adding to a branch

changeset: 1:dd6b440dd85a
branch: a
user: test
date: Thu Jan 01 00:00:01 1970 +0000
summary: Adding a branch

---- Branch b
changeset: 4:aee39cd168d0
branch: b
user: test
date: Thu Jan 01 00:00:03 1970 +0000
summary: Adding b branch head 1

changeset: 3:ac22033332d1
branch: b
parent: 0:19709c5a4e75
user: test
date: Thu Jan 01 00:00:02 1970 +0000
summary: Adding b branch

---- going to test branch closing
a branch name much longer than the default justification used by branches 7:10ff5895aa57
b 4:aee39cd168d0
c 6:589736a22561 (inactive)
a 5:d8cbc61dbaa6 (inactive)
default 0:19709c5a4e75 (inactive)
2 files updated, 0 files merged, 4 files removed, 0 files unresolved
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
created new head
b 10:bfbe841b666e
a branch name much longer than the default justification used by branches 7:10ff5895aa57
c 6:589736a22561 (inactive)
a 5:d8cbc61dbaa6 (inactive)
default 0:19709c5a4e75 (inactive)
changeset: 10:bfbe841b666e
branch: b
tag: tip
user: test
date: Thu Jan 01 00:00:09 1970 +0000
summary: adding another cset to branch b

changeset: 8:eebb944467c9
branch: b
parent: 4:aee39cd168d0
user: test
date: Thu Jan 01 00:00:07 1970 +0000
summary: adding cset to branch b

changeset: 7:10ff5895aa57
branch: a branch name much longer than the default justification used by branches
user: test
date: Thu Jan 01 00:00:06 1970 +0000
summary: Adding d branch

changeset: 6:589736a22561
branch: c
user: test
date: Thu Jan 01 00:00:05 1970 +0000
summary: Adding c branch

changeset: 5:d8cbc61dbaa6
branch: a
parent: 2:881fe2b92ad0
user: test
date: Thu Jan 01 00:00:04 1970 +0000
summary: Adding b branch head 2

changeset: 0:19709c5a4e75
user: test
date: Thu Jan 01 00:00:00 1970 +0000
summary: Adding root node

changeset: 10:bfbe841b666e
branch: b
tag: tip
user: test
date: Thu Jan 01 00:00:09 1970 +0000
summary: adding another cset to branch b

changeset: 8:eebb944467c9
branch: b
parent: 4:aee39cd168d0
user: test
date: Thu Jan 01 00:00:07 1970 +0000
summary: adding cset to branch b

changeset: 7:10ff5895aa57
branch: a branch name much longer than the default justification used by branches
user: test
date: Thu Jan 01 00:00:06 1970 +0000
summary: Adding d branch

changeset: 6:589736a22561
branch: c
user: test
date: Thu Jan 01 00:00:05 1970 +0000
summary: Adding c branch

changeset: 5:d8cbc61dbaa6
branch: a
parent: 2:881fe2b92ad0
user: test
date: Thu Jan 01 00:00:04 1970 +0000
summary: Adding b branch head 2

changeset: 0:19709c5a4e75
user: test
date: Thu Jan 01 00:00:00 1970 +0000
summary: Adding root node

b 8:eebb944467c9
a branch name much longer than the default justification used by branches 7:10ff5895aa57
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
--- b branch should be inactive
a branch name much longer than the default justification used by branches 7:10ff5895aa57
c 6:589736a22561 (inactive)
a 5:d8cbc61dbaa6 (inactive)
default 0:19709c5a4e75 (inactive)
a branch name much longer than the default justification used by branches 7:10ff5895aa57
b 12:2da6583810df (closed)
c 6:589736a22561 (inactive)
a 5:d8cbc61dbaa6 (inactive)
default 0:19709c5a4e75 (inactive)
a branch name much longer than the default justification used by branches 7:10ff5895aa57
no open branch heads found on branches b
changeset: 12:2da6583810df
branch: b
tag: tip
parent: 8:eebb944467c9
user: test
date: Thu Jan 01 00:00:09 1970 +0000
summary: close this part branch too

changeset: 11:c84627f3c15d
branch: b
user: test
date: Thu Jan 01 00:00:09 1970 +0000
summary: prune bad branch

--- branch b is back in action
b 13:6ac12926b8c3
a branch name much longer than the default justification used by branches 7:10ff5895aa57
---- test heads listings
changeset: 13:6ac12926b8c3
branch: b
tag: tip
user: test
date: Thu Jan 01 00:00:09 1970 +0000
summary: reopen branch with a change

changeset: 7:10ff5895aa57
branch: a branch name much longer than the default justification used by branches
user: test
date: Thu Jan 01 00:00:06 1970 +0000
summary: Adding d branch

changeset: 6:589736a22561
branch: c
user: test
date: Thu Jan 01 00:00:05 1970 +0000
summary: Adding c branch

changeset: 5:d8cbc61dbaa6
branch: a
parent: 2:881fe2b92ad0
user: test
date: Thu Jan 01 00:00:04 1970 +0000
summary: Adding b branch head 2

changeset: 0:19709c5a4e75
user: test
date: Thu Jan 01 00:00:00 1970 +0000
summary: Adding root node

% branch default
changeset: 0:19709c5a4e75
user: test
date: Thu Jan 01 00:00:00 1970 +0000
summary: Adding root node

% branch a
changeset: 5:d8cbc61dbaa6
branch: a
parent: 2:881fe2b92ad0
user: test
date: Thu Jan 01 00:00:04 1970 +0000
summary: Adding b branch head 2

no open branch heads found on branches a
% branch b
changeset: 13:6ac12926b8c3
branch: b
tag: tip
user: test
date: Thu Jan 01 00:00:09 1970 +0000
summary: reopen branch with a change

changeset: 13:6ac12926b8c3
branch: b
tag: tip
user: test
date: Thu Jan 01 00:00:09 1970 +0000
summary: reopen branch with a change

changeset: 11:c84627f3c15d
branch: b
user: test
date: Thu Jan 01 00:00:09 1970 +0000
summary: prune bad branch

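The expected output above exercises two behaviours of named branches: the names 'tip', 'null' and '.' are refused by hg branch, and a branch drops out of the default hg branches listing once its head is committed with --close-branch ("(closed)"), reappearing when a later commit reopens it. The following is a minimal Python sketch of commands that produce output of this kind, assuming hg is installed and on PATH; the hg() helper, the scratch repository and the file name "f" are illustrative only and are not part of the actual test script.

    import os
    import subprocess
    import tempfile

    def hg(*args, cwd):
        """Run an hg command and return its combined stdout/stderr."""
        proc = subprocess.run(["hg"] + list(args), cwd=cwd,
                              capture_output=True, text=True)
        return proc.stdout + proc.stderr

    repo = tempfile.mkdtemp()          # throwaway repository for the demo
    hg("init", cwd=repo)

    # Reserved names are rejected outright.
    for name in ("tip", "null", "."):
        print(hg("branch", name, cwd=repo), end="")   # "abort: the name '...' is reserved"

    # Closing a branch hides it from the default listing; --closed shows it
    # again with a "(closed)" marker.
    hg("branch", "b", cwd=repo)
    with open(os.path.join(repo, "f"), "w") as f:
        f.write("x\n")
    hg("commit", "-A", "-u", "test", "-m", "open branch b", cwd=repo)
    hg("commit", "--close-branch", "-u", "test", "-m", "close branch b", cwd=repo)
    print(hg("branches", cwd=repo), end="")              # closed branch not listed
    print(hg("branches", "--closed", cwd=repo), end="")  # "b ... (closed)"

A commit on the closed branch afterwards would reopen it, which is what the "reopen branch with a change" changesets in the listing above demonstrate.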