##// END OF EJS Templates
merge with stable
Matt Mackall -
r14268:a55a0045 merge default
parent child Browse files
Show More
@@ -1,216 +1,208 b''
1 # Mercurial bookmark support code
1 # Mercurial bookmark support code
2 #
2 #
3 # Copyright 2008 David Soria Parra <dsp@php.net>
3 # Copyright 2008 David Soria Parra <dsp@php.net>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from mercurial.i18n import _
8 from mercurial.i18n import _
9 from mercurial.node import hex
9 from mercurial.node import hex
10 from mercurial import encoding, error, util
10 from mercurial import encoding, error, util
11 import errno, os
11 import errno, os
12
12
def valid(mark):
    '''Report whether a bookmark name is legal.

    Colon, NUL, and line-ending characters would corrupt the
    one-entry-per-line bookmark storage format, so any name
    containing one of them is rejected.
    '''
    return not any(ch in mark for ch in (':', '\0', '\n', '\r'))
18
18
def read(repo):
    '''Parse .hg/bookmarks file and return a dictionary

    Bookmarks are stored as {HASH}\\s{NAME}\\n (localtags format) values
    in the .hg/bookmarks file.
    Read the file and return a (name=>nodeid) dictionary
    '''
    bookmarks = {}
    try:
        for line in repo.opener('bookmarks'):
            # each line is "<hex node> <name>"; the name may itself
            # contain spaces, hence the single-split
            sha, refspec = line.strip().split(' ', 1)
            # names are stored in UTF-8 on disk, used in local encoding
            refspec = encoding.tolocal(refspec)
            try:
                bookmarks[refspec] = repo.changelog.lookup(sha)
            except error.RepoLookupError:
                # bookmark points to an unknown changeset (e.g. after
                # a strip): silently drop it
                pass
    except IOError, inst:
        # a missing bookmarks file simply means "no bookmarks";
        # any other I/O failure is a real error
        if inst.errno != errno.ENOENT:
            raise
    return bookmarks
39
39
def readcurrent(repo):
    '''Get the current bookmark

    If we use gittishsh branches we have a current bookmark that
    we are on. This function returns the name of the bookmark. It
    is stored in .hg/bookmarks.current
    '''
    mark = None
    try:
        file = repo.opener('bookmarks.current')
    except IOError, inst:
        if inst.errno != errno.ENOENT:
            raise
        # no bookmarks.current file: no bookmark is active
        return None
    try:
        # No readline() in posixfile_nt, reading everything is cheap
        mark = encoding.tolocal((file.readlines() or [''])[0])
        if mark == '' or mark not in repo._bookmarks:
            # an empty or stale entry (bookmark since deleted)
            # counts as no current bookmark
            mark = None
    finally:
        file.close()
    return mark
62
62
def write(repo):
    '''Write bookmarks

    Write the given bookmark => hash dictionary to the .hg/bookmarks file
    in a format equal to those of localtags.

    NOTE(review): the undo.bookmarks backup formerly written here is no
    longer produced by this function; the stale docstring sentence
    claiming it was has been dropped.
    '''
    refs = repo._bookmarks

    if repo._bookmarkcurrent not in refs:
        # the active bookmark vanished: clear bookmarks.current
        setcurrent(repo, None)
    for mark in refs.keys():
        if not valid(mark):
            # apply % outside _() so the untranslated string is the
            # catalog msgid; formatting first would defeat translation
            raise util.Abort(_("bookmark '%s' contains illegal "
                               "character") % mark)

    wlock = repo.wlock()
    try:
        file = repo.opener('bookmarks', 'w', atomictemp=True)
        for refspec, node in refs.iteritems():
            # one "<hex node> <utf-8 name>" entry per line
            file.write("%s %s\n" % (hex(node), encoding.fromlocal(refspec)))
        file.rename()

        # touch 00changelog.i so hgweb reloads bookmarks (no lock needed)
        try:
            os.utime(repo.sjoin('00changelog.i'), None)
        except OSError:
            # best-effort only; failing to touch is harmless
            pass

    finally:
        wlock.release()
105
97
def setcurrent(repo, mark):
    '''Set the name of the bookmark that we are currently on

    Set the name of the bookmark that we are on (hg update <bookmark>).
    The name is recorded in .hg/bookmarks.current
    '''
    current = repo._bookmarkcurrent
    if current == mark:
        # already active: avoid rewriting the file
        return

    if mark not in repo._bookmarks:
        # an unknown bookmark means "no active bookmark"
        mark = ''
    if not valid(mark):
        # apply % outside _() so the untranslated string is the
        # catalog msgid; formatting first would defeat translation
        raise util.Abort(_("bookmark '%s' contains illegal "
                           "character") % mark)

    wlock = repo.wlock()
    try:
        file = repo.opener('bookmarks.current', 'w', atomictemp=True)
        file.write(mark)
        file.rename()
    finally:
        wlock.release()
    # keep the in-memory cache in sync with what was just written
    repo._bookmarkcurrent = mark
130
122
def updatecurrentbookmark(repo, oldnode, curbranch):
    '''Move the current bookmark from oldnode to the head of curbranch.

    Falls back to tip when curbranch is "default" but no such branch
    exists; aborts for any other unknown branch.
    '''
    try:
        # look the branch head up separately so that a KeyError raised
        # inside update() itself is not mistaken for an unknown branch
        node = repo.branchtags()[curbranch]
    except KeyError:
        if curbranch == "default": # no default branch!
            node = repo.lookup("tip")
        else:
            raise util.Abort(_("branch %s not found") % curbranch)
    update(repo, oldnode, node)
139
131
def update(repo, parents, node):
    '''Advance the current bookmark after a working-copy update.

    If the active bookmark sits on one of ``parents`` and ``node`` is a
    descendant of it, move the bookmark to ``node`` and persist the
    bookmark file.  Otherwise nothing is written.
    '''
    marks = repo._bookmarks
    cur = repo._bookmarkcurrent
    moved = False
    if cur and marks[cur] in parents:
        old = repo[marks[cur]]
        new = repo[node]
        if new in old.descendants():
            marks[cur] = new.node()
            moved = True
    if moved:
        write(repo)
152
144
def listbookmarks(repo):
    '''Return {name: hex node} for every bookmark in repo.

    Repository types without bookmark support (e.g.
    statichttprepository) yield an empty dict.
    '''
    if not hasattr(repo, '_bookmarks'):
        return {}
    return dict((name, hex(node))
                for name, node in repo._bookmarks.iteritems())
163
155
def pushbookmark(repo, key, old, new):
    '''Move bookmark ``key`` from hex node ``old`` to hex node ``new``.

    An empty ``new`` deletes the bookmark.  Returns False when the
    precondition (current value == old) fails or ``new`` is unknown,
    True after the bookmark file has been rewritten.
    '''
    wlock = repo.wlock()
    try:
        marks = repo._bookmarks
        if hex(marks.get(key, '')) != old:
            # somebody moved the bookmark under us: refuse
            return False
        if new == '':
            del marks[key]
        elif new not in repo:
            return False
        else:
            marks[key] = repo[new].node()
        write(repo)
        return True
    finally:
        wlock.release()
180
172
def updatefromremote(ui, repo, remote):
    '''Fast-forward local bookmarks to their remote positions.

    A bookmark is only moved when the remote node is known locally and
    is a descendant of the current local position; divergent bookmarks
    are reported but left alone.
    '''
    ui.debug("checking for updated bookmarks\n")
    rb = remote.listkeys('bookmarks')
    changed = False
    for k in rb.keys():
        if k not in repo._bookmarks:
            continue
        nr, nl = rb[k], repo._bookmarks[k]
        if nr not in repo:
            # remote node unknown here: nothing we can do
            continue
        cr = repo[nr]
        cl = repo[nl]
        if cl.rev() >= cr.rev():
            # local position already at or past the remote one
            continue
        if cr in cl.descendants():
            repo._bookmarks[k] = cr.node()
            changed = True
            ui.status(_("updating bookmark %s\n") % k)
        else:
            ui.warn(_("not updating divergent"
                      " bookmark %s\n") % k)
    if changed:
        write(repo)
202
194
def diff(ui, repo, remote):
    '''Print bookmarks present on remote but absent locally.

    Returns 0 when at least one such bookmark was printed, 1 otherwise
    (shell-style status code).
    '''
    ui.status(_("searching for changed bookmarks\n"))

    lmarks = repo.listkeys('bookmarks')
    rmarks = remote.listkeys('bookmarks')

    changed = sorted(set(rmarks) - set(lmarks))
    for mark in changed:
        ui.write(" %-25s %s\n" % (mark, rmarks[mark][:12]))

    if not changed:
        ui.status(_("no changed bookmarks found\n"))
        return 1
    return 0
@@ -1,1957 +1,1972 b''
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from node import bin, hex, nullid, nullrev, short
8 from node import bin, hex, nullid, nullrev, short
9 from i18n import _
9 from i18n import _
10 import repo, changegroup, subrepo, discovery, pushkey
10 import repo, changegroup, subrepo, discovery, pushkey
11 import changelog, dirstate, filelog, manifest, context, bookmarks
11 import changelog, dirstate, filelog, manifest, context, bookmarks
12 import lock, transaction, store, encoding
12 import lock, transaction, store, encoding
13 import scmutil, util, extensions, hook, error
13 import scmutil, util, extensions, hook, error
14 import match as matchmod
14 import match as matchmod
15 import merge as mergemod
15 import merge as mergemod
16 import tags as tagsmod
16 import tags as tagsmod
17 from lock import release
17 from lock import release
18 import weakref, errno, os, time, inspect
18 import weakref, errno, os, time, inspect
19 propertycache = util.propertycache
19 propertycache = util.propertycache
20
20
class localrepository(repo.repository):
    # wire-protocol/peer capabilities this repository class can serve
    capabilities = set(('lookup', 'changegroupsubset', 'branchmap', 'pushkey',
                        'known', 'getbundle'))
    # on-disk store encodings understood by this version
    supportedformats = set(('revlogv1',))
    # the full set of .hg/requires entries this version accepts
    supported = supportedformats | set(('store', 'fncache', 'shared',
                                        'dotencode'))
27
27
    def __init__(self, baseui, path=None, create=0):
        '''Open (or, when create is true, initialize) the repository at path.

        baseui: ui object copied to hold per-repository configuration
        path:   working-directory root; the metadata lives in path/.hg
        create: when true, write a fresh repository layout; an already
                existing repository is then an error

        Raises error.RepoError when the repository is missing (or, with
        create, already present) and error.RequirementError when
        .hg/requires lists a feature not in self.supported.
        '''
        repo.repository.__init__(self)
        self.root = os.path.realpath(util.expandpath(path))
        self.path = os.path.join(self.root, ".hg")
        self.origroot = path
        self.auditor = scmutil.pathauditor(self.root, self._checknested)
        self.opener = scmutil.opener(self.path)    # files under .hg
        self.wopener = scmutil.opener(self.root)   # working-dir files
        self.baseui = baseui
        self.ui = baseui.copy()

        try:
            self.ui.readconfig(self.join("hgrc"), self.root)
            extensions.loadall(self.ui)
        except IOError:
            # no (or unreadable) .hg/hgrc: proceed with defaults
            pass

        if not os.path.isdir(self.path):
            if create:
                if not os.path.exists(path):
                    util.makedirs(path)
                util.makedir(self.path, notindexed=True)
                requirements = ["revlogv1"]
                if self.ui.configbool('format', 'usestore', True):
                    os.mkdir(os.path.join(self.path, "store"))
                    requirements.append("store")
                    if self.ui.configbool('format', 'usefncache', True):
                        requirements.append("fncache")
                        if self.ui.configbool('format', 'dotencode', True):
                            requirements.append('dotencode')
                # create an invalid changelog
                self.opener.append(
                    "00changelog.i",
                    '\0\0\0\2' # represents revlogv2
                    ' dummy changelog to prevent using the old repo layout'
                    )
            else:
                raise error.RepoError(_("repository %s not found") % path)
        elif create:
            raise error.RepoError(_("repository %s already exists") % path)
        else:
            # find requirements
            requirements = set()
            try:
                requirements = set(self.opener.read("requires").splitlines())
            except IOError, inst:
                # a missing requires file means an old, pre-requirements
                # repository: treat as empty
                if inst.errno != errno.ENOENT:
                    raise
            for r in requirements - self.supported:
                raise error.RequirementError(
                    _("requirement '%s' not supported") % r)

        # honour .hg/sharedpath: the store may live inside another repo
        self.sharedpath = self.path
        try:
            s = os.path.realpath(self.opener.read("sharedpath"))
            if not os.path.exists(s):
                raise error.RepoError(
                    _('.hg/sharedpath points to nonexistent directory %s') % s)
            self.sharedpath = s
        except IOError, inst:
            if inst.errno != errno.ENOENT:
                raise

        self.store = store.store(requirements, self.sharedpath, scmutil.opener)
        self.spath = self.store.path
        self.sopener = self.store.opener
        self.sjoin = self.store.join
        self.opener.createmode = self.store.createmode
        self._applyrequirements(requirements)
        if create:
            self._writerequirements()

        # These two define the set of tags for this repository. _tags
        # maps tag name to node; _tagtypes maps tag name to 'global' or
        # 'local'. (Global tags are defined by .hgtags across all
        # heads, and local tags are defined in .hg/localtags.) They
        # constitute the in-memory cache of tags.
        self._tags = None
        self._tagtypes = None

        # lazily filled caches; reset to their empty state here
        self._branchcache = None
        self._branchcachetip = None
        self.nodetagscache = None
        self.filterpats = {}
        self._datafilters = {}
        self._transref = self._lockref = self._wlockref = None
114
114
    def _applyrequirements(self, requirements):
        '''Adopt the given requirement set and reset store-opener options.'''
        self.requirements = requirements
        self.sopener.options = {}
118
118
119 def _writerequirements(self):
119 def _writerequirements(self):
120 reqfile = self.opener("requires", "w")
120 reqfile = self.opener("requires", "w")
121 for r in self.requirements:
121 for r in self.requirements:
122 reqfile.write("%s\n" % r)
122 reqfile.write("%s\n" % r)
123 reqfile.close()
123 reqfile.close()
124
124
    def _checknested(self, path):
        """Determine if path is a legal nested repository.

        path is an absolute filesystem path; it must lie under
        self.root and match a subrepository recorded in the working
        copy's substate (directly, or via a nested subrepo's own
        checknested).
        """
        if not path.startswith(self.root):
            return False
        subpath = path[len(self.root) + 1:]

        # XXX: Checking against the current working copy is wrong in
        # the sense that it can reject things like
        #
        #   $ hg cat -r 10 sub/x.txt
        #
        # if sub/ is no longer a subrepository in the working copy
        # parent revision.
        #
        # However, it can of course also allow things that would have
        # been rejected before, such as the above cat command if sub/
        # is a subrepository now, but was a normal directory before.
        # The old path auditor would have rejected by mistake since it
        # panics when it sees sub/.hg/.
        #
        # All in all, checking against the working copy seems sensible
        # since we want to prevent access to nested repositories on
        # the filesystem *now*.
        ctx = self[None]
        parts = util.splitpath(subpath)
        while parts:
            prefix = os.sep.join(parts)
            if prefix in ctx.substate:
                if prefix == subpath:
                    # path is itself a registered subrepo: legal
                    return True
                else:
                    # path is inside subrepo 'prefix': delegate the
                    # decision about the remainder to that subrepo
                    sub = ctx.sub(prefix)
                    return sub.checknested(subpath[len(prefix) + 1:])
            else:
                # strip the last path component and retry one level up
                parts.pop()
        return False
161
161
    @util.propertycache
    def _bookmarks(self):
        # {bookmark name: node} mapping parsed from .hg/bookmarks,
        # cached on first access
        return bookmarks.read(self)
165
165
    @util.propertycache
    def _bookmarkcurrent(self):
        # name of the active bookmark (.hg/bookmarks.current) or None,
        # cached on first access
        return bookmarks.readcurrent(self)
169
169
170 @propertycache
170 @propertycache
171 def changelog(self):
171 def changelog(self):
172 c = changelog.changelog(self.sopener)
172 c = changelog.changelog(self.sopener)
173 if 'HG_PENDING' in os.environ:
173 if 'HG_PENDING' in os.environ:
174 p = os.environ['HG_PENDING']
174 p = os.environ['HG_PENDING']
175 if p.startswith(self.root):
175 if p.startswith(self.root):
176 c.readpending('00changelog.i.a')
176 c.readpending('00changelog.i.a')
177 self.sopener.options['defversion'] = c.version
177 self.sopener.options['defversion'] = c.version
178 return c
178 return c
179
179
    @propertycache
    def manifest(self):
        # the manifest revlog, constructed lazily from the store opener
        return manifest.manifest(self.sopener)
183
183
    @propertycache
    def dirstate(self):
        '''The working-directory state, constructed lazily.

        The validate callback maps a recorded working-copy parent that
        the changelog does not know about to nullid, warning once.
        '''
        # one-element list as a mutable cell: lets the nested function
        # flip the flag (Python 2 has no 'nonlocal')
        warned = [0]
        def validate(node):
            try:
                self.changelog.rev(node)
                return node
            except error.LookupError:
                if not warned[0]:
                    warned[0] = True
                    self.ui.warn(_("warning: ignoring unknown"
                                   " working parent %s!\n") % short(node))
                return nullid

        return dirstate.dirstate(self.opener, self.ui, self.root, validate)
199
199
200 def __getitem__(self, changeid):
200 def __getitem__(self, changeid):
201 if changeid is None:
201 if changeid is None:
202 return context.workingctx(self)
202 return context.workingctx(self)
203 return context.changectx(self, changeid)
203 return context.changectx(self, changeid)
204
204
205 def __contains__(self, changeid):
205 def __contains__(self, changeid):
206 try:
206 try:
207 return bool(self.lookup(changeid))
207 return bool(self.lookup(changeid))
208 except error.RepoLookupError:
208 except error.RepoLookupError:
209 return False
209 return False
210
210
    def __nonzero__(self):
        # a repository object is always truthy, even when empty
        return True
213
213
    def __len__(self):
        # number of revisions in the changelog
        return len(self.changelog)
216
216
217 def __iter__(self):
217 def __iter__(self):
218 for i in xrange(len(self)):
218 for i in xrange(len(self)):
219 yield i
219 yield i
220
220
    def url(self):
        # local repositories are addressed with a plain file: URL
        return 'file:' + self.root
223
223
    def hook(self, name, throw=False, **args):
        # run the named hook with this repo as context; throw=True makes
        # a failing hook raise instead of merely returning a status
        return hook.hook(self.ui, self, name, throw, **args)
226
226
    # characters that may never appear in a tag name (enforced in _tag)
    tag_disallowed = ':\r\n'
228
228
    def _tag(self, names, node, message, local, user, date, extra={}):
        '''Record one or more tags for node (backend shared by tag()).

        names may be a single string or a sequence of names.  Local tags
        are appended to .hg/localtags and None is returned; otherwise
        .hgtags is rewritten and committed and the new changeset node is
        returned.

        NOTE(review): extra={} is a mutable default; here it is only
        passed through to commit() — confirm no callee mutates it.
        '''
        if isinstance(names, str):
            allchars = names
            names = (names,)
        else:
            allchars = ''.join(names)
        # reject characters that would corrupt the tag file format
        for c in self.tag_disallowed:
            if c in allchars:
                raise util.Abort(_('%r cannot be used in a tag name') % c)

        branches = self.branchmap()
        for name in names:
            # pretag hooks may veto the operation (throw=True)
            self.hook('pretag', throw=True, node=hex(node), tag=name,
                      local=local)
            if name in branches:
                self.ui.warn(_("warning: tag %s conflicts with existing"
                               " branch name\n") % name)

        def writetags(fp, names, munge, prevtags):
            # append "<hex node> <name>" lines at end of file, making
            # sure the existing content ends with a newline first
            fp.seek(0, 2)
            if prevtags and prevtags[-1] != '\n':
                fp.write('\n')
            for name in names:
                m = munge and munge(name) or name
                if self._tagtypes and name in self._tagtypes:
                    # the tag existed before: record its previous node
                    # first so the tag's history is kept in the file
                    old = self._tags.get(name, nullid)
                    fp.write('%s %s\n' % (hex(old), m))
                fp.write('%s %s\n' % (hex(node), m))
            fp.close()

        prevtags = ''
        if local:
            try:
                fp = self.opener('localtags', 'r+')
            except IOError:
                fp = self.opener('localtags', 'a')
            else:
                prevtags = fp.read()

            # local tags are stored in the current charset
            writetags(fp, names, None, prevtags)
            for name in names:
                self.hook('tag', node=hex(node), tag=name, local=local)
            # local tags produce no changeset: nothing more to do
            return

        try:
            fp = self.wfile('.hgtags', 'rb+')
        except IOError:
            fp = self.wfile('.hgtags', 'ab')
        else:
            prevtags = fp.read()

        # committed tags are stored in UTF-8
        writetags(fp, names, encoding.fromlocal, prevtags)

        # writetags already closed fp; closing again is a harmless no-op
        fp.close()

        if '.hgtags' not in self.dirstate:
            self[None].add(['.hgtags'])

        # commit only the .hgtags change, nothing else from the wd
        m = matchmod.exact(self.root, '', ['.hgtags'])
        tagnode = self.commit(message, user, date, extra=extra, match=m)

        for name in names:
            self.hook('tag', node=hex(node), tag=name, local=local)

        return tagnode
296
296
297 def tag(self, names, node, message, local, user, date):
297 def tag(self, names, node, message, local, user, date):
298 '''tag a revision with one or more symbolic names.
298 '''tag a revision with one or more symbolic names.
299
299
300 names is a list of strings or, when adding a single tag, names may be a
300 names is a list of strings or, when adding a single tag, names may be a
301 string.
301 string.
302
302
303 if local is True, the tags are stored in a per-repository file.
303 if local is True, the tags are stored in a per-repository file.
304 otherwise, they are stored in the .hgtags file, and a new
304 otherwise, they are stored in the .hgtags file, and a new
305 changeset is committed with the change.
305 changeset is committed with the change.
306
306
307 keyword arguments:
307 keyword arguments:
308
308
309 local: whether to store tags in non-version-controlled file
309 local: whether to store tags in non-version-controlled file
310 (default False)
310 (default False)
311
311
312 message: commit message to use if committing
312 message: commit message to use if committing
313
313
314 user: name of user to use if committing
314 user: name of user to use if committing
315
315
316 date: date tuple to use if committing'''
316 date: date tuple to use if committing'''
317
317
318 if not local:
318 if not local:
319 for x in self.status()[:5]:
319 for x in self.status()[:5]:
320 if '.hgtags' in x:
320 if '.hgtags' in x:
321 raise util.Abort(_('working copy of .hgtags is changed '
321 raise util.Abort(_('working copy of .hgtags is changed '
322 '(please commit .hgtags manually)'))
322 '(please commit .hgtags manually)'))
323
323
324 self.tags() # instantiate the cache
324 self.tags() # instantiate the cache
325 self._tag(names, node, message, local, user, date)
325 self._tag(names, node, message, local, user, date)
326
326
327 def tags(self):
327 def tags(self):
328 '''return a mapping of tag to node'''
328 '''return a mapping of tag to node'''
329 if self._tags is None:
329 if self._tags is None:
330 (self._tags, self._tagtypes) = self._findtags()
330 (self._tags, self._tagtypes) = self._findtags()
331
331
332 return self._tags
332 return self._tags
333
333
334 def _findtags(self):
334 def _findtags(self):
335 '''Do the hard work of finding tags. Return a pair of dicts
335 '''Do the hard work of finding tags. Return a pair of dicts
336 (tags, tagtypes) where tags maps tag name to node, and tagtypes
336 (tags, tagtypes) where tags maps tag name to node, and tagtypes
337 maps tag name to a string like \'global\' or \'local\'.
337 maps tag name to a string like \'global\' or \'local\'.
338 Subclasses or extensions are free to add their own tags, but
338 Subclasses or extensions are free to add their own tags, but
339 should be aware that the returned dicts will be retained for the
339 should be aware that the returned dicts will be retained for the
340 duration of the localrepo object.'''
340 duration of the localrepo object.'''
341
341
342 # XXX what tagtype should subclasses/extensions use? Currently
342 # XXX what tagtype should subclasses/extensions use? Currently
343 # mq and bookmarks add tags, but do not set the tagtype at all.
343 # mq and bookmarks add tags, but do not set the tagtype at all.
344 # Should each extension invent its own tag type? Should there
344 # Should each extension invent its own tag type? Should there
345 # be one tagtype for all such "virtual" tags? Or is the status
345 # be one tagtype for all such "virtual" tags? Or is the status
346 # quo fine?
346 # quo fine?
347
347
348 alltags = {} # map tag name to (node, hist)
348 alltags = {} # map tag name to (node, hist)
349 tagtypes = {}
349 tagtypes = {}
350
350
351 tagsmod.findglobaltags(self.ui, self, alltags, tagtypes)
351 tagsmod.findglobaltags(self.ui, self, alltags, tagtypes)
352 tagsmod.readlocaltags(self.ui, self, alltags, tagtypes)
352 tagsmod.readlocaltags(self.ui, self, alltags, tagtypes)
353
353
354 # Build the return dicts. Have to re-encode tag names because
354 # Build the return dicts. Have to re-encode tag names because
355 # the tags module always uses UTF-8 (in order not to lose info
355 # the tags module always uses UTF-8 (in order not to lose info
356 # writing to the cache), but the rest of Mercurial wants them in
356 # writing to the cache), but the rest of Mercurial wants them in
357 # local encoding.
357 # local encoding.
358 tags = {}
358 tags = {}
359 for (name, (node, hist)) in alltags.iteritems():
359 for (name, (node, hist)) in alltags.iteritems():
360 if node != nullid:
360 if node != nullid:
361 try:
361 try:
362 # ignore tags to unknown nodes
362 # ignore tags to unknown nodes
363 self.changelog.lookup(node)
363 self.changelog.lookup(node)
364 tags[encoding.tolocal(name)] = node
364 tags[encoding.tolocal(name)] = node
365 except error.LookupError:
365 except error.LookupError:
366 pass
366 pass
367 tags['tip'] = self.changelog.tip()
367 tags['tip'] = self.changelog.tip()
368 tagtypes = dict([(encoding.tolocal(name), value)
368 tagtypes = dict([(encoding.tolocal(name), value)
369 for (name, value) in tagtypes.iteritems()])
369 for (name, value) in tagtypes.iteritems()])
370 return (tags, tagtypes)
370 return (tags, tagtypes)
371
371
372 def tagtype(self, tagname):
372 def tagtype(self, tagname):
373 '''
373 '''
374 return the type of the given tag. result can be:
374 return the type of the given tag. result can be:
375
375
376 'local' : a local tag
376 'local' : a local tag
377 'global' : a global tag
377 'global' : a global tag
378 None : tag does not exist
378 None : tag does not exist
379 '''
379 '''
380
380
381 self.tags()
381 self.tags()
382
382
383 return self._tagtypes.get(tagname)
383 return self._tagtypes.get(tagname)
384
384
385 def tagslist(self):
385 def tagslist(self):
386 '''return a list of tags ordered by revision'''
386 '''return a list of tags ordered by revision'''
387 l = []
387 l = []
388 for t, n in self.tags().iteritems():
388 for t, n in self.tags().iteritems():
389 r = self.changelog.rev(n)
389 r = self.changelog.rev(n)
390 l.append((r, t, n))
390 l.append((r, t, n))
391 return [(t, n) for r, t, n in sorted(l)]
391 return [(t, n) for r, t, n in sorted(l)]
392
392
393 def nodetags(self, node):
393 def nodetags(self, node):
394 '''return the tags associated with a node'''
394 '''return the tags associated with a node'''
395 if not self.nodetagscache:
395 if not self.nodetagscache:
396 self.nodetagscache = {}
396 self.nodetagscache = {}
397 for t, n in self.tags().iteritems():
397 for t, n in self.tags().iteritems():
398 self.nodetagscache.setdefault(n, []).append(t)
398 self.nodetagscache.setdefault(n, []).append(t)
399 for tags in self.nodetagscache.itervalues():
399 for tags in self.nodetagscache.itervalues():
400 tags.sort()
400 tags.sort()
401 return self.nodetagscache.get(node, [])
401 return self.nodetagscache.get(node, [])
402
402
403 def nodebookmarks(self, node):
403 def nodebookmarks(self, node):
404 marks = []
404 marks = []
405 for bookmark, n in self._bookmarks.iteritems():
405 for bookmark, n in self._bookmarks.iteritems():
406 if n == node:
406 if n == node:
407 marks.append(bookmark)
407 marks.append(bookmark)
408 return sorted(marks)
408 return sorted(marks)
409
409
410 def _branchtags(self, partial, lrev):
410 def _branchtags(self, partial, lrev):
411 # TODO: rename this function?
411 # TODO: rename this function?
412 tiprev = len(self) - 1
412 tiprev = len(self) - 1
413 if lrev != tiprev:
413 if lrev != tiprev:
414 ctxgen = (self[r] for r in xrange(lrev + 1, tiprev + 1))
414 ctxgen = (self[r] for r in xrange(lrev + 1, tiprev + 1))
415 self._updatebranchcache(partial, ctxgen)
415 self._updatebranchcache(partial, ctxgen)
416 self._writebranchcache(partial, self.changelog.tip(), tiprev)
416 self._writebranchcache(partial, self.changelog.tip(), tiprev)
417
417
418 return partial
418 return partial
419
419
420 def updatebranchcache(self):
420 def updatebranchcache(self):
421 tip = self.changelog.tip()
421 tip = self.changelog.tip()
422 if self._branchcache is not None and self._branchcachetip == tip:
422 if self._branchcache is not None and self._branchcachetip == tip:
423 return self._branchcache
423 return self._branchcache
424
424
425 oldtip = self._branchcachetip
425 oldtip = self._branchcachetip
426 self._branchcachetip = tip
426 self._branchcachetip = tip
427 if oldtip is None or oldtip not in self.changelog.nodemap:
427 if oldtip is None or oldtip not in self.changelog.nodemap:
428 partial, last, lrev = self._readbranchcache()
428 partial, last, lrev = self._readbranchcache()
429 else:
429 else:
430 lrev = self.changelog.rev(oldtip)
430 lrev = self.changelog.rev(oldtip)
431 partial = self._branchcache
431 partial = self._branchcache
432
432
433 self._branchtags(partial, lrev)
433 self._branchtags(partial, lrev)
434 # this private cache holds all heads (not just tips)
434 # this private cache holds all heads (not just tips)
435 self._branchcache = partial
435 self._branchcache = partial
436
436
437 def branchmap(self):
437 def branchmap(self):
438 '''returns a dictionary {branch: [branchheads]}'''
438 '''returns a dictionary {branch: [branchheads]}'''
439 self.updatebranchcache()
439 self.updatebranchcache()
440 return self._branchcache
440 return self._branchcache
441
441
442 def branchtags(self):
442 def branchtags(self):
443 '''return a dict where branch names map to the tipmost head of
443 '''return a dict where branch names map to the tipmost head of
444 the branch, open heads come before closed'''
444 the branch, open heads come before closed'''
445 bt = {}
445 bt = {}
446 for bn, heads in self.branchmap().iteritems():
446 for bn, heads in self.branchmap().iteritems():
447 tip = heads[-1]
447 tip = heads[-1]
448 for h in reversed(heads):
448 for h in reversed(heads):
449 if 'close' not in self.changelog.read(h)[5]:
449 if 'close' not in self.changelog.read(h)[5]:
450 tip = h
450 tip = h
451 break
451 break
452 bt[bn] = tip
452 bt[bn] = tip
453 return bt
453 return bt
454
454
455 def _readbranchcache(self):
455 def _readbranchcache(self):
456 partial = {}
456 partial = {}
457 try:
457 try:
458 f = self.opener("cache/branchheads")
458 f = self.opener("cache/branchheads")
459 lines = f.read().split('\n')
459 lines = f.read().split('\n')
460 f.close()
460 f.close()
461 except (IOError, OSError):
461 except (IOError, OSError):
462 return {}, nullid, nullrev
462 return {}, nullid, nullrev
463
463
464 try:
464 try:
465 last, lrev = lines.pop(0).split(" ", 1)
465 last, lrev = lines.pop(0).split(" ", 1)
466 last, lrev = bin(last), int(lrev)
466 last, lrev = bin(last), int(lrev)
467 if lrev >= len(self) or self[lrev].node() != last:
467 if lrev >= len(self) or self[lrev].node() != last:
468 # invalidate the cache
468 # invalidate the cache
469 raise ValueError('invalidating branch cache (tip differs)')
469 raise ValueError('invalidating branch cache (tip differs)')
470 for l in lines:
470 for l in lines:
471 if not l:
471 if not l:
472 continue
472 continue
473 node, label = l.split(" ", 1)
473 node, label = l.split(" ", 1)
474 label = encoding.tolocal(label.strip())
474 label = encoding.tolocal(label.strip())
475 partial.setdefault(label, []).append(bin(node))
475 partial.setdefault(label, []).append(bin(node))
476 except KeyboardInterrupt:
476 except KeyboardInterrupt:
477 raise
477 raise
478 except Exception, inst:
478 except Exception, inst:
479 if self.ui.debugflag:
479 if self.ui.debugflag:
480 self.ui.warn(str(inst), '\n')
480 self.ui.warn(str(inst), '\n')
481 partial, last, lrev = {}, nullid, nullrev
481 partial, last, lrev = {}, nullid, nullrev
482 return partial, last, lrev
482 return partial, last, lrev
483
483
484 def _writebranchcache(self, branches, tip, tiprev):
484 def _writebranchcache(self, branches, tip, tiprev):
485 try:
485 try:
486 f = self.opener("cache/branchheads", "w", atomictemp=True)
486 f = self.opener("cache/branchheads", "w", atomictemp=True)
487 f.write("%s %s\n" % (hex(tip), tiprev))
487 f.write("%s %s\n" % (hex(tip), tiprev))
488 for label, nodes in branches.iteritems():
488 for label, nodes in branches.iteritems():
489 for node in nodes:
489 for node in nodes:
490 f.write("%s %s\n" % (hex(node), encoding.fromlocal(label)))
490 f.write("%s %s\n" % (hex(node), encoding.fromlocal(label)))
491 f.rename()
491 f.rename()
492 except (IOError, OSError):
492 except (IOError, OSError):
493 pass
493 pass
494
494
495 def _updatebranchcache(self, partial, ctxgen):
495 def _updatebranchcache(self, partial, ctxgen):
496 # collect new branch entries
496 # collect new branch entries
497 newbranches = {}
497 newbranches = {}
498 for c in ctxgen:
498 for c in ctxgen:
499 newbranches.setdefault(c.branch(), []).append(c.node())
499 newbranches.setdefault(c.branch(), []).append(c.node())
500 # if older branchheads are reachable from new ones, they aren't
500 # if older branchheads are reachable from new ones, they aren't
501 # really branchheads. Note checking parents is insufficient:
501 # really branchheads. Note checking parents is insufficient:
502 # 1 (branch a) -> 2 (branch b) -> 3 (branch a)
502 # 1 (branch a) -> 2 (branch b) -> 3 (branch a)
503 for branch, newnodes in newbranches.iteritems():
503 for branch, newnodes in newbranches.iteritems():
504 bheads = partial.setdefault(branch, [])
504 bheads = partial.setdefault(branch, [])
505 bheads.extend(newnodes)
505 bheads.extend(newnodes)
506 if len(bheads) <= 1:
506 if len(bheads) <= 1:
507 continue
507 continue
508 bheads = sorted(bheads, key=lambda x: self[x].rev())
508 bheads = sorted(bheads, key=lambda x: self[x].rev())
509 # starting from tip means fewer passes over reachable
509 # starting from tip means fewer passes over reachable
510 while newnodes:
510 while newnodes:
511 latest = newnodes.pop()
511 latest = newnodes.pop()
512 if latest not in bheads:
512 if latest not in bheads:
513 continue
513 continue
514 minbhrev = self[bheads[0]].node()
514 minbhrev = self[bheads[0]].node()
515 reachable = self.changelog.reachable(latest, minbhrev)
515 reachable = self.changelog.reachable(latest, minbhrev)
516 reachable.remove(latest)
516 reachable.remove(latest)
517 if reachable:
517 if reachable:
518 bheads = [b for b in bheads if b not in reachable]
518 bheads = [b for b in bheads if b not in reachable]
519 partial[branch] = bheads
519 partial[branch] = bheads
520
520
521 def lookup(self, key):
521 def lookup(self, key):
522 if isinstance(key, int):
522 if isinstance(key, int):
523 return self.changelog.node(key)
523 return self.changelog.node(key)
524 elif key == '.':
524 elif key == '.':
525 return self.dirstate.p1()
525 return self.dirstate.p1()
526 elif key == 'null':
526 elif key == 'null':
527 return nullid
527 return nullid
528 elif key == 'tip':
528 elif key == 'tip':
529 return self.changelog.tip()
529 return self.changelog.tip()
530 n = self.changelog._match(key)
530 n = self.changelog._match(key)
531 if n:
531 if n:
532 return n
532 return n
533 if key in self._bookmarks:
533 if key in self._bookmarks:
534 return self._bookmarks[key]
534 return self._bookmarks[key]
535 if key in self.tags():
535 if key in self.tags():
536 return self.tags()[key]
536 return self.tags()[key]
537 if key in self.branchtags():
537 if key in self.branchtags():
538 return self.branchtags()[key]
538 return self.branchtags()[key]
539 n = self.changelog._partialmatch(key)
539 n = self.changelog._partialmatch(key)
540 if n:
540 if n:
541 return n
541 return n
542
542
543 # can't find key, check if it might have come from damaged dirstate
543 # can't find key, check if it might have come from damaged dirstate
544 if key in self.dirstate.parents():
544 if key in self.dirstate.parents():
545 raise error.Abort(_("working directory has unknown parent '%s'!")
545 raise error.Abort(_("working directory has unknown parent '%s'!")
546 % short(key))
546 % short(key))
547 try:
547 try:
548 if len(key) == 20:
548 if len(key) == 20:
549 key = hex(key)
549 key = hex(key)
550 except TypeError:
550 except TypeError:
551 pass
551 pass
552 raise error.RepoLookupError(_("unknown revision '%s'") % key)
552 raise error.RepoLookupError(_("unknown revision '%s'") % key)
553
553
554 def lookupbranch(self, key, remote=None):
554 def lookupbranch(self, key, remote=None):
555 repo = remote or self
555 repo = remote or self
556 if key in repo.branchmap():
556 if key in repo.branchmap():
557 return key
557 return key
558
558
559 repo = (remote and remote.local()) and remote or self
559 repo = (remote and remote.local()) and remote or self
560 return repo[key].branch()
560 return repo[key].branch()
561
561
562 def known(self, nodes):
562 def known(self, nodes):
563 nm = self.changelog.nodemap
563 nm = self.changelog.nodemap
564 return [(n in nm) for n in nodes]
564 return [(n in nm) for n in nodes]
565
565
566 def local(self):
566 def local(self):
567 return True
567 return True
568
568
569 def join(self, f):
569 def join(self, f):
570 return os.path.join(self.path, f)
570 return os.path.join(self.path, f)
571
571
572 def wjoin(self, f):
572 def wjoin(self, f):
573 return os.path.join(self.root, f)
573 return os.path.join(self.root, f)
574
574
575 def file(self, f):
575 def file(self, f):
576 if f[0] == '/':
576 if f[0] == '/':
577 f = f[1:]
577 f = f[1:]
578 return filelog.filelog(self.sopener, f)
578 return filelog.filelog(self.sopener, f)
579
579
580 def changectx(self, changeid):
580 def changectx(self, changeid):
581 return self[changeid]
581 return self[changeid]
582
582
583 def parents(self, changeid=None):
583 def parents(self, changeid=None):
584 '''get list of changectxs for parents of changeid'''
584 '''get list of changectxs for parents of changeid'''
585 return self[changeid].parents()
585 return self[changeid].parents()
586
586
587 def filectx(self, path, changeid=None, fileid=None):
587 def filectx(self, path, changeid=None, fileid=None):
588 """changeid can be a changeset revision, node, or tag.
588 """changeid can be a changeset revision, node, or tag.
589 fileid can be a file revision or node."""
589 fileid can be a file revision or node."""
590 return context.filectx(self, path, changeid, fileid)
590 return context.filectx(self, path, changeid, fileid)
591
591
592 def getcwd(self):
592 def getcwd(self):
593 return self.dirstate.getcwd()
593 return self.dirstate.getcwd()
594
594
595 def pathto(self, f, cwd=None):
595 def pathto(self, f, cwd=None):
596 return self.dirstate.pathto(f, cwd)
596 return self.dirstate.pathto(f, cwd)
597
597
598 def wfile(self, f, mode='r'):
598 def wfile(self, f, mode='r'):
599 return self.wopener(f, mode)
599 return self.wopener(f, mode)
600
600
601 def _link(self, f):
601 def _link(self, f):
602 return os.path.islink(self.wjoin(f))
602 return os.path.islink(self.wjoin(f))
603
603
604 def _loadfilter(self, filter):
604 def _loadfilter(self, filter):
605 if filter not in self.filterpats:
605 if filter not in self.filterpats:
606 l = []
606 l = []
607 for pat, cmd in self.ui.configitems(filter):
607 for pat, cmd in self.ui.configitems(filter):
608 if cmd == '!':
608 if cmd == '!':
609 continue
609 continue
610 mf = matchmod.match(self.root, '', [pat])
610 mf = matchmod.match(self.root, '', [pat])
611 fn = None
611 fn = None
612 params = cmd
612 params = cmd
613 for name, filterfn in self._datafilters.iteritems():
613 for name, filterfn in self._datafilters.iteritems():
614 if cmd.startswith(name):
614 if cmd.startswith(name):
615 fn = filterfn
615 fn = filterfn
616 params = cmd[len(name):].lstrip()
616 params = cmd[len(name):].lstrip()
617 break
617 break
618 if not fn:
618 if not fn:
619 fn = lambda s, c, **kwargs: util.filter(s, c)
619 fn = lambda s, c, **kwargs: util.filter(s, c)
620 # Wrap old filters not supporting keyword arguments
620 # Wrap old filters not supporting keyword arguments
621 if not inspect.getargspec(fn)[2]:
621 if not inspect.getargspec(fn)[2]:
622 oldfn = fn
622 oldfn = fn
623 fn = lambda s, c, **kwargs: oldfn(s, c)
623 fn = lambda s, c, **kwargs: oldfn(s, c)
624 l.append((mf, fn, params))
624 l.append((mf, fn, params))
625 self.filterpats[filter] = l
625 self.filterpats[filter] = l
626 return self.filterpats[filter]
626 return self.filterpats[filter]
627
627
628 def _filter(self, filterpats, filename, data):
628 def _filter(self, filterpats, filename, data):
629 for mf, fn, cmd in filterpats:
629 for mf, fn, cmd in filterpats:
630 if mf(filename):
630 if mf(filename):
631 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
631 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
632 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
632 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
633 break
633 break
634
634
635 return data
635 return data
636
636
637 @propertycache
637 @propertycache
638 def _encodefilterpats(self):
638 def _encodefilterpats(self):
639 return self._loadfilter('encode')
639 return self._loadfilter('encode')
640
640
641 @propertycache
641 @propertycache
642 def _decodefilterpats(self):
642 def _decodefilterpats(self):
643 return self._loadfilter('decode')
643 return self._loadfilter('decode')
644
644
645 def adddatafilter(self, name, filter):
645 def adddatafilter(self, name, filter):
646 self._datafilters[name] = filter
646 self._datafilters[name] = filter
647
647
648 def wread(self, filename):
648 def wread(self, filename):
649 if self._link(filename):
649 if self._link(filename):
650 data = os.readlink(self.wjoin(filename))
650 data = os.readlink(self.wjoin(filename))
651 else:
651 else:
652 data = self.wopener.read(filename)
652 data = self.wopener.read(filename)
653 return self._filter(self._encodefilterpats, filename, data)
653 return self._filter(self._encodefilterpats, filename, data)
654
654
655 def wwrite(self, filename, data, flags):
655 def wwrite(self, filename, data, flags):
656 data = self._filter(self._decodefilterpats, filename, data)
656 data = self._filter(self._decodefilterpats, filename, data)
657 if 'l' in flags:
657 if 'l' in flags:
658 self.wopener.symlink(data, filename)
658 self.wopener.symlink(data, filename)
659 else:
659 else:
660 self.wopener.write(filename, data)
660 self.wopener.write(filename, data)
661 if 'x' in flags:
661 if 'x' in flags:
662 util.setflags(self.wjoin(filename), False, True)
662 util.setflags(self.wjoin(filename), False, True)
663
663
664 def wwritedata(self, filename, data):
664 def wwritedata(self, filename, data):
665 return self._filter(self._decodefilterpats, filename, data)
665 return self._filter(self._decodefilterpats, filename, data)
666
666
667 def transaction(self, desc):
667 def transaction(self, desc):
668 tr = self._transref and self._transref() or None
668 tr = self._transref and self._transref() or None
669 if tr and tr.running():
669 if tr and tr.running():
670 return tr.nest()
670 return tr.nest()
671
671
672 # abort here if the journal already exists
672 # abort here if the journal already exists
673 if os.path.exists(self.sjoin("journal")):
673 if os.path.exists(self.sjoin("journal")):
674 raise error.RepoError(
674 raise error.RepoError(
675 _("abandoned transaction found - run hg recover"))
675 _("abandoned transaction found - run hg recover"))
676
676
677 journalfiles = self._writejournal(desc)
678 renames = [(x, undoname(x)) for x in journalfiles]
679
680 tr = transaction.transaction(self.ui.warn, self.sopener,
681 self.sjoin("journal"),
682 aftertrans(renames),
683 self.store.createmode)
684 self._transref = weakref.ref(tr)
685 return tr
686
687 def _writejournal(self, desc):
677 # save dirstate for rollback
688 # save dirstate for rollback
678 try:
689 try:
679 ds = self.opener.read("dirstate")
690 ds = self.opener.read("dirstate")
680 except IOError:
691 except IOError:
681 ds = ""
692 ds = ""
682 self.opener.write("journal.dirstate", ds)
693 self.opener.write("journal.dirstate", ds)
683 self.opener.write("journal.branch",
694 self.opener.write("journal.branch",
684 encoding.fromlocal(self.dirstate.branch()))
695 encoding.fromlocal(self.dirstate.branch()))
685 self.opener.write("journal.desc",
696 self.opener.write("journal.desc",
686 "%d\n%s\n" % (len(self), desc))
697 "%d\n%s\n" % (len(self), desc))
687
698
688 renames = [(self.sjoin("journal"), self.sjoin("undo")),
699 bkname = self.join('bookmarks')
689 (self.join("journal.dirstate"), self.join("undo.dirstate")),
700 if os.path.exists(bkname):
690 (self.join("journal.branch"), self.join("undo.branch")),
701 util.copyfile(bkname, self.join('journal.bookmarks'))
691 (self.join("journal.desc"), self.join("undo.desc"))]
702 else:
692 tr = transaction.transaction(self.ui.warn, self.sopener,
703 self.opener('journal.bookmarks', 'w').write('')
693 self.sjoin("journal"),
704
694 aftertrans(renames),
705 return (self.sjoin('journal'), self.join('journal.dirstate'),
695 self.store.createmode)
706 self.join('journal.branch'), self.join('journal.desc'),
696 self._transref = weakref.ref(tr)
707 self.join('journal.bookmarks'))
697 return tr
698
708
699 def recover(self):
709 def recover(self):
700 lock = self.lock()
710 lock = self.lock()
701 try:
711 try:
702 if os.path.exists(self.sjoin("journal")):
712 if os.path.exists(self.sjoin("journal")):
703 self.ui.status(_("rolling back interrupted transaction\n"))
713 self.ui.status(_("rolling back interrupted transaction\n"))
704 transaction.rollback(self.sopener, self.sjoin("journal"),
714 transaction.rollback(self.sopener, self.sjoin("journal"),
705 self.ui.warn)
715 self.ui.warn)
706 self.invalidate()
716 self.invalidate()
707 return True
717 return True
708 else:
718 else:
709 self.ui.warn(_("no interrupted transaction available\n"))
719 self.ui.warn(_("no interrupted transaction available\n"))
710 return False
720 return False
711 finally:
721 finally:
712 lock.release()
722 lock.release()
713
723
714 def rollback(self, dryrun=False):
724 def rollback(self, dryrun=False):
715 wlock = lock = None
725 wlock = lock = None
716 try:
726 try:
717 wlock = self.wlock()
727 wlock = self.wlock()
718 lock = self.lock()
728 lock = self.lock()
719 if os.path.exists(self.sjoin("undo")):
729 if os.path.exists(self.sjoin("undo")):
720 try:
730 try:
721 args = self.opener.read("undo.desc").splitlines()
731 args = self.opener.read("undo.desc").splitlines()
722 if len(args) >= 3 and self.ui.verbose:
732 if len(args) >= 3 and self.ui.verbose:
723 desc = _("repository tip rolled back to revision %s"
733 desc = _("repository tip rolled back to revision %s"
724 " (undo %s: %s)\n") % (
734 " (undo %s: %s)\n") % (
725 int(args[0]) - 1, args[1], args[2])
735 int(args[0]) - 1, args[1], args[2])
726 elif len(args) >= 2:
736 elif len(args) >= 2:
727 desc = _("repository tip rolled back to revision %s"
737 desc = _("repository tip rolled back to revision %s"
728 " (undo %s)\n") % (
738 " (undo %s)\n") % (
729 int(args[0]) - 1, args[1])
739 int(args[0]) - 1, args[1])
730 except IOError:
740 except IOError:
731 desc = _("rolling back unknown transaction\n")
741 desc = _("rolling back unknown transaction\n")
732 self.ui.status(desc)
742 self.ui.status(desc)
733 if dryrun:
743 if dryrun:
734 return
744 return
735 transaction.rollback(self.sopener, self.sjoin("undo"),
745 transaction.rollback(self.sopener, self.sjoin("undo"),
736 self.ui.warn)
746 self.ui.warn)
737 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
747 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
738 if os.path.exists(self.join('undo.bookmarks')):
748 if os.path.exists(self.join('undo.bookmarks')):
739 util.rename(self.join('undo.bookmarks'),
749 util.rename(self.join('undo.bookmarks'),
740 self.join('bookmarks'))
750 self.join('bookmarks'))
741 try:
751 try:
742 branch = self.opener.read("undo.branch")
752 branch = self.opener.read("undo.branch")
743 self.dirstate.setbranch(branch)
753 self.dirstate.setbranch(branch)
744 except IOError:
754 except IOError:
745 self.ui.warn(_("named branch could not be reset, "
755 self.ui.warn(_("named branch could not be reset, "
746 "current branch is still: %s\n")
756 "current branch is still: %s\n")
747 % self.dirstate.branch())
757 % self.dirstate.branch())
748 self.invalidate()
758 self.invalidate()
749 self.dirstate.invalidate()
759 self.dirstate.invalidate()
750 self.destroyed()
760 self.destroyed()
751 parents = tuple([p.rev() for p in self.parents()])
761 parents = tuple([p.rev() for p in self.parents()])
752 if len(parents) > 1:
762 if len(parents) > 1:
753 self.ui.status(_("working directory now based on "
763 self.ui.status(_("working directory now based on "
754 "revisions %d and %d\n") % parents)
764 "revisions %d and %d\n") % parents)
755 else:
765 else:
756 self.ui.status(_("working directory now based on "
766 self.ui.status(_("working directory now based on "
757 "revision %d\n") % parents)
767 "revision %d\n") % parents)
758 else:
768 else:
759 self.ui.warn(_("no rollback information available\n"))
769 self.ui.warn(_("no rollback information available\n"))
760 return 1
770 return 1
761 finally:
771 finally:
762 release(lock, wlock)
772 release(lock, wlock)
763
773
764 def invalidatecaches(self):
774 def invalidatecaches(self):
765 self._tags = None
775 self._tags = None
766 self._tagtypes = None
776 self._tagtypes = None
767 self.nodetagscache = None
777 self.nodetagscache = None
768 self._branchcache = None # in UTF-8
778 self._branchcache = None # in UTF-8
769 self._branchcachetip = None
779 self._branchcachetip = None
770
780
771 def invalidate(self):
781 def invalidate(self):
772 for a in ("changelog", "manifest", "_bookmarks", "_bookmarkcurrent"):
782 for a in ("changelog", "manifest", "_bookmarks", "_bookmarkcurrent"):
773 if a in self.__dict__:
783 if a in self.__dict__:
774 delattr(self, a)
784 delattr(self, a)
775 self.invalidatecaches()
785 self.invalidatecaches()
776
786
777 def _lock(self, lockname, wait, releasefn, acquirefn, desc):
787 def _lock(self, lockname, wait, releasefn, acquirefn, desc):
778 try:
788 try:
779 l = lock.lock(lockname, 0, releasefn, desc=desc)
789 l = lock.lock(lockname, 0, releasefn, desc=desc)
780 except error.LockHeld, inst:
790 except error.LockHeld, inst:
781 if not wait:
791 if not wait:
782 raise
792 raise
783 self.ui.warn(_("waiting for lock on %s held by %r\n") %
793 self.ui.warn(_("waiting for lock on %s held by %r\n") %
784 (desc, inst.locker))
794 (desc, inst.locker))
785 # default to 600 seconds timeout
795 # default to 600 seconds timeout
786 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
796 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
787 releasefn, desc=desc)
797 releasefn, desc=desc)
788 if acquirefn:
798 if acquirefn:
789 acquirefn()
799 acquirefn()
790 return l
800 return l
791
801
792 def lock(self, wait=True):
802 def lock(self, wait=True):
793 '''Lock the repository store (.hg/store) and return a weak reference
803 '''Lock the repository store (.hg/store) and return a weak reference
794 to the lock. Use this before modifying the store (e.g. committing or
804 to the lock. Use this before modifying the store (e.g. committing or
795 stripping). If you are opening a transaction, get a lock as well.)'''
805 stripping). If you are opening a transaction, get a lock as well.)'''
796 l = self._lockref and self._lockref()
806 l = self._lockref and self._lockref()
797 if l is not None and l.held:
807 if l is not None and l.held:
798 l.lock()
808 l.lock()
799 return l
809 return l
800
810
801 l = self._lock(self.sjoin("lock"), wait, self.store.write,
811 l = self._lock(self.sjoin("lock"), wait, self.store.write,
802 self.invalidate, _('repository %s') % self.origroot)
812 self.invalidate, _('repository %s') % self.origroot)
803 self._lockref = weakref.ref(l)
813 self._lockref = weakref.ref(l)
804 return l
814 return l
805
815
806 def wlock(self, wait=True):
816 def wlock(self, wait=True):
807 '''Lock the non-store parts of the repository (everything under
817 '''Lock the non-store parts of the repository (everything under
808 .hg except .hg/store) and return a weak reference to the lock.
818 .hg except .hg/store) and return a weak reference to the lock.
809 Use this before modifying files in .hg.'''
819 Use this before modifying files in .hg.'''
810 l = self._wlockref and self._wlockref()
820 l = self._wlockref and self._wlockref()
811 if l is not None and l.held:
821 if l is not None and l.held:
812 l.lock()
822 l.lock()
813 return l
823 return l
814
824
815 l = self._lock(self.join("wlock"), wait, self.dirstate.write,
825 l = self._lock(self.join("wlock"), wait, self.dirstate.write,
816 self.dirstate.invalidate, _('working directory of %s') %
826 self.dirstate.invalidate, _('working directory of %s') %
817 self.origroot)
827 self.origroot)
818 self._wlockref = weakref.ref(l)
828 self._wlockref = weakref.ref(l)
819 return l
829 return l
820
830
821 def _filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
831 def _filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
822 """
832 """
823 commit an individual file as part of a larger transaction
833 commit an individual file as part of a larger transaction
824 """
834 """
825
835
826 fname = fctx.path()
836 fname = fctx.path()
827 text = fctx.data()
837 text = fctx.data()
828 flog = self.file(fname)
838 flog = self.file(fname)
829 fparent1 = manifest1.get(fname, nullid)
839 fparent1 = manifest1.get(fname, nullid)
830 fparent2 = fparent2o = manifest2.get(fname, nullid)
840 fparent2 = fparent2o = manifest2.get(fname, nullid)
831
841
832 meta = {}
842 meta = {}
833 copy = fctx.renamed()
843 copy = fctx.renamed()
834 if copy and copy[0] != fname:
844 if copy and copy[0] != fname:
835 # Mark the new revision of this file as a copy of another
845 # Mark the new revision of this file as a copy of another
836 # file. This copy data will effectively act as a parent
846 # file. This copy data will effectively act as a parent
837 # of this new revision. If this is a merge, the first
847 # of this new revision. If this is a merge, the first
838 # parent will be the nullid (meaning "look up the copy data")
848 # parent will be the nullid (meaning "look up the copy data")
839 # and the second one will be the other parent. For example:
849 # and the second one will be the other parent. For example:
840 #
850 #
841 # 0 --- 1 --- 3 rev1 changes file foo
851 # 0 --- 1 --- 3 rev1 changes file foo
842 # \ / rev2 renames foo to bar and changes it
852 # \ / rev2 renames foo to bar and changes it
843 # \- 2 -/ rev3 should have bar with all changes and
853 # \- 2 -/ rev3 should have bar with all changes and
844 # should record that bar descends from
854 # should record that bar descends from
845 # bar in rev2 and foo in rev1
855 # bar in rev2 and foo in rev1
846 #
856 #
847 # this allows this merge to succeed:
857 # this allows this merge to succeed:
848 #
858 #
849 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
859 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
850 # \ / merging rev3 and rev4 should use bar@rev2
860 # \ / merging rev3 and rev4 should use bar@rev2
851 # \- 2 --- 4 as the merge base
861 # \- 2 --- 4 as the merge base
852 #
862 #
853
863
854 cfname = copy[0]
864 cfname = copy[0]
855 crev = manifest1.get(cfname)
865 crev = manifest1.get(cfname)
856 newfparent = fparent2
866 newfparent = fparent2
857
867
858 if manifest2: # branch merge
868 if manifest2: # branch merge
859 if fparent2 == nullid or crev is None: # copied on remote side
869 if fparent2 == nullid or crev is None: # copied on remote side
860 if cfname in manifest2:
870 if cfname in manifest2:
861 crev = manifest2[cfname]
871 crev = manifest2[cfname]
862 newfparent = fparent1
872 newfparent = fparent1
863
873
864 # find source in nearest ancestor if we've lost track
874 # find source in nearest ancestor if we've lost track
865 if not crev:
875 if not crev:
866 self.ui.debug(" %s: searching for copy revision for %s\n" %
876 self.ui.debug(" %s: searching for copy revision for %s\n" %
867 (fname, cfname))
877 (fname, cfname))
868 for ancestor in self[None].ancestors():
878 for ancestor in self[None].ancestors():
869 if cfname in ancestor:
879 if cfname in ancestor:
870 crev = ancestor[cfname].filenode()
880 crev = ancestor[cfname].filenode()
871 break
881 break
872
882
873 if crev:
883 if crev:
874 self.ui.debug(" %s: copy %s:%s\n" % (fname, cfname, hex(crev)))
884 self.ui.debug(" %s: copy %s:%s\n" % (fname, cfname, hex(crev)))
875 meta["copy"] = cfname
885 meta["copy"] = cfname
876 meta["copyrev"] = hex(crev)
886 meta["copyrev"] = hex(crev)
877 fparent1, fparent2 = nullid, newfparent
887 fparent1, fparent2 = nullid, newfparent
878 else:
888 else:
879 self.ui.warn(_("warning: can't find ancestor for '%s' "
889 self.ui.warn(_("warning: can't find ancestor for '%s' "
880 "copied from '%s'!\n") % (fname, cfname))
890 "copied from '%s'!\n") % (fname, cfname))
881
891
882 elif fparent2 != nullid:
892 elif fparent2 != nullid:
883 # is one parent an ancestor of the other?
893 # is one parent an ancestor of the other?
884 fparentancestor = flog.ancestor(fparent1, fparent2)
894 fparentancestor = flog.ancestor(fparent1, fparent2)
885 if fparentancestor == fparent1:
895 if fparentancestor == fparent1:
886 fparent1, fparent2 = fparent2, nullid
896 fparent1, fparent2 = fparent2, nullid
887 elif fparentancestor == fparent2:
897 elif fparentancestor == fparent2:
888 fparent2 = nullid
898 fparent2 = nullid
889
899
890 # is the file changed?
900 # is the file changed?
891 if fparent2 != nullid or flog.cmp(fparent1, text) or meta:
901 if fparent2 != nullid or flog.cmp(fparent1, text) or meta:
892 changelist.append(fname)
902 changelist.append(fname)
893 return flog.add(text, meta, tr, linkrev, fparent1, fparent2)
903 return flog.add(text, meta, tr, linkrev, fparent1, fparent2)
894
904
895 # are just the flags changed during merge?
905 # are just the flags changed during merge?
896 if fparent1 != fparent2o and manifest1.flags(fname) != fctx.flags():
906 if fparent1 != fparent2o and manifest1.flags(fname) != fctx.flags():
897 changelist.append(fname)
907 changelist.append(fname)
898
908
899 return fparent1
909 return fparent1
900
910
901 def commit(self, text="", user=None, date=None, match=None, force=False,
911 def commit(self, text="", user=None, date=None, match=None, force=False,
902 editor=False, extra={}):
912 editor=False, extra={}):
903 """Add a new revision to current repository.
913 """Add a new revision to current repository.
904
914
905 Revision information is gathered from the working directory,
915 Revision information is gathered from the working directory,
906 match can be used to filter the committed files. If editor is
916 match can be used to filter the committed files. If editor is
907 supplied, it is called to get a commit message.
917 supplied, it is called to get a commit message.
908 """
918 """
909
919
910 def fail(f, msg):
920 def fail(f, msg):
911 raise util.Abort('%s: %s' % (f, msg))
921 raise util.Abort('%s: %s' % (f, msg))
912
922
913 if not match:
923 if not match:
914 match = matchmod.always(self.root, '')
924 match = matchmod.always(self.root, '')
915
925
916 if not force:
926 if not force:
917 vdirs = []
927 vdirs = []
918 match.dir = vdirs.append
928 match.dir = vdirs.append
919 match.bad = fail
929 match.bad = fail
920
930
921 wlock = self.wlock()
931 wlock = self.wlock()
922 try:
932 try:
923 wctx = self[None]
933 wctx = self[None]
924 merge = len(wctx.parents()) > 1
934 merge = len(wctx.parents()) > 1
925
935
926 if (not force and merge and match and
936 if (not force and merge and match and
927 (match.files() or match.anypats())):
937 (match.files() or match.anypats())):
928 raise util.Abort(_('cannot partially commit a merge '
938 raise util.Abort(_('cannot partially commit a merge '
929 '(do not specify files or patterns)'))
939 '(do not specify files or patterns)'))
930
940
931 changes = self.status(match=match, clean=force)
941 changes = self.status(match=match, clean=force)
932 if force:
942 if force:
933 changes[0].extend(changes[6]) # mq may commit unchanged files
943 changes[0].extend(changes[6]) # mq may commit unchanged files
934
944
935 # check subrepos
945 # check subrepos
936 subs = []
946 subs = []
937 removedsubs = set()
947 removedsubs = set()
938 for p in wctx.parents():
948 for p in wctx.parents():
939 removedsubs.update(s for s in p.substate if match(s))
949 removedsubs.update(s for s in p.substate if match(s))
940 for s in wctx.substate:
950 for s in wctx.substate:
941 removedsubs.discard(s)
951 removedsubs.discard(s)
942 if match(s) and wctx.sub(s).dirty():
952 if match(s) and wctx.sub(s).dirty():
943 subs.append(s)
953 subs.append(s)
944 if (subs or removedsubs):
954 if (subs or removedsubs):
945 if (not match('.hgsub') and
955 if (not match('.hgsub') and
946 '.hgsub' in (wctx.modified() + wctx.added())):
956 '.hgsub' in (wctx.modified() + wctx.added())):
947 raise util.Abort(_("can't commit subrepos without .hgsub"))
957 raise util.Abort(_("can't commit subrepos without .hgsub"))
948 if '.hgsubstate' not in changes[0]:
958 if '.hgsubstate' not in changes[0]:
949 changes[0].insert(0, '.hgsubstate')
959 changes[0].insert(0, '.hgsubstate')
950
960
951 if subs and not self.ui.configbool('ui', 'commitsubrepos', True):
961 if subs and not self.ui.configbool('ui', 'commitsubrepos', True):
952 changedsubs = [s for s in subs if wctx.sub(s).dirty(True)]
962 changedsubs = [s for s in subs if wctx.sub(s).dirty(True)]
953 if changedsubs:
963 if changedsubs:
954 raise util.Abort(_("uncommitted changes in subrepo %s")
964 raise util.Abort(_("uncommitted changes in subrepo %s")
955 % changedsubs[0])
965 % changedsubs[0])
956
966
957 # make sure all explicit patterns are matched
967 # make sure all explicit patterns are matched
958 if not force and match.files():
968 if not force and match.files():
959 matched = set(changes[0] + changes[1] + changes[2])
969 matched = set(changes[0] + changes[1] + changes[2])
960
970
961 for f in match.files():
971 for f in match.files():
962 if f == '.' or f in matched or f in wctx.substate:
972 if f == '.' or f in matched or f in wctx.substate:
963 continue
973 continue
964 if f in changes[3]: # missing
974 if f in changes[3]: # missing
965 fail(f, _('file not found!'))
975 fail(f, _('file not found!'))
966 if f in vdirs: # visited directory
976 if f in vdirs: # visited directory
967 d = f + '/'
977 d = f + '/'
968 for mf in matched:
978 for mf in matched:
969 if mf.startswith(d):
979 if mf.startswith(d):
970 break
980 break
971 else:
981 else:
972 fail(f, _("no match under directory!"))
982 fail(f, _("no match under directory!"))
973 elif f not in self.dirstate:
983 elif f not in self.dirstate:
974 fail(f, _("file not tracked!"))
984 fail(f, _("file not tracked!"))
975
985
976 if (not force and not extra.get("close") and not merge
986 if (not force and not extra.get("close") and not merge
977 and not (changes[0] or changes[1] or changes[2])
987 and not (changes[0] or changes[1] or changes[2])
978 and wctx.branch() == wctx.p1().branch()):
988 and wctx.branch() == wctx.p1().branch()):
979 return None
989 return None
980
990
981 ms = mergemod.mergestate(self)
991 ms = mergemod.mergestate(self)
982 for f in changes[0]:
992 for f in changes[0]:
983 if f in ms and ms[f] == 'u':
993 if f in ms and ms[f] == 'u':
984 raise util.Abort(_("unresolved merge conflicts "
994 raise util.Abort(_("unresolved merge conflicts "
985 "(see hg help resolve)"))
995 "(see hg help resolve)"))
986
996
987 cctx = context.workingctx(self, text, user, date, extra, changes)
997 cctx = context.workingctx(self, text, user, date, extra, changes)
988 if editor:
998 if editor:
989 cctx._text = editor(self, cctx, subs)
999 cctx._text = editor(self, cctx, subs)
990 edited = (text != cctx._text)
1000 edited = (text != cctx._text)
991
1001
992 # commit subs
1002 # commit subs
993 if subs or removedsubs:
1003 if subs or removedsubs:
994 state = wctx.substate.copy()
1004 state = wctx.substate.copy()
995 for s in sorted(subs):
1005 for s in sorted(subs):
996 sub = wctx.sub(s)
1006 sub = wctx.sub(s)
997 self.ui.status(_('committing subrepository %s\n') %
1007 self.ui.status(_('committing subrepository %s\n') %
998 subrepo.subrelpath(sub))
1008 subrepo.subrelpath(sub))
999 sr = sub.commit(cctx._text, user, date)
1009 sr = sub.commit(cctx._text, user, date)
1000 state[s] = (state[s][0], sr)
1010 state[s] = (state[s][0], sr)
1001 subrepo.writestate(self, state)
1011 subrepo.writestate(self, state)
1002
1012
1003 # Save commit message in case this transaction gets rolled back
1013 # Save commit message in case this transaction gets rolled back
1004 # (e.g. by a pretxncommit hook). Leave the content alone on
1014 # (e.g. by a pretxncommit hook). Leave the content alone on
1005 # the assumption that the user will use the same editor again.
1015 # the assumption that the user will use the same editor again.
1006 msgfile = self.opener('last-message.txt', 'wb')
1016 msgfile = self.opener('last-message.txt', 'wb')
1007 msgfile.write(cctx._text)
1017 msgfile.write(cctx._text)
1008 msgfile.close()
1018 msgfile.close()
1009
1019
1010 p1, p2 = self.dirstate.parents()
1020 p1, p2 = self.dirstate.parents()
1011 hookp1, hookp2 = hex(p1), (p2 != nullid and hex(p2) or '')
1021 hookp1, hookp2 = hex(p1), (p2 != nullid and hex(p2) or '')
1012 try:
1022 try:
1013 self.hook("precommit", throw=True, parent1=hookp1, parent2=hookp2)
1023 self.hook("precommit", throw=True, parent1=hookp1, parent2=hookp2)
1014 ret = self.commitctx(cctx, True)
1024 ret = self.commitctx(cctx, True)
1015 except:
1025 except:
1016 if edited:
1026 if edited:
1017 msgfn = self.pathto(msgfile.name[len(self.root)+1:])
1027 msgfn = self.pathto(msgfile.name[len(self.root)+1:])
1018 self.ui.write(
1028 self.ui.write(
1019 _('note: commit message saved in %s\n') % msgfn)
1029 _('note: commit message saved in %s\n') % msgfn)
1020 raise
1030 raise
1021
1031
1022 # update bookmarks, dirstate and mergestate
1032 # update bookmarks, dirstate and mergestate
1023 bookmarks.update(self, p1, ret)
1033 bookmarks.update(self, p1, ret)
1024 for f in changes[0] + changes[1]:
1034 for f in changes[0] + changes[1]:
1025 self.dirstate.normal(f)
1035 self.dirstate.normal(f)
1026 for f in changes[2]:
1036 for f in changes[2]:
1027 self.dirstate.forget(f)
1037 self.dirstate.forget(f)
1028 self.dirstate.setparents(ret)
1038 self.dirstate.setparents(ret)
1029 ms.reset()
1039 ms.reset()
1030 finally:
1040 finally:
1031 wlock.release()
1041 wlock.release()
1032
1042
1033 self.hook("commit", node=hex(ret), parent1=hookp1, parent2=hookp2)
1043 self.hook("commit", node=hex(ret), parent1=hookp1, parent2=hookp2)
1034 return ret
1044 return ret
1035
1045
1036 def commitctx(self, ctx, error=False):
1046 def commitctx(self, ctx, error=False):
1037 """Add a new revision to current repository.
1047 """Add a new revision to current repository.
1038 Revision information is passed via the context argument.
1048 Revision information is passed via the context argument.
1039 """
1049 """
1040
1050
1041 tr = lock = None
1051 tr = lock = None
1042 removed = list(ctx.removed())
1052 removed = list(ctx.removed())
1043 p1, p2 = ctx.p1(), ctx.p2()
1053 p1, p2 = ctx.p1(), ctx.p2()
1044 user = ctx.user()
1054 user = ctx.user()
1045
1055
1046 lock = self.lock()
1056 lock = self.lock()
1047 try:
1057 try:
1048 tr = self.transaction("commit")
1058 tr = self.transaction("commit")
1049 trp = weakref.proxy(tr)
1059 trp = weakref.proxy(tr)
1050
1060
1051 if ctx.files():
1061 if ctx.files():
1052 m1 = p1.manifest().copy()
1062 m1 = p1.manifest().copy()
1053 m2 = p2.manifest()
1063 m2 = p2.manifest()
1054
1064
1055 # check in files
1065 # check in files
1056 new = {}
1066 new = {}
1057 changed = []
1067 changed = []
1058 linkrev = len(self)
1068 linkrev = len(self)
1059 for f in sorted(ctx.modified() + ctx.added()):
1069 for f in sorted(ctx.modified() + ctx.added()):
1060 self.ui.note(f + "\n")
1070 self.ui.note(f + "\n")
1061 try:
1071 try:
1062 fctx = ctx[f]
1072 fctx = ctx[f]
1063 new[f] = self._filecommit(fctx, m1, m2, linkrev, trp,
1073 new[f] = self._filecommit(fctx, m1, m2, linkrev, trp,
1064 changed)
1074 changed)
1065 m1.set(f, fctx.flags())
1075 m1.set(f, fctx.flags())
1066 except OSError, inst:
1076 except OSError, inst:
1067 self.ui.warn(_("trouble committing %s!\n") % f)
1077 self.ui.warn(_("trouble committing %s!\n") % f)
1068 raise
1078 raise
1069 except IOError, inst:
1079 except IOError, inst:
1070 errcode = getattr(inst, 'errno', errno.ENOENT)
1080 errcode = getattr(inst, 'errno', errno.ENOENT)
1071 if error or errcode and errcode != errno.ENOENT:
1081 if error or errcode and errcode != errno.ENOENT:
1072 self.ui.warn(_("trouble committing %s!\n") % f)
1082 self.ui.warn(_("trouble committing %s!\n") % f)
1073 raise
1083 raise
1074 else:
1084 else:
1075 removed.append(f)
1085 removed.append(f)
1076
1086
1077 # update manifest
1087 # update manifest
1078 m1.update(new)
1088 m1.update(new)
1079 removed = [f for f in sorted(removed) if f in m1 or f in m2]
1089 removed = [f for f in sorted(removed) if f in m1 or f in m2]
1080 drop = [f for f in removed if f in m1]
1090 drop = [f for f in removed if f in m1]
1081 for f in drop:
1091 for f in drop:
1082 del m1[f]
1092 del m1[f]
1083 mn = self.manifest.add(m1, trp, linkrev, p1.manifestnode(),
1093 mn = self.manifest.add(m1, trp, linkrev, p1.manifestnode(),
1084 p2.manifestnode(), (new, drop))
1094 p2.manifestnode(), (new, drop))
1085 files = changed + removed
1095 files = changed + removed
1086 else:
1096 else:
1087 mn = p1.manifestnode()
1097 mn = p1.manifestnode()
1088 files = []
1098 files = []
1089
1099
1090 # update changelog
1100 # update changelog
1091 self.changelog.delayupdate()
1101 self.changelog.delayupdate()
1092 n = self.changelog.add(mn, files, ctx.description(),
1102 n = self.changelog.add(mn, files, ctx.description(),
1093 trp, p1.node(), p2.node(),
1103 trp, p1.node(), p2.node(),
1094 user, ctx.date(), ctx.extra().copy())
1104 user, ctx.date(), ctx.extra().copy())
1095 p = lambda: self.changelog.writepending() and self.root or ""
1105 p = lambda: self.changelog.writepending() and self.root or ""
1096 xp1, xp2 = p1.hex(), p2 and p2.hex() or ''
1106 xp1, xp2 = p1.hex(), p2 and p2.hex() or ''
1097 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
1107 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
1098 parent2=xp2, pending=p)
1108 parent2=xp2, pending=p)
1099 self.changelog.finalize(trp)
1109 self.changelog.finalize(trp)
1100 tr.close()
1110 tr.close()
1101
1111
1102 if self._branchcache:
1112 if self._branchcache:
1103 self.updatebranchcache()
1113 self.updatebranchcache()
1104 return n
1114 return n
1105 finally:
1115 finally:
1106 if tr:
1116 if tr:
1107 tr.release()
1117 tr.release()
1108 lock.release()
1118 lock.release()
1109
1119
1110 def destroyed(self):
1120 def destroyed(self):
1111 '''Inform the repository that nodes have been destroyed.
1121 '''Inform the repository that nodes have been destroyed.
1112 Intended for use by strip and rollback, so there's a common
1122 Intended for use by strip and rollback, so there's a common
1113 place for anything that has to be done after destroying history.'''
1123 place for anything that has to be done after destroying history.'''
1114 # XXX it might be nice if we could take the list of destroyed
1124 # XXX it might be nice if we could take the list of destroyed
1115 # nodes, but I don't see an easy way for rollback() to do that
1125 # nodes, but I don't see an easy way for rollback() to do that
1116
1126
1117 # Ensure the persistent tag cache is updated. Doing it now
1127 # Ensure the persistent tag cache is updated. Doing it now
1118 # means that the tag cache only has to worry about destroyed
1128 # means that the tag cache only has to worry about destroyed
1119 # heads immediately after a strip/rollback. That in turn
1129 # heads immediately after a strip/rollback. That in turn
1120 # guarantees that "cachetip == currenttip" (comparing both rev
1130 # guarantees that "cachetip == currenttip" (comparing both rev
1121 # and node) always means no nodes have been added or destroyed.
1131 # and node) always means no nodes have been added or destroyed.
1122
1132
1123 # XXX this is suboptimal when qrefresh'ing: we strip the current
1133 # XXX this is suboptimal when qrefresh'ing: we strip the current
1124 # head, refresh the tag cache, then immediately add a new head.
1134 # head, refresh the tag cache, then immediately add a new head.
1125 # But I think doing it this way is necessary for the "instant
1135 # But I think doing it this way is necessary for the "instant
1126 # tag cache retrieval" case to work.
1136 # tag cache retrieval" case to work.
1127 self.invalidatecaches()
1137 self.invalidatecaches()
1128
1138
1129 def walk(self, match, node=None):
1139 def walk(self, match, node=None):
1130 '''
1140 '''
1131 walk recursively through the directory tree or a given
1141 walk recursively through the directory tree or a given
1132 changeset, finding all files matched by the match
1142 changeset, finding all files matched by the match
1133 function
1143 function
1134 '''
1144 '''
1135 return self[node].walk(match)
1145 return self[node].walk(match)
1136
1146
1137 def status(self, node1='.', node2=None, match=None,
1147 def status(self, node1='.', node2=None, match=None,
1138 ignored=False, clean=False, unknown=False,
1148 ignored=False, clean=False, unknown=False,
1139 listsubrepos=False):
1149 listsubrepos=False):
1140 """return status of files between two nodes or node and working directory
1150 """return status of files between two nodes or node and working directory
1141
1151
1142 If node1 is None, use the first dirstate parent instead.
1152 If node1 is None, use the first dirstate parent instead.
1143 If node2 is None, compare node1 with working directory.
1153 If node2 is None, compare node1 with working directory.
1144 """
1154 """
1145
1155
1146 def mfmatches(ctx):
1156 def mfmatches(ctx):
1147 mf = ctx.manifest().copy()
1157 mf = ctx.manifest().copy()
1148 for fn in mf.keys():
1158 for fn in mf.keys():
1149 if not match(fn):
1159 if not match(fn):
1150 del mf[fn]
1160 del mf[fn]
1151 return mf
1161 return mf
1152
1162
1153 if isinstance(node1, context.changectx):
1163 if isinstance(node1, context.changectx):
1154 ctx1 = node1
1164 ctx1 = node1
1155 else:
1165 else:
1156 ctx1 = self[node1]
1166 ctx1 = self[node1]
1157 if isinstance(node2, context.changectx):
1167 if isinstance(node2, context.changectx):
1158 ctx2 = node2
1168 ctx2 = node2
1159 else:
1169 else:
1160 ctx2 = self[node2]
1170 ctx2 = self[node2]
1161
1171
1162 working = ctx2.rev() is None
1172 working = ctx2.rev() is None
1163 parentworking = working and ctx1 == self['.']
1173 parentworking = working and ctx1 == self['.']
1164 match = match or matchmod.always(self.root, self.getcwd())
1174 match = match or matchmod.always(self.root, self.getcwd())
1165 listignored, listclean, listunknown = ignored, clean, unknown
1175 listignored, listclean, listunknown = ignored, clean, unknown
1166
1176
1167 # load earliest manifest first for caching reasons
1177 # load earliest manifest first for caching reasons
1168 if not working and ctx2.rev() < ctx1.rev():
1178 if not working and ctx2.rev() < ctx1.rev():
1169 ctx2.manifest()
1179 ctx2.manifest()
1170
1180
1171 if not parentworking:
1181 if not parentworking:
1172 def bad(f, msg):
1182 def bad(f, msg):
1173 if f not in ctx1:
1183 if f not in ctx1:
1174 self.ui.warn('%s: %s\n' % (self.dirstate.pathto(f), msg))
1184 self.ui.warn('%s: %s\n' % (self.dirstate.pathto(f), msg))
1175 match.bad = bad
1185 match.bad = bad
1176
1186
1177 if working: # we need to scan the working dir
1187 if working: # we need to scan the working dir
1178 subrepos = []
1188 subrepos = []
1179 if '.hgsub' in self.dirstate:
1189 if '.hgsub' in self.dirstate:
1180 subrepos = ctx1.substate.keys()
1190 subrepos = ctx1.substate.keys()
1181 s = self.dirstate.status(match, subrepos, listignored,
1191 s = self.dirstate.status(match, subrepos, listignored,
1182 listclean, listunknown)
1192 listclean, listunknown)
1183 cmp, modified, added, removed, deleted, unknown, ignored, clean = s
1193 cmp, modified, added, removed, deleted, unknown, ignored, clean = s
1184
1194
1185 # check for any possibly clean files
1195 # check for any possibly clean files
1186 if parentworking and cmp:
1196 if parentworking and cmp:
1187 fixup = []
1197 fixup = []
1188 # do a full compare of any files that might have changed
1198 # do a full compare of any files that might have changed
1189 for f in sorted(cmp):
1199 for f in sorted(cmp):
1190 if (f not in ctx1 or ctx2.flags(f) != ctx1.flags(f)
1200 if (f not in ctx1 or ctx2.flags(f) != ctx1.flags(f)
1191 or ctx1[f].cmp(ctx2[f])):
1201 or ctx1[f].cmp(ctx2[f])):
1192 modified.append(f)
1202 modified.append(f)
1193 else:
1203 else:
1194 fixup.append(f)
1204 fixup.append(f)
1195
1205
1196 # update dirstate for files that are actually clean
1206 # update dirstate for files that are actually clean
1197 if fixup:
1207 if fixup:
1198 if listclean:
1208 if listclean:
1199 clean += fixup
1209 clean += fixup
1200
1210
1201 try:
1211 try:
1202 # updating the dirstate is optional
1212 # updating the dirstate is optional
1203 # so we don't wait on the lock
1213 # so we don't wait on the lock
1204 wlock = self.wlock(False)
1214 wlock = self.wlock(False)
1205 try:
1215 try:
1206 for f in fixup:
1216 for f in fixup:
1207 self.dirstate.normal(f)
1217 self.dirstate.normal(f)
1208 finally:
1218 finally:
1209 wlock.release()
1219 wlock.release()
1210 except error.LockError:
1220 except error.LockError:
1211 pass
1221 pass
1212
1222
1213 if not parentworking:
1223 if not parentworking:
1214 mf1 = mfmatches(ctx1)
1224 mf1 = mfmatches(ctx1)
1215 if working:
1225 if working:
1216 # we are comparing working dir against non-parent
1226 # we are comparing working dir against non-parent
1217 # generate a pseudo-manifest for the working dir
1227 # generate a pseudo-manifest for the working dir
1218 mf2 = mfmatches(self['.'])
1228 mf2 = mfmatches(self['.'])
1219 for f in cmp + modified + added:
1229 for f in cmp + modified + added:
1220 mf2[f] = None
1230 mf2[f] = None
1221 mf2.set(f, ctx2.flags(f))
1231 mf2.set(f, ctx2.flags(f))
1222 for f in removed:
1232 for f in removed:
1223 if f in mf2:
1233 if f in mf2:
1224 del mf2[f]
1234 del mf2[f]
1225 else:
1235 else:
1226 # we are comparing two revisions
1236 # we are comparing two revisions
1227 deleted, unknown, ignored = [], [], []
1237 deleted, unknown, ignored = [], [], []
1228 mf2 = mfmatches(ctx2)
1238 mf2 = mfmatches(ctx2)
1229
1239
1230 modified, added, clean = [], [], []
1240 modified, added, clean = [], [], []
1231 for fn in mf2:
1241 for fn in mf2:
1232 if fn in mf1:
1242 if fn in mf1:
1233 if (fn not in deleted and
1243 if (fn not in deleted and
1234 (mf1.flags(fn) != mf2.flags(fn) or
1244 (mf1.flags(fn) != mf2.flags(fn) or
1235 (mf1[fn] != mf2[fn] and
1245 (mf1[fn] != mf2[fn] and
1236 (mf2[fn] or ctx1[fn].cmp(ctx2[fn]))))):
1246 (mf2[fn] or ctx1[fn].cmp(ctx2[fn]))))):
1237 modified.append(fn)
1247 modified.append(fn)
1238 elif listclean:
1248 elif listclean:
1239 clean.append(fn)
1249 clean.append(fn)
1240 del mf1[fn]
1250 del mf1[fn]
1241 elif fn not in deleted:
1251 elif fn not in deleted:
1242 added.append(fn)
1252 added.append(fn)
1243 removed = mf1.keys()
1253 removed = mf1.keys()
1244
1254
1245 r = modified, added, removed, deleted, unknown, ignored, clean
1255 r = modified, added, removed, deleted, unknown, ignored, clean
1246
1256
1247 if listsubrepos:
1257 if listsubrepos:
1248 for subpath, sub in subrepo.itersubrepos(ctx1, ctx2):
1258 for subpath, sub in subrepo.itersubrepos(ctx1, ctx2):
1249 if working:
1259 if working:
1250 rev2 = None
1260 rev2 = None
1251 else:
1261 else:
1252 rev2 = ctx2.substate[subpath][1]
1262 rev2 = ctx2.substate[subpath][1]
1253 try:
1263 try:
1254 submatch = matchmod.narrowmatcher(subpath, match)
1264 submatch = matchmod.narrowmatcher(subpath, match)
1255 s = sub.status(rev2, match=submatch, ignored=listignored,
1265 s = sub.status(rev2, match=submatch, ignored=listignored,
1256 clean=listclean, unknown=listunknown,
1266 clean=listclean, unknown=listunknown,
1257 listsubrepos=True)
1267 listsubrepos=True)
1258 for rfiles, sfiles in zip(r, s):
1268 for rfiles, sfiles in zip(r, s):
1259 rfiles.extend("%s/%s" % (subpath, f) for f in sfiles)
1269 rfiles.extend("%s/%s" % (subpath, f) for f in sfiles)
1260 except error.LookupError:
1270 except error.LookupError:
1261 self.ui.status(_("skipping missing subrepository: %s\n")
1271 self.ui.status(_("skipping missing subrepository: %s\n")
1262 % subpath)
1272 % subpath)
1263
1273
1264 for l in r:
1274 for l in r:
1265 l.sort()
1275 l.sort()
1266 return r
1276 return r
1267
1277
1268 def heads(self, start=None):
1278 def heads(self, start=None):
1269 heads = self.changelog.heads(start)
1279 heads = self.changelog.heads(start)
1270 # sort the output in rev descending order
1280 # sort the output in rev descending order
1271 return sorted(heads, key=self.changelog.rev, reverse=True)
1281 return sorted(heads, key=self.changelog.rev, reverse=True)
1272
1282
1273 def branchheads(self, branch=None, start=None, closed=False):
1283 def branchheads(self, branch=None, start=None, closed=False):
1274 '''return a (possibly filtered) list of heads for the given branch
1284 '''return a (possibly filtered) list of heads for the given branch
1275
1285
1276 Heads are returned in topological order, from newest to oldest.
1286 Heads are returned in topological order, from newest to oldest.
1277 If branch is None, use the dirstate branch.
1287 If branch is None, use the dirstate branch.
1278 If start is not None, return only heads reachable from start.
1288 If start is not None, return only heads reachable from start.
1279 If closed is True, return heads that are marked as closed as well.
1289 If closed is True, return heads that are marked as closed as well.
1280 '''
1290 '''
1281 if branch is None:
1291 if branch is None:
1282 branch = self[None].branch()
1292 branch = self[None].branch()
1283 branches = self.branchmap()
1293 branches = self.branchmap()
1284 if branch not in branches:
1294 if branch not in branches:
1285 return []
1295 return []
1286 # the cache returns heads ordered lowest to highest
1296 # the cache returns heads ordered lowest to highest
1287 bheads = list(reversed(branches[branch]))
1297 bheads = list(reversed(branches[branch]))
1288 if start is not None:
1298 if start is not None:
1289 # filter out the heads that cannot be reached from startrev
1299 # filter out the heads that cannot be reached from startrev
1290 fbheads = set(self.changelog.nodesbetween([start], bheads)[2])
1300 fbheads = set(self.changelog.nodesbetween([start], bheads)[2])
1291 bheads = [h for h in bheads if h in fbheads]
1301 bheads = [h for h in bheads if h in fbheads]
1292 if not closed:
1302 if not closed:
1293 bheads = [h for h in bheads if
1303 bheads = [h for h in bheads if
1294 ('close' not in self.changelog.read(h)[5])]
1304 ('close' not in self.changelog.read(h)[5])]
1295 return bheads
1305 return bheads
1296
1306
1297 def branches(self, nodes):
1307 def branches(self, nodes):
1298 if not nodes:
1308 if not nodes:
1299 nodes = [self.changelog.tip()]
1309 nodes = [self.changelog.tip()]
1300 b = []
1310 b = []
1301 for n in nodes:
1311 for n in nodes:
1302 t = n
1312 t = n
1303 while 1:
1313 while 1:
1304 p = self.changelog.parents(n)
1314 p = self.changelog.parents(n)
1305 if p[1] != nullid or p[0] == nullid:
1315 if p[1] != nullid or p[0] == nullid:
1306 b.append((t, n, p[0], p[1]))
1316 b.append((t, n, p[0], p[1]))
1307 break
1317 break
1308 n = p[0]
1318 n = p[0]
1309 return b
1319 return b
1310
1320
1311 def between(self, pairs):
1321 def between(self, pairs):
1312 r = []
1322 r = []
1313
1323
1314 for top, bottom in pairs:
1324 for top, bottom in pairs:
1315 n, l, i = top, [], 0
1325 n, l, i = top, [], 0
1316 f = 1
1326 f = 1
1317
1327
1318 while n != bottom and n != nullid:
1328 while n != bottom and n != nullid:
1319 p = self.changelog.parents(n)[0]
1329 p = self.changelog.parents(n)[0]
1320 if i == f:
1330 if i == f:
1321 l.append(n)
1331 l.append(n)
1322 f = f * 2
1332 f = f * 2
1323 n = p
1333 n = p
1324 i += 1
1334 i += 1
1325
1335
1326 r.append(l)
1336 r.append(l)
1327
1337
1328 return r
1338 return r
1329
1339
1330 def pull(self, remote, heads=None, force=False):
1340 def pull(self, remote, heads=None, force=False):
1331 lock = self.lock()
1341 lock = self.lock()
1332 try:
1342 try:
1333 tmp = discovery.findcommonincoming(self, remote, heads=heads,
1343 tmp = discovery.findcommonincoming(self, remote, heads=heads,
1334 force=force)
1344 force=force)
1335 common, fetch, rheads = tmp
1345 common, fetch, rheads = tmp
1336 if not fetch:
1346 if not fetch:
1337 self.ui.status(_("no changes found\n"))
1347 self.ui.status(_("no changes found\n"))
1338 result = 0
1348 result = 0
1339 else:
1349 else:
1340 if heads is None and list(common) == [nullid]:
1350 if heads is None and list(common) == [nullid]:
1341 self.ui.status(_("requesting all changes\n"))
1351 self.ui.status(_("requesting all changes\n"))
1342 elif heads is None and remote.capable('changegroupsubset'):
1352 elif heads is None and remote.capable('changegroupsubset'):
1343 # issue1320, avoid a race if remote changed after discovery
1353 # issue1320, avoid a race if remote changed after discovery
1344 heads = rheads
1354 heads = rheads
1345
1355
1346 if remote.capable('getbundle'):
1356 if remote.capable('getbundle'):
1347 cg = remote.getbundle('pull', common=common,
1357 cg = remote.getbundle('pull', common=common,
1348 heads=heads or rheads)
1358 heads=heads or rheads)
1349 elif heads is None:
1359 elif heads is None:
1350 cg = remote.changegroup(fetch, 'pull')
1360 cg = remote.changegroup(fetch, 'pull')
1351 elif not remote.capable('changegroupsubset'):
1361 elif not remote.capable('changegroupsubset'):
1352 raise util.Abort(_("partial pull cannot be done because "
1362 raise util.Abort(_("partial pull cannot be done because "
1353 "other repository doesn't support "
1363 "other repository doesn't support "
1354 "changegroupsubset."))
1364 "changegroupsubset."))
1355 else:
1365 else:
1356 cg = remote.changegroupsubset(fetch, heads, 'pull')
1366 cg = remote.changegroupsubset(fetch, heads, 'pull')
1357 result = self.addchangegroup(cg, 'pull', remote.url(),
1367 result = self.addchangegroup(cg, 'pull', remote.url(),
1358 lock=lock)
1368 lock=lock)
1359 finally:
1369 finally:
1360 lock.release()
1370 lock.release()
1361
1371
1362 return result
1372 return result
1363
1373
def checkpush(self, force, revs):
    """Hook point for extensions to veto or augment an outgoing push.

    The base implementation performs no checks at all. Extensions can
    override this function if additional checks have to be performed
    before pushing, or call it if they override the push command.

    force - boolean, the --force flag of the push
    revs  - the revisions selected for pushing (may be None for "all")
    """
1370
1380
def push(self, remote, force=False, revs=None, newbranch=False):
    '''Push outgoing changesets (limited by revs) from the current
    repository to remote. Return an integer:
      - 0 means HTTP error *or* nothing to push
      - 1 means we pushed and remote head count is unchanged *or*
        we have outgoing changesets but refused to push
      - other values as described by addchangegroup()

    remote    - the peer repository object to push to
    force     - allow creating new remote heads / forced bookmark moves
    revs      - limit the outgoing set to the ancestors of these revs
    newbranch - allow pushing a new named branch
    '''
    # there are two ways to push to remote repo:
    #
    # addchangegroup assumes local user can lock remote
    # repo (local filesystem, old ssh servers).
    #
    # unbundle assumes local user cannot lock remote repo (new ssh
    # servers, http servers).

    self.checkpush(force, revs)
    lock = None
    unbundle = remote.capable('unbundle')
    if not unbundle:
        # old-style push: we must lock the remote repo ourselves
        lock = remote.lock()
    try:
        # prepush computes the changegroup to send (or None if there is
        # nothing to push / the push was refused) plus the remote heads
        cg, remote_heads = discovery.prepush(self, remote, force, revs,
                                             newbranch)
        ret = remote_heads
        if cg is not None:
            if unbundle:
                # local repo finds heads on server, finds out what
                # revs it must push. once revs transferred, if server
                # finds it has different heads (someone else won
                # commit/push race), server aborts.
                if force:
                    remote_heads = ['force']
                # ssh: return remote's addchangegroup()
                # http: return remote's addchangegroup() or 0 for error
                ret = remote.unbundle(cg, remote_heads, 'push')
            else:
                # we return an integer indicating remote head count change
                ret = remote.addchangegroup(cg, 'push', self.url(),
                                            lock=lock)
    finally:
        if lock is not None:
            lock.release()

    self.ui.debug("checking for updated bookmarks\n")
    rb = remote.listkeys('bookmarks')
    # Only bookmarks that exist on both sides are considered for update.
    for k in rb.keys():
        if k in self._bookmarks:
            nr, nl = rb[k], hex(self._bookmarks[k])
            if nr in self:
                cr = self[nr]
                cl = self[nl]
                # fast-forward only: push the bookmark only when the
                # local position descends from the remote position
                if cl in cr.descendants():
                    r = remote.pushkey('bookmarks', k, nr, nl)
                    if r:
                        self.ui.status(_("updating bookmark %s\n") % k)
                    else:
                        # pushkey failed, e.g. remote bookmark moved
                        # concurrently (race) — report, do not abort
                        self.ui.warn(_('updating bookmark %s'
                                       ' failed!\n') % k)

    return ret
1432
1442
def changegroupinfo(self, nodes, source):
    """Report how many changesets are about to be bundled.

    The count is shown when verbose or when bundling to a file; the
    individual changeset hashes are listed only in debug mode.
    """
    ui = self.ui
    if ui.verbose or source == 'bundle':
        ui.status(_("%d changesets found\n") % len(nodes))
    if ui.debugflag:
        ui.debug("list of changesets:\n")
        for n in nodes:
            ui.debug("%s\n" % hex(n))
1440
1450
def changegroupsubset(self, bases, heads, source):
    """Compute a changegroup consisting of all the nodes that are
    descendents of any of the bases and ancestors of any of the heads.
    Return a chunkbuffer object whose read() method will return
    successive changegroup chunks.

    It is fairly complex as determining which filenodes and which
    manifest nodes need to be included for the changeset to be complete
    is non-trivial.

    Another wrinkle is doing the reverse, figuring out which changeset in
    the changegroup a particular filenode or manifestnode belongs to.
    """
    cl = self.changelog
    # An empty base set means "everything from the null revision up".
    csets, bases, heads = cl.nodesbetween(bases or [nullid], heads)
    # We assume that all ancestors of bases are known
    baserevs = [cl.rev(n) for n in bases]
    common = set(cl.ancestors(*baserevs))
    return self._changegroupsubset(common, csets, heads, source)
1461
1471
def getbundle(self, source, heads=None, common=None):
    """Like changegroupsubset, but returns the set difference between the
    ancestors of heads and the ancestors common.

    If heads is None, use the local heads. If common is None, use [nullid].

    The nodes in common might not all be known locally due to the way the
    current discovery protocol works.
    """
    cl = self.changelog
    if not common:
        common = [nullid]
    else:
        # Keep only the common nodes we actually know about.
        nodemap = cl.nodemap
        common = [node for node in common if node in nodemap]
    heads = heads or cl.heads()
    common, missing = cl.findcommonmissing(common, heads)
    if not missing:
        # Nothing to transfer.
        return None
    return self._changegroupsubset(common, missing, heads, source)
1483
1493
def _changegroupsubset(self, commonrevs, csets, heads, source):
    """Build a changegroup for csets, excluding anything whose linkrev
    is in commonrevs. Returns an unbundle10 object streaming the chunks.

    commonrevs - set of changelog revs the recipient already has
    csets      - changelog nodes to send, in order
    heads      - heads of the outgoing set (mutated: sorted in place)
    source     - operation name, passed to hooks and progress output
    """

    cl = self.changelog
    mf = self.manifest
    mfs = {} # needed manifests
    fnodes = {} # needed file nodes
    # shared mutable state for the lookup() closure below:
    # fstate[0] is the filename currently being bundled,
    # fstate[1] maps its filenodes to owning changelog nodes
    fstate = ['', {}]
    count = [0]

    # can we go through the fast path ?
    heads.sort()
    if heads == sorted(self.heads()):
        return self._changegroup(csets, source)

    # slow path
    self.hook('preoutgoing', throw=True, source=source)
    self.changegroupinfo(csets, source)

    # filter any nodes that claim to be part of the known set
    def prune(revlog, missing):
        for n in missing:
            if revlog.linkrev(revlog.rev(n)) not in commonrevs:
                yield n

    def lookup(revlog, x):
        # called back by the bundler to map a node to the changelog node
        # that introduced it; also harvests manifest/file metadata as a
        # side effect while the changelog is streamed
        if revlog == cl:
            c = cl.read(x)
            changedfiles.update(c[3])
            mfs.setdefault(c[0], x)
            count[0] += 1
            self.ui.progress(_('bundling'), count[0], unit=_('changesets'))
            return x
        elif revlog == mf:
            clnode = mfs[x]
            mdata = mf.readfast(x)
            for f in changedfiles:
                if f in mdata:
                    fnodes.setdefault(f, {}).setdefault(mdata[f], clnode)
            count[0] += 1
            self.ui.progress(_('bundling'), count[0],
                             unit=_('manifests'), total=len(mfs))
            return mfs[x]
        else:
            self.ui.progress(
                _('bundling'), count[0], item=fstate[0],
                unit=_('files'), total=len(changedfiles))
            return fstate[1][x]

    bundler = changegroup.bundle10(lookup)

    def gengroup():
        # Create a changenode group generator that will call our functions
        # back to lookup the owning changenode and collect information.
        for chunk in cl.group(csets, bundler):
            yield chunk
        self.ui.progress(_('bundling'), None)

        # Create a generator for the manifestnodes that calls our lookup
        # and data collection functions back.
        count[0] = 0
        for chunk in mf.group(prune(mf, mfs), bundler):
            yield chunk
        self.ui.progress(_('bundling'), None)

        mfs.clear()

        # Go through all our files in order sorted by name.
        count[0] = 0
        for fname in sorted(changedfiles):
            filerevlog = self.file(fname)
            if not len(filerevlog):
                raise util.Abort(_("empty or missing revlog for %s") % fname)
            fstate[0] = fname
            fstate[1] = fnodes.pop(fname, {})
            first = True

            for chunk in filerevlog.group(prune(filerevlog, fstate[1]),
                                          bundler):
                if first:
                    # an immediate close chunk means this file has no
                    # revisions to send; skip emitting its header
                    if chunk == bundler.close():
                        break
                    count[0] += 1
                    yield bundler.fileheader(fname)
                    first = False
                yield chunk
        # Signal that no more groups are left.
        yield bundler.close()
        self.ui.progress(_('bundling'), None)

    if csets:
        self.hook('outgoing', node=hex(csets[0]), source=source)

    return changegroup.unbundle10(util.chunkbuffer(gengroup()), 'UN')
1578
1588
def changegroup(self, basenodes, source):
    """Return a changegroup containing everything between basenodes and
    the current local heads.

    Delegates to changegroupsubset() so that the heads are re-resolved
    consistently, avoiding a race (issue1320).
    """
    return self.changegroupsubset(basenodes, self.heads(), source)
1582
1592
def _changegroup(self, nodes, source):
    """Compute the changegroup of all nodes that we have that a recipient
    doesn't. Return a chunkbuffer object whose read() method will return
    successive changegroup chunks.

    This is much easier than the previous function as we can assume that
    the recipient has any changenode we aren't sending them.

    nodes is the set of nodes to send"""

    cl = self.changelog
    mf = self.manifest
    mfs = {}
    changedfiles = set()
    # shared mutable state for the lookup() closure: fstate[0] holds the
    # filename currently being bundled (for progress output)
    fstate = ['']
    count = [0]

    self.hook('preoutgoing', throw=True, source=source)
    self.changegroupinfo(nodes, source)

    revset = set([cl.rev(n) for n in nodes])

    def gennodelst(log):
        # yield the nodes of `log` whose linkrev is in the outgoing set
        for r in log:
            if log.linkrev(r) in revset:
                yield log.node(r)

    def lookup(revlog, x):
        # called back by the bundler to map a node to the changelog node
        # that introduced it; harvests manifest/file info as a side effect
        if revlog == cl:
            c = cl.read(x)
            changedfiles.update(c[3])
            mfs.setdefault(c[0], x)
            count[0] += 1
            self.ui.progress(_('bundling'), count[0], unit=_('changesets'))
            return x
        elif revlog == mf:
            count[0] += 1
            self.ui.progress(_('bundling'), count[0],
                             unit=_('manifests'), total=len(mfs))
            return cl.node(revlog.linkrev(revlog.rev(x)))
        else:
            self.ui.progress(
                _('bundling'), count[0], item=fstate[0],
                total=len(changedfiles), unit=_('files'))
            return cl.node(revlog.linkrev(revlog.rev(x)))

    bundler = changegroup.bundle10(lookup)

    def gengroup():
        '''yield a sequence of changegroup chunks (strings)'''
        # construct a list of all changed files

        for chunk in cl.group(nodes, bundler):
            yield chunk
        self.ui.progress(_('bundling'), None)

        count[0] = 0
        for chunk in mf.group(gennodelst(mf), bundler):
            yield chunk
        self.ui.progress(_('bundling'), None)

        count[0] = 0
        for fname in sorted(changedfiles):
            filerevlog = self.file(fname)
            if not len(filerevlog):
                raise util.Abort(_("empty or missing revlog for %s") % fname)
            fstate[0] = fname
            first = True
            for chunk in filerevlog.group(gennodelst(filerevlog), bundler):
                if first:
                    # an immediate close chunk means this file has no
                    # revisions to send; skip emitting its header
                    if chunk == bundler.close():
                        break
                    count[0] += 1
                    yield bundler.fileheader(fname)
                    first = False
                yield chunk
        yield bundler.close()
        self.ui.progress(_('bundling'), None)

    if nodes:
        self.hook('outgoing', node=hex(nodes[0]), source=source)

    return changegroup.unbundle10(util.chunkbuffer(gengroup()), 'UN')
1666
1676
def addchangegroup(self, source, srctype, url, emptyok=False, lock=None):
    """Add the changegroup returned by source.read() to this repo.
    srctype is a string like 'push', 'pull', or 'unbundle'. url is
    the URL of the repo where this changegroup is coming from.
    If lock is not None, the function takes ownership of the lock
    and releases it after the changegroup is added.

    Return an integer summarizing the change to this repo:
    - nothing changed or no source: 0
    - more heads than before: 1+added heads (2..n)
    - fewer heads than before: -1-removed heads (-2..-n)
    - number of heads stays the same: 1
    """
    def csmap(x):
        # linkrev callback for the changelog: next rev to be assigned
        self.ui.debug("add changeset %s\n" % short(x))
        return len(cl)

    def revmap(x):
        # linkrev callback for manifests/filelogs: resolve via changelog
        return cl.rev(x)

    if not source:
        return 0

    self.hook('prechangegroup', throw=True, source=srctype, url=url)

    changesets = files = revisions = 0
    efiles = set()

    # write changelog data to temp files so concurrent readers will not see
    # inconsistent view
    cl = self.changelog
    cl.delayupdate()
    oldheads = cl.heads()

    tr = self.transaction("\n".join([srctype, util.hidepassword(url)]))
    try:
        trp = weakref.proxy(tr)
        # pull off the changeset group
        self.ui.status(_("adding changesets\n"))
        clstart = len(cl)
        class prog(object):
            # per-chunk progress callback handed to the unbundler
            step = _('changesets')
            count = 1
            ui = self.ui
            total = None
            def __call__(self):
                self.ui.progress(self.step, self.count, unit=_('chunks'),
                                 total=self.total)
                self.count += 1
        pr = prog()
        source.callback = pr

        source.changelogheader()
        if (cl.addgroup(source, csmap, trp) is None
            and not emptyok):
            raise util.Abort(_("received changelog group is empty"))
        clend = len(cl)
        changesets = clend - clstart
        # count of distinct files touched, used as the progress total
        for c in xrange(clstart, clend):
            efiles.update(self[c].files())
        efiles = len(efiles)
        self.ui.progress(_('changesets'), None)

        # pull off the manifest group
        self.ui.status(_("adding manifests\n"))
        pr.step = _('manifests')
        pr.count = 1
        pr.total = changesets # manifests <= changesets
        # no need to check for empty manifest group here:
        # if the result of the merge of 1 and 2 is the same in 3 and 4,
        # no new manifest will be created and the manifest group will
        # be empty during the pull
        source.manifestheader()
        self.manifest.addgroup(source, revmap, trp)
        self.ui.progress(_('manifests'), None)

        needfiles = {}
        if self.ui.configbool('server', 'validate', default=False):
            # validate incoming csets have their manifests
            for cset in xrange(clstart, clend):
                mfest = self.changelog.read(self.changelog.node(cset))[0]
                mfest = self.manifest.readdelta(mfest)
                # store file nodes we must see
                for f, n in mfest.iteritems():
                    needfiles.setdefault(f, set()).add(n)

        # process the files
        self.ui.status(_("adding file changes\n"))
        pr.step = 'files'
        pr.count = 1
        pr.total = efiles
        source.callback = None

        while 1:
            chunkdata = source.filelogheader()
            if not chunkdata:
                # end of the file groups
                break
            f = chunkdata["filename"]
            self.ui.debug("adding %s revisions\n" % f)
            pr()
            fl = self.file(f)
            o = len(fl)
            if fl.addgroup(source, revmap, trp) is None:
                raise util.Abort(_("received file revlog group is empty"))
            revisions += len(fl) - o
            files += 1
            if f in needfiles:
                # tick off the filenodes we were expecting for validation
                needs = needfiles[f]
                for new in xrange(o, len(fl)):
                    n = fl.node(new)
                    if n in needs:
                        needs.remove(n)
                if not needs:
                    del needfiles[f]
        self.ui.progress(_('files'), None)

        # anything left in needfiles was expected but never received
        for f, needs in needfiles.iteritems():
            fl = self.file(f)
            for n in needs:
                try:
                    fl.rev(n)
                except error.LookupError:
                    raise util.Abort(
                        _('missing file data for %s:%s - run hg verify') %
                        (f, hex(n)))

        # dh = head-count delta, ignoring heads marked closed
        dh = 0
        if oldheads:
            heads = cl.heads()
            dh = len(heads) - len(oldheads)
            for h in heads:
                if h not in oldheads and 'close' in self[h].extra():
                    dh -= 1
        htext = ""
        if dh:
            htext = _(" (%+d heads)") % dh

        self.ui.status(_("added %d changesets"
                         " with %d changes to %d files%s\n")
                       % (changesets, revisions, files, htext))

        if changesets > 0:
            # pending data is written lazily, only if a hook needs it
            p = lambda: cl.writepending() and self.root or ""
            self.hook('pretxnchangegroup', throw=True,
                      node=hex(cl.node(clstart)), source=srctype,
                      url=url, pending=p)

        # make changelog see real files again
        cl.finalize(trp)

        tr.close()
    finally:
        tr.release()
        if lock:
            lock.release()

    if changesets > 0:
        # forcefully update the on-disk branch cache
        self.ui.debug("updating the branch cache\n")
        self.updatebranchcache()
        self.hook("changegroup", node=hex(cl.node(clstart)),
                  source=srctype, url=url)

        for i in xrange(clstart, clend):
            self.hook("incoming", node=hex(cl.node(i)),
                      source=srctype, url=url)

    # never return 0 here:
    if dh < 0:
        return dh - 1
    else:
        return dh + 1
1839
1849
1840 def stream_in(self, remote, requirements):
1850 def stream_in(self, remote, requirements):
1841 lock = self.lock()
1851 lock = self.lock()
1842 try:
1852 try:
1843 fp = remote.stream_out()
1853 fp = remote.stream_out()
1844 l = fp.readline()
1854 l = fp.readline()
1845 try:
1855 try:
1846 resp = int(l)
1856 resp = int(l)
1847 except ValueError:
1857 except ValueError:
1848 raise error.ResponseError(
1858 raise error.ResponseError(
1849 _('Unexpected response from remote server:'), l)
1859 _('Unexpected response from remote server:'), l)
1850 if resp == 1:
1860 if resp == 1:
1851 raise util.Abort(_('operation forbidden by server'))
1861 raise util.Abort(_('operation forbidden by server'))
1852 elif resp == 2:
1862 elif resp == 2:
1853 raise util.Abort(_('locking the remote repository failed'))
1863 raise util.Abort(_('locking the remote repository failed'))
1854 elif resp != 0:
1864 elif resp != 0:
1855 raise util.Abort(_('the server sent an unknown error code'))
1865 raise util.Abort(_('the server sent an unknown error code'))
1856 self.ui.status(_('streaming all changes\n'))
1866 self.ui.status(_('streaming all changes\n'))
1857 l = fp.readline()
1867 l = fp.readline()
1858 try:
1868 try:
1859 total_files, total_bytes = map(int, l.split(' ', 1))
1869 total_files, total_bytes = map(int, l.split(' ', 1))
1860 except (ValueError, TypeError):
1870 except (ValueError, TypeError):
1861 raise error.ResponseError(
1871 raise error.ResponseError(
1862 _('Unexpected response from remote server:'), l)
1872 _('Unexpected response from remote server:'), l)
1863 self.ui.status(_('%d files to transfer, %s of data\n') %
1873 self.ui.status(_('%d files to transfer, %s of data\n') %
1864 (total_files, util.bytecount(total_bytes)))
1874 (total_files, util.bytecount(total_bytes)))
1865 start = time.time()
1875 start = time.time()
1866 for i in xrange(total_files):
1876 for i in xrange(total_files):
1867 # XXX doesn't support '\n' or '\r' in filenames
1877 # XXX doesn't support '\n' or '\r' in filenames
1868 l = fp.readline()
1878 l = fp.readline()
1869 try:
1879 try:
1870 name, size = l.split('\0', 1)
1880 name, size = l.split('\0', 1)
1871 size = int(size)
1881 size = int(size)
1872 except (ValueError, TypeError):
1882 except (ValueError, TypeError):
1873 raise error.ResponseError(
1883 raise error.ResponseError(
1874 _('Unexpected response from remote server:'), l)
1884 _('Unexpected response from remote server:'), l)
1875 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
1885 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
1876 # for backwards compat, name was partially encoded
1886 # for backwards compat, name was partially encoded
1877 ofp = self.sopener(store.decodedir(name), 'w')
1887 ofp = self.sopener(store.decodedir(name), 'w')
1878 for chunk in util.filechunkiter(fp, limit=size):
1888 for chunk in util.filechunkiter(fp, limit=size):
1879 ofp.write(chunk)
1889 ofp.write(chunk)
1880 ofp.close()
1890 ofp.close()
1881 elapsed = time.time() - start
1891 elapsed = time.time() - start
1882 if elapsed <= 0:
1892 if elapsed <= 0:
1883 elapsed = 0.001
1893 elapsed = 0.001
1884 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1894 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1885 (util.bytecount(total_bytes), elapsed,
1895 (util.bytecount(total_bytes), elapsed,
1886 util.bytecount(total_bytes / elapsed)))
1896 util.bytecount(total_bytes / elapsed)))
1887
1897
1888 # new requirements = old non-format requirements + new format-related
1898 # new requirements = old non-format requirements + new format-related
1889 # requirements from the streamed-in repository
1899 # requirements from the streamed-in repository
1890 requirements.update(set(self.requirements) - self.supportedformats)
1900 requirements.update(set(self.requirements) - self.supportedformats)
1891 self._applyrequirements(requirements)
1901 self._applyrequirements(requirements)
1892 self._writerequirements()
1902 self._writerequirements()
1893
1903
1894 self.invalidate()
1904 self.invalidate()
1895 return len(self.heads()) + 1
1905 return len(self.heads()) + 1
1896 finally:
1906 finally:
1897 lock.release()
1907 lock.release()
1898
1908
1899 def clone(self, remote, heads=[], stream=False):
1909 def clone(self, remote, heads=[], stream=False):
1900 '''clone remote repository.
1910 '''clone remote repository.
1901
1911
1902 keyword arguments:
1912 keyword arguments:
1903 heads: list of revs to clone (forces use of pull)
1913 heads: list of revs to clone (forces use of pull)
1904 stream: use streaming clone if possible'''
1914 stream: use streaming clone if possible'''
1905
1915
1906 # now, all clients that can request uncompressed clones can
1916 # now, all clients that can request uncompressed clones can
1907 # read repo formats supported by all servers that can serve
1917 # read repo formats supported by all servers that can serve
1908 # them.
1918 # them.
1909
1919
1910 # if revlog format changes, client will have to check version
1920 # if revlog format changes, client will have to check version
1911 # and format flags on "stream" capability, and use
1921 # and format flags on "stream" capability, and use
1912 # uncompressed only if compatible.
1922 # uncompressed only if compatible.
1913
1923
1914 if stream and not heads:
1924 if stream and not heads:
1915 # 'stream' means remote revlog format is revlogv1 only
1925 # 'stream' means remote revlog format is revlogv1 only
1916 if remote.capable('stream'):
1926 if remote.capable('stream'):
1917 return self.stream_in(remote, set(('revlogv1',)))
1927 return self.stream_in(remote, set(('revlogv1',)))
1918 # otherwise, 'streamreqs' contains the remote revlog format
1928 # otherwise, 'streamreqs' contains the remote revlog format
1919 streamreqs = remote.capable('streamreqs')
1929 streamreqs = remote.capable('streamreqs')
1920 if streamreqs:
1930 if streamreqs:
1921 streamreqs = set(streamreqs.split(','))
1931 streamreqs = set(streamreqs.split(','))
1922 # if we support it, stream in and adjust our requirements
1932 # if we support it, stream in and adjust our requirements
1923 if not streamreqs - self.supportedformats:
1933 if not streamreqs - self.supportedformats:
1924 return self.stream_in(remote, streamreqs)
1934 return self.stream_in(remote, streamreqs)
1925 return self.pull(remote, heads)
1935 return self.pull(remote, heads)
1926
1936
1927 def pushkey(self, namespace, key, old, new):
1937 def pushkey(self, namespace, key, old, new):
1928 self.hook('prepushkey', throw=True, namespace=namespace, key=key,
1938 self.hook('prepushkey', throw=True, namespace=namespace, key=key,
1929 old=old, new=new)
1939 old=old, new=new)
1930 ret = pushkey.push(self, namespace, key, old, new)
1940 ret = pushkey.push(self, namespace, key, old, new)
1931 self.hook('pushkey', namespace=namespace, key=key, old=old, new=new,
1941 self.hook('pushkey', namespace=namespace, key=key, old=old, new=new,
1932 ret=ret)
1942 ret=ret)
1933 return ret
1943 return ret
1934
1944
1935 def listkeys(self, namespace):
1945 def listkeys(self, namespace):
1936 self.hook('prelistkeys', throw=True, namespace=namespace)
1946 self.hook('prelistkeys', throw=True, namespace=namespace)
1937 values = pushkey.list(self, namespace)
1947 values = pushkey.list(self, namespace)
1938 self.hook('listkeys', namespace=namespace, values=values)
1948 self.hook('listkeys', namespace=namespace, values=values)
1939 return values
1949 return values
1940
1950
1941 def debugwireargs(self, one, two, three=None, four=None, five=None):
1951 def debugwireargs(self, one, two, three=None, four=None, five=None):
1942 '''used to test argument passing over the wire'''
1952 '''used to test argument passing over the wire'''
1943 return "%s %s %s %s %s" % (one, two, three, four, five)
1953 return "%s %s %s %s %s" % (one, two, three, four, five)
1944
1954
1945 # used to avoid circular references so destructors work
1955 # used to avoid circular references so destructors work
1946 def aftertrans(files):
1956 def aftertrans(files):
1947 renamefiles = [tuple(t) for t in files]
1957 renamefiles = [tuple(t) for t in files]
1948 def a():
1958 def a():
1949 for src, dest in renamefiles:
1959 for src, dest in renamefiles:
1950 util.rename(src, dest)
1960 util.rename(src, dest)
1951 return a
1961 return a
1952
1962
1963 def undoname(fn):
1964 base, name = os.path.split(fn)
1965 assert name.startswith('journal')
1966 return os.path.join(base, name.replace('journal', 'undo', 1))
1967
1953 def instance(ui, path, create):
1968 def instance(ui, path, create):
1954 return localrepository(ui, util.localpath(path), create)
1969 return localrepository(ui, util.localpath(path), create)
1955
1970
1956 def islocal(path):
1971 def islocal(path):
1957 return True
1972 return True
@@ -1,327 +1,344 b''
1 $ hg init
1 $ hg init
2
2
3 no bookmarks
3 no bookmarks
4
4
5 $ hg bookmarks
5 $ hg bookmarks
6 no bookmarks set
6 no bookmarks set
7
7
8 bookmark rev -1
8 bookmark rev -1
9
9
10 $ hg bookmark X
10 $ hg bookmark X
11
11
12 list bookmarks
12 list bookmarks
13
13
14 $ hg bookmarks
14 $ hg bookmarks
15 * X -1:000000000000
15 * X -1:000000000000
16
16
17 list bookmarks with color
17 list bookmarks with color
18
18
19 $ hg --config extensions.color= --config color.mode=ansi \
19 $ hg --config extensions.color= --config color.mode=ansi \
20 > bookmarks --color=always
20 > bookmarks --color=always
21 \x1b[0;32m * X -1:000000000000\x1b[0m (esc)
21 \x1b[0;32m * X -1:000000000000\x1b[0m (esc)
22
22
23 $ echo a > a
23 $ echo a > a
24 $ hg add a
24 $ hg add a
25 $ hg commit -m 0
25 $ hg commit -m 0
26
26
27 bookmark X moved to rev 0
27 bookmark X moved to rev 0
28
28
29 $ hg bookmarks
29 $ hg bookmarks
30 * X 0:f7b1eb17ad24
30 * X 0:f7b1eb17ad24
31
31
32 look up bookmark
32 look up bookmark
33
33
34 $ hg log -r X
34 $ hg log -r X
35 changeset: 0:f7b1eb17ad24
35 changeset: 0:f7b1eb17ad24
36 bookmark: X
36 bookmark: X
37 tag: tip
37 tag: tip
38 user: test
38 user: test
39 date: Thu Jan 01 00:00:00 1970 +0000
39 date: Thu Jan 01 00:00:00 1970 +0000
40 summary: 0
40 summary: 0
41
41
42
42
43 second bookmark for rev 0
43 second bookmark for rev 0
44
44
45 $ hg bookmark X2
45 $ hg bookmark X2
46
46
47 bookmark rev -1 again
47 bookmark rev -1 again
48
48
49 $ hg bookmark -r null Y
49 $ hg bookmark -r null Y
50
50
51 list bookmarks
51 list bookmarks
52
52
53 $ hg bookmarks
53 $ hg bookmarks
54 X 0:f7b1eb17ad24
54 X 0:f7b1eb17ad24
55 * X2 0:f7b1eb17ad24
55 * X2 0:f7b1eb17ad24
56 Y -1:000000000000
56 Y -1:000000000000
57
57
58 $ echo b > b
58 $ echo b > b
59 $ hg add b
59 $ hg add b
60 $ hg commit -m 1
60 $ hg commit -m 1
61
61
62 bookmarks revset
62 bookmarks revset
63
63
64 $ hg log -r 'bookmark()'
64 $ hg log -r 'bookmark()'
65 changeset: 0:f7b1eb17ad24
65 changeset: 0:f7b1eb17ad24
66 bookmark: X
66 bookmark: X
67 user: test
67 user: test
68 date: Thu Jan 01 00:00:00 1970 +0000
68 date: Thu Jan 01 00:00:00 1970 +0000
69 summary: 0
69 summary: 0
70
70
71 changeset: 1:925d80f479bb
71 changeset: 1:925d80f479bb
72 bookmark: X2
72 bookmark: X2
73 tag: tip
73 tag: tip
74 user: test
74 user: test
75 date: Thu Jan 01 00:00:00 1970 +0000
75 date: Thu Jan 01 00:00:00 1970 +0000
76 summary: 1
76 summary: 1
77
77
78 $ hg log -r 'bookmark(Y)'
78 $ hg log -r 'bookmark(Y)'
79 $ hg log -r 'bookmark(X2)'
79 $ hg log -r 'bookmark(X2)'
80 changeset: 1:925d80f479bb
80 changeset: 1:925d80f479bb
81 bookmark: X2
81 bookmark: X2
82 tag: tip
82 tag: tip
83 user: test
83 user: test
84 date: Thu Jan 01 00:00:00 1970 +0000
84 date: Thu Jan 01 00:00:00 1970 +0000
85 summary: 1
85 summary: 1
86
86
87 $ hg log -r 'bookmark(unknown)'
87 $ hg log -r 'bookmark(unknown)'
88 abort: bookmark 'unknown' does not exist
88 abort: bookmark 'unknown' does not exist
89 [255]
89 [255]
90
90
91 $ hg help revsets | grep 'bookmark('
91 $ hg help revsets | grep 'bookmark('
92 "bookmark([name])"
92 "bookmark([name])"
93
93
94 bookmarks X and X2 moved to rev 1, Y at rev -1
94 bookmarks X and X2 moved to rev 1, Y at rev -1
95
95
96 $ hg bookmarks
96 $ hg bookmarks
97 X 0:f7b1eb17ad24
97 X 0:f7b1eb17ad24
98 * X2 1:925d80f479bb
98 * X2 1:925d80f479bb
99 Y -1:000000000000
99 Y -1:000000000000
100
100
101 bookmark rev 0 again
101 bookmark rev 0 again
102
102
103 $ hg bookmark -r 0 Z
103 $ hg bookmark -r 0 Z
104
104
105 $ hg update X
105 $ hg update X
106 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
106 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
107 $ echo c > c
107 $ echo c > c
108 $ hg add c
108 $ hg add c
109 $ hg commit -m 2
109 $ hg commit -m 2
110 created new head
110 created new head
111
111
112 bookmarks X moved to rev 2, Y at rev -1, Z at rev 0
112 bookmarks X moved to rev 2, Y at rev -1, Z at rev 0
113
113
114 $ hg bookmarks
114 $ hg bookmarks
115 * X 2:db815d6d32e6
115 * X 2:db815d6d32e6
116 X2 1:925d80f479bb
116 X2 1:925d80f479bb
117 Y -1:000000000000
117 Y -1:000000000000
118 Z 0:f7b1eb17ad24
118 Z 0:f7b1eb17ad24
119
119
120 rename nonexistent bookmark
120 rename nonexistent bookmark
121
121
122 $ hg bookmark -m A B
122 $ hg bookmark -m A B
123 abort: bookmark 'A' does not exist
123 abort: bookmark 'A' does not exist
124 [255]
124 [255]
125
125
126 rename to existent bookmark
126 rename to existent bookmark
127
127
128 $ hg bookmark -m X Y
128 $ hg bookmark -m X Y
129 abort: bookmark 'Y' already exists (use -f to force)
129 abort: bookmark 'Y' already exists (use -f to force)
130 [255]
130 [255]
131
131
132 force rename to existent bookmark
132 force rename to existent bookmark
133
133
134 $ hg bookmark -f -m X Y
134 $ hg bookmark -f -m X Y
135
135
136 list bookmarks
136 list bookmarks
137
137
138 $ hg bookmark
138 $ hg bookmark
139 X2 1:925d80f479bb
139 X2 1:925d80f479bb
140 * Y 2:db815d6d32e6
140 * Y 2:db815d6d32e6
141 Z 0:f7b1eb17ad24
141 Z 0:f7b1eb17ad24
142
142
143 rename without new name
143 rename without new name
144
144
145 $ hg bookmark -m Y
145 $ hg bookmark -m Y
146 abort: new bookmark name required
146 abort: new bookmark name required
147 [255]
147 [255]
148
148
149 delete without name
149 delete without name
150
150
151 $ hg bookmark -d
151 $ hg bookmark -d
152 abort: bookmark name required
152 abort: bookmark name required
153 [255]
153 [255]
154
154
155 delete nonexistent bookmark
155 delete nonexistent bookmark
156
156
157 $ hg bookmark -d A
157 $ hg bookmark -d A
158 abort: bookmark 'A' does not exist
158 abort: bookmark 'A' does not exist
159 [255]
159 [255]
160
160
161 bookmark name with spaces should be stripped
161 bookmark name with spaces should be stripped
162
162
163 $ hg bookmark ' x y '
163 $ hg bookmark ' x y '
164
164
165 list bookmarks
165 list bookmarks
166
166
167 $ hg bookmarks
167 $ hg bookmarks
168 X2 1:925d80f479bb
168 X2 1:925d80f479bb
169 Y 2:db815d6d32e6
169 Y 2:db815d6d32e6
170 Z 0:f7b1eb17ad24
170 Z 0:f7b1eb17ad24
171 * x y 2:db815d6d32e6
171 * x y 2:db815d6d32e6
172
172
173 look up stripped bookmark name
173 look up stripped bookmark name
174
174
175 $ hg log -r '"x y"'
175 $ hg log -r '"x y"'
176 changeset: 2:db815d6d32e6
176 changeset: 2:db815d6d32e6
177 bookmark: Y
177 bookmark: Y
178 bookmark: x y
178 bookmark: x y
179 tag: tip
179 tag: tip
180 parent: 0:f7b1eb17ad24
180 parent: 0:f7b1eb17ad24
181 user: test
181 user: test
182 date: Thu Jan 01 00:00:00 1970 +0000
182 date: Thu Jan 01 00:00:00 1970 +0000
183 summary: 2
183 summary: 2
184
184
185
185
186 reject bookmark name with newline
186 reject bookmark name with newline
187
187
188 $ hg bookmark '
188 $ hg bookmark '
189 > '
189 > '
190 abort: bookmark name cannot contain newlines
190 abort: bookmark name cannot contain newlines
191 [255]
191 [255]
192
192
193 bookmark with existing name
193 bookmark with existing name
194
194
195 $ hg bookmark Z
195 $ hg bookmark Z
196 abort: bookmark 'Z' already exists (use -f to force)
196 abort: bookmark 'Z' already exists (use -f to force)
197 [255]
197 [255]
198
198
199 force bookmark with existing name
199 force bookmark with existing name
200
200
201 $ hg bookmark -f Z
201 $ hg bookmark -f Z
202
202
203 list bookmarks
203 list bookmarks
204
204
205 $ hg bookmark
205 $ hg bookmark
206 X2 1:925d80f479bb
206 X2 1:925d80f479bb
207 Y 2:db815d6d32e6
207 Y 2:db815d6d32e6
208 * Z 2:db815d6d32e6
208 * Z 2:db815d6d32e6
209 x y 2:db815d6d32e6
209 x y 2:db815d6d32e6
210
210
211 revision but no bookmark name
211 revision but no bookmark name
212
212
213 $ hg bookmark -r .
213 $ hg bookmark -r .
214 abort: bookmark name required
214 abort: bookmark name required
215 [255]
215 [255]
216
216
217 bookmark name with whitespace only
217 bookmark name with whitespace only
218
218
219 $ hg bookmark ' '
219 $ hg bookmark ' '
220 abort: bookmark names cannot consist entirely of whitespace
220 abort: bookmark names cannot consist entirely of whitespace
221 [255]
221 [255]
222
222
223 invalid bookmark
223 invalid bookmark
224
224
225 $ hg bookmark 'foo:bar'
225 $ hg bookmark 'foo:bar'
226 abort: bookmark 'foo:bar' contains illegal character
226 abort: bookmark 'foo:bar' contains illegal character
227 [255]
227 [255]
228
228
229 the bookmark extension should be ignored now that it is part of core
229 the bookmark extension should be ignored now that it is part of core
230
230
231 $ echo "[extensions]" >> $HGRCPATH
231 $ echo "[extensions]" >> $HGRCPATH
232 $ echo "bookmarks=" >> $HGRCPATH
232 $ echo "bookmarks=" >> $HGRCPATH
233 $ hg bookmarks
233 $ hg bookmarks
234 X2 1:925d80f479bb
234 X2 1:925d80f479bb
235 Y 2:db815d6d32e6
235 Y 2:db815d6d32e6
236 * Z 2:db815d6d32e6
236 * Z 2:db815d6d32e6
237 x y 2:db815d6d32e6
237 x y 2:db815d6d32e6
238
238 test summary
239 test summary
239
240
240 $ hg summary
241 $ hg summary
241 parent: 2:db815d6d32e6 tip Y Z x y
242 parent: 2:db815d6d32e6 tip Y Z x y
242 2
243 2
243 branch: default
244 branch: default
244 commit: (clean)
245 commit: (clean)
245 update: 1 new changesets, 2 branch heads (merge)
246 update: 1 new changesets, 2 branch heads (merge)
246
247
247 test id
248 test id
248
249
249 $ hg id
250 $ hg id
250 db815d6d32e6 tip Y/Z/x y
251 db815d6d32e6 tip Y/Z/x y
251
252
253 test rollback
254
255 $ echo foo > f1
256 $ hg ci -Amr
257 adding f1
258 $ hg bookmark -f Y -r 1
259 $ hg bookmark -f Z -r 1
260 $ hg rollback
261 repository tip rolled back to revision 2 (undo commit)
262 working directory now based on revision 2
263 $ hg bookmarks
264 X2 1:925d80f479bb
265 Y 2:db815d6d32e6
266 * Z 2:db815d6d32e6
267 x y 2:db815d6d32e6
268
252 test clone
269 test clone
253
270
254 $ hg bookmarks
271 $ hg bookmarks
255 X2 1:925d80f479bb
272 X2 1:925d80f479bb
256 Y 2:db815d6d32e6
273 Y 2:db815d6d32e6
257 * Z 2:db815d6d32e6
274 * Z 2:db815d6d32e6
258 x y 2:db815d6d32e6
275 x y 2:db815d6d32e6
259 $ hg clone . cloned-bookmarks
276 $ hg clone . cloned-bookmarks
260 updating to branch default
277 updating to branch default
261 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
278 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
262 $ hg -R cloned-bookmarks bookmarks
279 $ hg -R cloned-bookmarks bookmarks
263 X2 1:925d80f479bb
280 X2 1:925d80f479bb
264 Y 2:db815d6d32e6
281 Y 2:db815d6d32e6
265 Z 2:db815d6d32e6
282 Z 2:db815d6d32e6
266 x y 2:db815d6d32e6
283 x y 2:db815d6d32e6
267
284
268 test clone with pull protocol
285 test clone with pull protocol
269
286
270 $ hg clone --pull . cloned-bookmarks-pull
287 $ hg clone --pull . cloned-bookmarks-pull
271 requesting all changes
288 requesting all changes
272 adding changesets
289 adding changesets
273 adding manifests
290 adding manifests
274 adding file changes
291 adding file changes
275 added 3 changesets with 3 changes to 3 files (+1 heads)
292 added 3 changesets with 3 changes to 3 files (+1 heads)
276 updating to branch default
293 updating to branch default
277 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
294 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
278 $ hg -R cloned-bookmarks-pull bookmarks
295 $ hg -R cloned-bookmarks-pull bookmarks
279 X2 1:925d80f479bb
296 X2 1:925d80f479bb
280 Y 2:db815d6d32e6
297 Y 2:db815d6d32e6
281 Z 2:db815d6d32e6
298 Z 2:db815d6d32e6
282 x y 2:db815d6d32e6
299 x y 2:db815d6d32e6
283
300
284 test clone with a specific revision
301 test clone with a specific revision
285
302
286 $ hg clone -r 925d80 . cloned-bookmarks-rev
303 $ hg clone -r 925d80 . cloned-bookmarks-rev
287 adding changesets
304 adding changesets
288 adding manifests
305 adding manifests
289 adding file changes
306 adding file changes
290 added 2 changesets with 2 changes to 2 files
307 added 2 changesets with 2 changes to 2 files
291 updating to branch default
308 updating to branch default
292 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
309 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
293 $ hg -R cloned-bookmarks-rev bookmarks
310 $ hg -R cloned-bookmarks-rev bookmarks
294 X2 1:925d80f479bb
311 X2 1:925d80f479bb
295
312
296 create bundle with two heads
313 create bundle with two heads
297
314
298 $ hg clone . tobundle
315 $ hg clone . tobundle
299 updating to branch default
316 updating to branch default
300 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
317 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
301 $ echo x > tobundle/x
318 $ echo x > tobundle/x
302 $ hg -R tobundle add tobundle/x
319 $ hg -R tobundle add tobundle/x
303 $ hg -R tobundle commit -m'x'
320 $ hg -R tobundle commit -m'x'
304 $ hg -R tobundle update -r -2
321 $ hg -R tobundle update -r -2
305 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
322 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
306 $ echo y > tobundle/y
323 $ echo y > tobundle/y
307 $ hg -R tobundle branch test
324 $ hg -R tobundle branch test
308 marked working directory as branch test
325 marked working directory as branch test
309 $ hg -R tobundle add tobundle/y
326 $ hg -R tobundle add tobundle/y
310 $ hg -R tobundle commit -m'y'
327 $ hg -R tobundle commit -m'y'
311 $ hg -R tobundle bundle tobundle.hg
328 $ hg -R tobundle bundle tobundle.hg
312 searching for changes
329 searching for changes
313 2 changesets found
330 2 changesets found
314 $ hg unbundle tobundle.hg
331 $ hg unbundle tobundle.hg
315 adding changesets
332 adding changesets
316 adding manifests
333 adding manifests
317 adding file changes
334 adding file changes
318 added 2 changesets with 2 changes to 2 files (+1 heads)
335 added 2 changesets with 2 changes to 2 files (+1 heads)
319 (run 'hg heads' to see heads, 'hg merge' to merge)
336 (run 'hg heads' to see heads, 'hg merge' to merge)
320 $ hg update
337 $ hg update
321 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
338 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
322 $ hg bookmarks
339 $ hg bookmarks
323 X2 1:925d80f479bb
340 X2 1:925d80f479bb
324 Y 2:db815d6d32e6
341 Y 2:db815d6d32e6
325 * Z 3:125c9a1d6df6
342 * Z 3:125c9a1d6df6
326 x y 2:db815d6d32e6
343 x y 2:db815d6d32e6
327
344
@@ -1,109 +1,111 b''
1 Init repo1:
1 Init repo1:
2
2
3 $ hg init repo1
3 $ hg init repo1
4 $ cd repo1
4 $ cd repo1
5 $ echo "some text" > a
5 $ echo "some text" > a
6 $ hg add
6 $ hg add
7 adding a
7 adding a
8 $ hg ci -m first
8 $ hg ci -m first
9 $ cat .hg/store/fncache | sort
9 $ cat .hg/store/fncache | sort
10 data/a.i
10 data/a.i
11
11
12 Testing a.i/b:
12 Testing a.i/b:
13
13
14 $ mkdir a.i
14 $ mkdir a.i
15 $ echo "some other text" > a.i/b
15 $ echo "some other text" > a.i/b
16 $ hg add
16 $ hg add
17 adding a.i/b
17 adding a.i/b
18 $ hg ci -m second
18 $ hg ci -m second
19 $ cat .hg/store/fncache | sort
19 $ cat .hg/store/fncache | sort
20 data/a.i
20 data/a.i
21 data/a.i.hg/b.i
21 data/a.i.hg/b.i
22
22
23 Testing a.i.hg/c:
23 Testing a.i.hg/c:
24
24
25 $ mkdir a.i.hg
25 $ mkdir a.i.hg
26 $ echo "yet another text" > a.i.hg/c
26 $ echo "yet another text" > a.i.hg/c
27 $ hg add
27 $ hg add
28 adding a.i.hg/c
28 adding a.i.hg/c
29 $ hg ci -m third
29 $ hg ci -m third
30 $ cat .hg/store/fncache | sort
30 $ cat .hg/store/fncache | sort
31 data/a.i
31 data/a.i
32 data/a.i.hg.hg/c.i
32 data/a.i.hg.hg/c.i
33 data/a.i.hg/b.i
33 data/a.i.hg/b.i
34
34
35 Testing verify:
35 Testing verify:
36
36
37 $ hg verify
37 $ hg verify
38 checking changesets
38 checking changesets
39 checking manifests
39 checking manifests
40 crosschecking files in changesets and manifests
40 crosschecking files in changesets and manifests
41 checking files
41 checking files
42 3 files, 3 changesets, 3 total revisions
42 3 files, 3 changesets, 3 total revisions
43
43
44 $ rm .hg/store/fncache
44 $ rm .hg/store/fncache
45
45
46 $ hg verify
46 $ hg verify
47 checking changesets
47 checking changesets
48 checking manifests
48 checking manifests
49 crosschecking files in changesets and manifests
49 crosschecking files in changesets and manifests
50 checking files
50 checking files
51 data/a.i@0: missing revlog!
51 data/a.i@0: missing revlog!
52 data/a.i.hg/c.i@2: missing revlog!
52 data/a.i.hg/c.i@2: missing revlog!
53 data/a.i/b.i@1: missing revlog!
53 data/a.i/b.i@1: missing revlog!
54 3 files, 3 changesets, 3 total revisions
54 3 files, 3 changesets, 3 total revisions
55 3 integrity errors encountered!
55 3 integrity errors encountered!
56 (first damaged changeset appears to be 0)
56 (first damaged changeset appears to be 0)
57 [1]
57 [1]
58 $ cd ..
58 $ cd ..
59
59
60 Non store repo:
60 Non store repo:
61
61
62 $ hg --config format.usestore=False init foo
62 $ hg --config format.usestore=False init foo
63 $ cd foo
63 $ cd foo
64 $ mkdir tst.d
64 $ mkdir tst.d
65 $ echo foo > tst.d/foo
65 $ echo foo > tst.d/foo
66 $ hg ci -Amfoo
66 $ hg ci -Amfoo
67 adding tst.d/foo
67 adding tst.d/foo
68 $ find .hg | sort
68 $ find .hg | sort
69 .hg
69 .hg
70 .hg/00changelog.i
70 .hg/00changelog.i
71 .hg/00manifest.i
71 .hg/00manifest.i
72 .hg/data
72 .hg/data
73 .hg/data/tst.d.hg
73 .hg/data/tst.d.hg
74 .hg/data/tst.d.hg/foo.i
74 .hg/data/tst.d.hg/foo.i
75 .hg/dirstate
75 .hg/dirstate
76 .hg/last-message.txt
76 .hg/last-message.txt
77 .hg/requires
77 .hg/requires
78 .hg/undo
78 .hg/undo
79 .hg/undo.bookmarks
79 .hg/undo.branch
80 .hg/undo.branch
80 .hg/undo.desc
81 .hg/undo.desc
81 .hg/undo.dirstate
82 .hg/undo.dirstate
82 $ cd ..
83 $ cd ..
83
84
84 Non fncache repo:
85 Non fncache repo:
85
86
86 $ hg --config format.usefncache=False init bar
87 $ hg --config format.usefncache=False init bar
87 $ cd bar
88 $ cd bar
88 $ mkdir tst.d
89 $ mkdir tst.d
89 $ echo foo > tst.d/Foo
90 $ echo foo > tst.d/Foo
90 $ hg ci -Amfoo
91 $ hg ci -Amfoo
91 adding tst.d/Foo
92 adding tst.d/Foo
92 $ find .hg | sort
93 $ find .hg | sort
93 .hg
94 .hg
94 .hg/00changelog.i
95 .hg/00changelog.i
95 .hg/dirstate
96 .hg/dirstate
96 .hg/last-message.txt
97 .hg/last-message.txt
97 .hg/requires
98 .hg/requires
98 .hg/store
99 .hg/store
99 .hg/store/00changelog.i
100 .hg/store/00changelog.i
100 .hg/store/00manifest.i
101 .hg/store/00manifest.i
101 .hg/store/data
102 .hg/store/data
102 .hg/store/data/tst.d.hg
103 .hg/store/data/tst.d.hg
103 .hg/store/data/tst.d.hg/_foo.i
104 .hg/store/data/tst.d.hg/_foo.i
104 .hg/store/undo
105 .hg/store/undo
106 .hg/undo.bookmarks
105 .hg/undo.branch
107 .hg/undo.branch
106 .hg/undo.desc
108 .hg/undo.desc
107 .hg/undo.dirstate
109 .hg/undo.dirstate
108 $ cd ..
110 $ cd ..
109
111
@@ -1,330 +1,332 b''
1 $ cat > nlinks.py <<EOF
1 $ cat > nlinks.py <<EOF
2 > import os, sys
2 > import os, sys
3 > for f in sorted(sys.stdin.readlines()):
3 > for f in sorted(sys.stdin.readlines()):
4 > f = f[:-1]
4 > f = f[:-1]
5 > print os.lstat(f).st_nlink, f
5 > print os.lstat(f).st_nlink, f
6 > EOF
6 > EOF
7
7
8 $ nlinksdir()
8 $ nlinksdir()
9 > {
9 > {
10 > find $1 -type f | python $TESTTMP/nlinks.py
10 > find $1 -type f | python $TESTTMP/nlinks.py
11 > }
11 > }
12
12
13 Some implementations of cp can't create hardlinks (replaces 'cp -al' on Linux):
13 Some implementations of cp can't create hardlinks (replaces 'cp -al' on Linux):
14
14
15 $ cat > linkcp.py <<EOF
15 $ cat > linkcp.py <<EOF
16 > from mercurial import util
16 > from mercurial import util
17 > import sys
17 > import sys
18 > util.copyfiles(sys.argv[1], sys.argv[2], hardlink=True)
18 > util.copyfiles(sys.argv[1], sys.argv[2], hardlink=True)
19 > EOF
19 > EOF
20
20
21 $ linkcp()
21 $ linkcp()
22 > {
22 > {
23 > python $TESTTMP/linkcp.py $1 $2
23 > python $TESTTMP/linkcp.py $1 $2
24 > }
24 > }
25
25
26 Prepare repo r1:
26 Prepare repo r1:
27
27
28 $ hg init r1
28 $ hg init r1
29 $ cd r1
29 $ cd r1
30
30
31 $ echo c1 > f1
31 $ echo c1 > f1
32 $ hg add f1
32 $ hg add f1
33 $ hg ci -m0
33 $ hg ci -m0
34
34
35 $ mkdir d1
35 $ mkdir d1
36 $ cd d1
36 $ cd d1
37 $ echo c2 > f2
37 $ echo c2 > f2
38 $ hg add f2
38 $ hg add f2
39 $ hg ci -m1
39 $ hg ci -m1
40 $ cd ../..
40 $ cd ../..
41
41
42 $ nlinksdir r1/.hg/store
42 $ nlinksdir r1/.hg/store
43 1 r1/.hg/store/00changelog.i
43 1 r1/.hg/store/00changelog.i
44 1 r1/.hg/store/00manifest.i
44 1 r1/.hg/store/00manifest.i
45 1 r1/.hg/store/data/d1/f2.i
45 1 r1/.hg/store/data/d1/f2.i
46 1 r1/.hg/store/data/f1.i
46 1 r1/.hg/store/data/f1.i
47 1 r1/.hg/store/fncache
47 1 r1/.hg/store/fncache
48 1 r1/.hg/store/undo
48 1 r1/.hg/store/undo
49
49
50
50
51 Create hardlinked clone r2:
51 Create hardlinked clone r2:
52
52
53 $ hg clone -U --debug r1 r2
53 $ hg clone -U --debug r1 r2
54 linked 7 files
54 linked 7 files
55
55
56 Create non-hardlinked clone r3:
56 Create non-hardlinked clone r3:
57
57
58 $ hg clone --pull r1 r3
58 $ hg clone --pull r1 r3
59 requesting all changes
59 requesting all changes
60 adding changesets
60 adding changesets
61 adding manifests
61 adding manifests
62 adding file changes
62 adding file changes
63 added 2 changesets with 2 changes to 2 files
63 added 2 changesets with 2 changes to 2 files
64 updating to branch default
64 updating to branch default
65 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
65 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
66
66
67
67
68 Repos r1 and r2 should now contain hardlinked files:
68 Repos r1 and r2 should now contain hardlinked files:
69
69
70 $ nlinksdir r1/.hg/store
70 $ nlinksdir r1/.hg/store
71 2 r1/.hg/store/00changelog.i
71 2 r1/.hg/store/00changelog.i
72 2 r1/.hg/store/00manifest.i
72 2 r1/.hg/store/00manifest.i
73 2 r1/.hg/store/data/d1/f2.i
73 2 r1/.hg/store/data/d1/f2.i
74 2 r1/.hg/store/data/f1.i
74 2 r1/.hg/store/data/f1.i
75 2 r1/.hg/store/fncache
75 2 r1/.hg/store/fncache
76 1 r1/.hg/store/undo
76 1 r1/.hg/store/undo
77
77
78 $ nlinksdir r2/.hg/store
78 $ nlinksdir r2/.hg/store
79 2 r2/.hg/store/00changelog.i
79 2 r2/.hg/store/00changelog.i
80 2 r2/.hg/store/00manifest.i
80 2 r2/.hg/store/00manifest.i
81 2 r2/.hg/store/data/d1/f2.i
81 2 r2/.hg/store/data/d1/f2.i
82 2 r2/.hg/store/data/f1.i
82 2 r2/.hg/store/data/f1.i
83 2 r2/.hg/store/fncache
83 2 r2/.hg/store/fncache
84
84
85 Repo r3 should not be hardlinked:
85 Repo r3 should not be hardlinked:
86
86
87 $ nlinksdir r3/.hg/store
87 $ nlinksdir r3/.hg/store
88 1 r3/.hg/store/00changelog.i
88 1 r3/.hg/store/00changelog.i
89 1 r3/.hg/store/00manifest.i
89 1 r3/.hg/store/00manifest.i
90 1 r3/.hg/store/data/d1/f2.i
90 1 r3/.hg/store/data/d1/f2.i
91 1 r3/.hg/store/data/f1.i
91 1 r3/.hg/store/data/f1.i
92 1 r3/.hg/store/fncache
92 1 r3/.hg/store/fncache
93 1 r3/.hg/store/undo
93 1 r3/.hg/store/undo
94
94
95
95
96 Create a non-inlined filelog in r3:
96 Create a non-inlined filelog in r3:
97
97
98 $ cd r3/d1
98 $ cd r3/d1
99 $ python -c 'for x in range(10000): print x' >> data1
99 $ python -c 'for x in range(10000): print x' >> data1
100 $ for j in 0 1 2 3 4 5 6 7 8 9; do
100 $ for j in 0 1 2 3 4 5 6 7 8 9; do
101 > cat data1 >> f2
101 > cat data1 >> f2
102 > hg commit -m$j
102 > hg commit -m$j
103 > done
103 > done
104 $ cd ../..
104 $ cd ../..
105
105
106 $ nlinksdir r3/.hg/store
106 $ nlinksdir r3/.hg/store
107 1 r3/.hg/store/00changelog.i
107 1 r3/.hg/store/00changelog.i
108 1 r3/.hg/store/00manifest.i
108 1 r3/.hg/store/00manifest.i
109 1 r3/.hg/store/data/d1/f2.d
109 1 r3/.hg/store/data/d1/f2.d
110 1 r3/.hg/store/data/d1/f2.i
110 1 r3/.hg/store/data/d1/f2.i
111 1 r3/.hg/store/data/f1.i
111 1 r3/.hg/store/data/f1.i
112 1 r3/.hg/store/fncache
112 1 r3/.hg/store/fncache
113 1 r3/.hg/store/undo
113 1 r3/.hg/store/undo
114
114
115 Push to repo r1 should break up most hardlinks in r2:
115 Push to repo r1 should break up most hardlinks in r2:
116
116
117 $ hg -R r2 verify
117 $ hg -R r2 verify
118 checking changesets
118 checking changesets
119 checking manifests
119 checking manifests
120 crosschecking files in changesets and manifests
120 crosschecking files in changesets and manifests
121 checking files
121 checking files
122 2 files, 2 changesets, 2 total revisions
122 2 files, 2 changesets, 2 total revisions
123
123
124 $ cd r3
124 $ cd r3
125 $ hg push
125 $ hg push
126 pushing to $TESTTMP/r1
126 pushing to $TESTTMP/r1
127 searching for changes
127 searching for changes
128 adding changesets
128 adding changesets
129 adding manifests
129 adding manifests
130 adding file changes
130 adding file changes
131 added 10 changesets with 10 changes to 1 files
131 added 10 changesets with 10 changes to 1 files
132
132
133 $ cd ..
133 $ cd ..
134
134
135 $ nlinksdir r2/.hg/store
135 $ nlinksdir r2/.hg/store
136 1 r2/.hg/store/00changelog.i
136 1 r2/.hg/store/00changelog.i
137 1 r2/.hg/store/00manifest.i
137 1 r2/.hg/store/00manifest.i
138 1 r2/.hg/store/data/d1/f2.i
138 1 r2/.hg/store/data/d1/f2.i
139 2 r2/.hg/store/data/f1.i
139 2 r2/.hg/store/data/f1.i
140 1 r2/.hg/store/fncache
140 1 r2/.hg/store/fncache
141
141
142 $ hg -R r2 verify
142 $ hg -R r2 verify
143 checking changesets
143 checking changesets
144 checking manifests
144 checking manifests
145 crosschecking files in changesets and manifests
145 crosschecking files in changesets and manifests
146 checking files
146 checking files
147 2 files, 2 changesets, 2 total revisions
147 2 files, 2 changesets, 2 total revisions
148
148
149
149
150 $ cd r1
150 $ cd r1
151 $ hg up
151 $ hg up
152 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
152 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
153
153
154 Committing a change to f1 in r1 must break up hardlink f1.i in r2:
154 Committing a change to f1 in r1 must break up hardlink f1.i in r2:
155
155
156 $ echo c1c1 >> f1
156 $ echo c1c1 >> f1
157 $ hg ci -m00
157 $ hg ci -m00
158 $ cd ..
158 $ cd ..
159
159
160 $ nlinksdir r2/.hg/store
160 $ nlinksdir r2/.hg/store
161 1 r2/.hg/store/00changelog.i
161 1 r2/.hg/store/00changelog.i
162 1 r2/.hg/store/00manifest.i
162 1 r2/.hg/store/00manifest.i
163 1 r2/.hg/store/data/d1/f2.i
163 1 r2/.hg/store/data/d1/f2.i
164 1 r2/.hg/store/data/f1.i
164 1 r2/.hg/store/data/f1.i
165 1 r2/.hg/store/fncache
165 1 r2/.hg/store/fncache
166
166
167
167
168 $ cd r3
168 $ cd r3
169 $ hg tip --template '{rev}:{node|short}\n'
169 $ hg tip --template '{rev}:{node|short}\n'
170 11:a6451b6bc41f
170 11:a6451b6bc41f
171 $ echo bla > f1
171 $ echo bla > f1
172 $ hg ci -m1
172 $ hg ci -m1
173 $ cd ..
173 $ cd ..
174
174
175 Create hardlinked copy r4 of r3 (on Linux, we would call 'cp -al'):
175 Create hardlinked copy r4 of r3 (on Linux, we would call 'cp -al'):
176
176
177 $ linkcp r3 r4
177 $ linkcp r3 r4
178
178
179 r4 has hardlinks in the working dir (not just inside .hg):
179 r4 has hardlinks in the working dir (not just inside .hg):
180
180
181 $ nlinksdir r4
181 $ nlinksdir r4
182 2 r4/.hg/00changelog.i
182 2 r4/.hg/00changelog.i
183 2 r4/.hg/branch
183 2 r4/.hg/branch
184 2 r4/.hg/cache/branchheads
184 2 r4/.hg/cache/branchheads
185 2 r4/.hg/cache/tags
185 2 r4/.hg/cache/tags
186 2 r4/.hg/dirstate
186 2 r4/.hg/dirstate
187 2 r4/.hg/hgrc
187 2 r4/.hg/hgrc
188 2 r4/.hg/last-message.txt
188 2 r4/.hg/last-message.txt
189 2 r4/.hg/requires
189 2 r4/.hg/requires
190 2 r4/.hg/store/00changelog.i
190 2 r4/.hg/store/00changelog.i
191 2 r4/.hg/store/00manifest.i
191 2 r4/.hg/store/00manifest.i
192 2 r4/.hg/store/data/d1/f2.d
192 2 r4/.hg/store/data/d1/f2.d
193 2 r4/.hg/store/data/d1/f2.i
193 2 r4/.hg/store/data/d1/f2.i
194 2 r4/.hg/store/data/f1.i
194 2 r4/.hg/store/data/f1.i
195 2 r4/.hg/store/fncache
195 2 r4/.hg/store/fncache
196 2 r4/.hg/store/undo
196 2 r4/.hg/store/undo
197 2 r4/.hg/undo.bookmarks
197 2 r4/.hg/undo.branch
198 2 r4/.hg/undo.branch
198 2 r4/.hg/undo.desc
199 2 r4/.hg/undo.desc
199 2 r4/.hg/undo.dirstate
200 2 r4/.hg/undo.dirstate
200 2 r4/d1/data1
201 2 r4/d1/data1
201 2 r4/d1/f2
202 2 r4/d1/f2
202 2 r4/f1
203 2 r4/f1
203
204
204 Update back to revision 11 in r4 should break hardlink of file f1:
205 Update back to revision 11 in r4 should break hardlink of file f1:
205
206
206 $ hg -R r4 up 11
207 $ hg -R r4 up 11
207 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
208 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
208
209
209 $ nlinksdir r4
210 $ nlinksdir r4
210 2 r4/.hg/00changelog.i
211 2 r4/.hg/00changelog.i
211 1 r4/.hg/branch
212 1 r4/.hg/branch
212 2 r4/.hg/cache/branchheads
213 2 r4/.hg/cache/branchheads
213 2 r4/.hg/cache/tags
214 2 r4/.hg/cache/tags
214 1 r4/.hg/dirstate
215 1 r4/.hg/dirstate
215 2 r4/.hg/hgrc
216 2 r4/.hg/hgrc
216 2 r4/.hg/last-message.txt
217 2 r4/.hg/last-message.txt
217 2 r4/.hg/requires
218 2 r4/.hg/requires
218 2 r4/.hg/store/00changelog.i
219 2 r4/.hg/store/00changelog.i
219 2 r4/.hg/store/00manifest.i
220 2 r4/.hg/store/00manifest.i
220 2 r4/.hg/store/data/d1/f2.d
221 2 r4/.hg/store/data/d1/f2.d
221 2 r4/.hg/store/data/d1/f2.i
222 2 r4/.hg/store/data/d1/f2.i
222 2 r4/.hg/store/data/f1.i
223 2 r4/.hg/store/data/f1.i
223 2 r4/.hg/store/fncache
224 2 r4/.hg/store/fncache
224 2 r4/.hg/store/undo
225 2 r4/.hg/store/undo
226 2 r4/.hg/undo.bookmarks
225 2 r4/.hg/undo.branch
227 2 r4/.hg/undo.branch
226 2 r4/.hg/undo.desc
228 2 r4/.hg/undo.desc
227 2 r4/.hg/undo.dirstate
229 2 r4/.hg/undo.dirstate
228 2 r4/d1/data1
230 2 r4/d1/data1
229 2 r4/d1/f2
231 2 r4/d1/f2
230 1 r4/f1
232 1 r4/f1
231
233
232
234
233 Test hardlinking outside hg:
235 Test hardlinking outside hg:
234
236
235 $ mkdir x
237 $ mkdir x
236 $ echo foo > x/a
238 $ echo foo > x/a
237
239
238 $ linkcp x y
240 $ linkcp x y
239 $ echo bar >> y/a
241 $ echo bar >> y/a
240
242
241 No diff if hardlink:
243 No diff if hardlink:
242
244
243 $ diff x/a y/a
245 $ diff x/a y/a
244
246
245 Test mq hardlinking:
247 Test mq hardlinking:
246
248
247 $ echo "[extensions]" >> $HGRCPATH
249 $ echo "[extensions]" >> $HGRCPATH
248 $ echo "mq=" >> $HGRCPATH
250 $ echo "mq=" >> $HGRCPATH
249
251
250 $ hg init a
252 $ hg init a
251 $ cd a
253 $ cd a
252
254
253 $ hg qimport -n foo - << EOF
255 $ hg qimport -n foo - << EOF
254 > # HG changeset patch
256 > # HG changeset patch
255 > # Date 1 0
257 > # Date 1 0
256 > diff -r 2588a8b53d66 a
258 > diff -r 2588a8b53d66 a
257 > --- /dev/null Thu Jan 01 00:00:00 1970 +0000
259 > --- /dev/null Thu Jan 01 00:00:00 1970 +0000
258 > +++ b/a Wed Jul 23 15:54:29 2008 +0200
260 > +++ b/a Wed Jul 23 15:54:29 2008 +0200
259 > @@ -0,0 +1,1 @@
261 > @@ -0,0 +1,1 @@
260 > +a
262 > +a
261 > EOF
263 > EOF
262 adding foo to series file
264 adding foo to series file
263
265
264 $ hg qpush
266 $ hg qpush
265 applying foo
267 applying foo
266 now at: foo
268 now at: foo
267
269
268 $ cd ..
270 $ cd ..
269 $ linkcp a b
271 $ linkcp a b
270 $ cd b
272 $ cd b
271
273
272 $ hg qimport -n bar - << EOF
274 $ hg qimport -n bar - << EOF
273 > # HG changeset patch
275 > # HG changeset patch
274 > # Date 2 0
276 > # Date 2 0
275 > diff -r 2588a8b53d66 a
277 > diff -r 2588a8b53d66 a
276 > --- /dev/null Thu Jan 01 00:00:00 1970 +0000
278 > --- /dev/null Thu Jan 01 00:00:00 1970 +0000
277 > +++ b/b Wed Jul 23 15:54:29 2008 +0200
279 > +++ b/b Wed Jul 23 15:54:29 2008 +0200
278 > @@ -0,0 +1,1 @@
280 > @@ -0,0 +1,1 @@
279 > +b
281 > +b
280 > EOF
282 > EOF
281 adding bar to series file
283 adding bar to series file
282
284
283 $ hg qpush
285 $ hg qpush
284 applying bar
286 applying bar
285 now at: bar
287 now at: bar
286
288
287 $ cat .hg/patches/status
289 $ cat .hg/patches/status
288 430ed4828a74fa4047bc816a25500f7472ab4bfe:foo
290 430ed4828a74fa4047bc816a25500f7472ab4bfe:foo
289 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c:bar
291 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c:bar
290
292
291 $ cat .hg/patches/series
293 $ cat .hg/patches/series
292 foo
294 foo
293 bar
295 bar
294
296
295 $ cat ../a/.hg/patches/status
297 $ cat ../a/.hg/patches/status
296 430ed4828a74fa4047bc816a25500f7472ab4bfe:foo
298 430ed4828a74fa4047bc816a25500f7472ab4bfe:foo
297
299
298 $ cat ../a/.hg/patches/series
300 $ cat ../a/.hg/patches/series
299 foo
301 foo
300
302
301 Test tags hardlinking:
303 Test tags hardlinking:
302
304
303 $ hg qdel -r qbase:qtip
305 $ hg qdel -r qbase:qtip
304 patch foo finalized without changeset message
306 patch foo finalized without changeset message
305 patch bar finalized without changeset message
307 patch bar finalized without changeset message
306
308
307 $ hg tag -l lfoo
309 $ hg tag -l lfoo
308 $ hg tag foo
310 $ hg tag foo
309
311
310 $ cd ..
312 $ cd ..
311 $ linkcp b c
313 $ linkcp b c
312 $ cd c
314 $ cd c
313
315
314 $ hg tag -l -r 0 lbar
316 $ hg tag -l -r 0 lbar
315 $ hg tag -r 0 bar
317 $ hg tag -r 0 bar
316
318
317 $ cat .hgtags
319 $ cat .hgtags
318 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c foo
320 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c foo
319 430ed4828a74fa4047bc816a25500f7472ab4bfe bar
321 430ed4828a74fa4047bc816a25500f7472ab4bfe bar
320
322
321 $ cat .hg/localtags
323 $ cat .hg/localtags
322 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c lfoo
324 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c lfoo
323 430ed4828a74fa4047bc816a25500f7472ab4bfe lbar
325 430ed4828a74fa4047bc816a25500f7472ab4bfe lbar
324
326
325 $ cat ../b/.hgtags
327 $ cat ../b/.hgtags
326 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c foo
328 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c foo
327
329
328 $ cat ../b/.hg/localtags
330 $ cat ../b/.hg/localtags
329 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c lfoo
331 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c lfoo
330
332
@@ -1,20 +1,20 b''
1 Test hangup signal in the middle of transaction
1 Test hangup signal in the middle of transaction
2
2
3 $ "$TESTDIR/hghave" fifo || exit 80
3 $ "$TESTDIR/hghave" fifo || exit 80
4 $ hg init
4 $ hg init
5 $ mkfifo p
5 $ mkfifo p
6 $ hg serve --stdio < p &
6 $ hg serve --stdio < p &
7 $ P=$!
7 $ P=$!
8 $ (echo lock; echo addchangegroup; sleep 5) > p &
8 $ (echo lock; echo addchangegroup; sleep 5) > p &
9 $ Q=$!
9 $ Q=$!
10 $ sleep 3
10 $ sleep 3
11 0
11 0
12 0
12 0
13 adding changesets
13 adding changesets
14 $ kill -HUP $P
14 $ kill -HUP $P
15 $ wait
15 $ wait
16 transaction abort!
16 transaction abort!
17 rollback completed
17 rollback completed
18 killed!
18 killed!
19 $ echo .hg/* .hg/store/*
19 $ echo .hg/* .hg/store/*
20 .hg/00changelog.i .hg/journal.branch .hg/journal.desc .hg/journal.dirstate .hg/requires .hg/store .hg/store/00changelog.i .hg/store/00changelog.i.a
20 .hg/00changelog.i .hg/journal.bookmarks .hg/journal.branch .hg/journal.desc .hg/journal.dirstate .hg/requires .hg/store .hg/store/00changelog.i .hg/store/00changelog.i.a
@@ -1,141 +1,143 b''
1 test that new files created in .hg inherit the permissions from .hg/store
1 test that new files created in .hg inherit the permissions from .hg/store
2
2
3
3
4 $ "$TESTDIR/hghave" unix-permissions || exit 80
4 $ "$TESTDIR/hghave" unix-permissions || exit 80
5
5
6 $ mkdir dir
6 $ mkdir dir
7
7
8 just in case somebody has a strange $TMPDIR
8 just in case somebody has a strange $TMPDIR
9
9
10 $ chmod g-s dir
10 $ chmod g-s dir
11 $ cd dir
11 $ cd dir
12
12
13 $ cat >printmodes.py <<EOF
13 $ cat >printmodes.py <<EOF
14 > import os, sys
14 > import os, sys
15 >
15 >
16 > allnames = []
16 > allnames = []
17 > isdir = {}
17 > isdir = {}
18 > for root, dirs, files in os.walk(sys.argv[1]):
18 > for root, dirs, files in os.walk(sys.argv[1]):
19 > for d in dirs:
19 > for d in dirs:
20 > name = os.path.join(root, d)
20 > name = os.path.join(root, d)
21 > isdir[name] = 1
21 > isdir[name] = 1
22 > allnames.append(name)
22 > allnames.append(name)
23 > for f in files:
23 > for f in files:
24 > name = os.path.join(root, f)
24 > name = os.path.join(root, f)
25 > allnames.append(name)
25 > allnames.append(name)
26 > allnames.sort()
26 > allnames.sort()
27 > for name in allnames:
27 > for name in allnames:
28 > suffix = name in isdir and '/' or ''
28 > suffix = name in isdir and '/' or ''
29 > print '%05o %s%s' % (os.lstat(name).st_mode & 07777, name, suffix)
29 > print '%05o %s%s' % (os.lstat(name).st_mode & 07777, name, suffix)
30 > EOF
30 > EOF
31
31
32 $ cat >mode.py <<EOF
32 $ cat >mode.py <<EOF
33 > import sys
33 > import sys
34 > import os
34 > import os
35 > print '%05o' % os.lstat(sys.argv[1]).st_mode
35 > print '%05o' % os.lstat(sys.argv[1]).st_mode
36 > EOF
36 > EOF
37
37
38 $ umask 077
38 $ umask 077
39
39
40 $ hg init repo
40 $ hg init repo
41 $ cd repo
41 $ cd repo
42
42
43 $ chmod 0770 .hg/store
43 $ chmod 0770 .hg/store
44
44
45 before commit
45 before commit
46 store can be written by the group, other files cannot
46 store can be written by the group, other files cannot
47 store is setgid
47 store is setgid
48
48
49 $ python ../printmodes.py .
49 $ python ../printmodes.py .
50 00700 ./.hg/
50 00700 ./.hg/
51 00600 ./.hg/00changelog.i
51 00600 ./.hg/00changelog.i
52 00600 ./.hg/requires
52 00600 ./.hg/requires
53 00770 ./.hg/store/
53 00770 ./.hg/store/
54
54
55 $ mkdir dir
55 $ mkdir dir
56 $ touch foo dir/bar
56 $ touch foo dir/bar
57 $ hg ci -qAm 'add files'
57 $ hg ci -qAm 'add files'
58
58
59 after commit
59 after commit
60 working dir files can only be written by the owner
60 working dir files can only be written by the owner
61 files created in .hg can be written by the group
61 files created in .hg can be written by the group
62 (in particular, store/**, dirstate, branch cache file, undo files)
62 (in particular, store/**, dirstate, branch cache file, undo files)
63 new directories are setgid
63 new directories are setgid
64
64
65 $ python ../printmodes.py .
65 $ python ../printmodes.py .
66 00700 ./.hg/
66 00700 ./.hg/
67 00600 ./.hg/00changelog.i
67 00600 ./.hg/00changelog.i
68 00660 ./.hg/dirstate
68 00660 ./.hg/dirstate
69 00660 ./.hg/last-message.txt
69 00660 ./.hg/last-message.txt
70 00600 ./.hg/requires
70 00600 ./.hg/requires
71 00770 ./.hg/store/
71 00770 ./.hg/store/
72 00660 ./.hg/store/00changelog.i
72 00660 ./.hg/store/00changelog.i
73 00660 ./.hg/store/00manifest.i
73 00660 ./.hg/store/00manifest.i
74 00770 ./.hg/store/data/
74 00770 ./.hg/store/data/
75 00770 ./.hg/store/data/dir/
75 00770 ./.hg/store/data/dir/
76 00660 ./.hg/store/data/dir/bar.i
76 00660 ./.hg/store/data/dir/bar.i
77 00660 ./.hg/store/data/foo.i
77 00660 ./.hg/store/data/foo.i
78 00660 ./.hg/store/fncache
78 00660 ./.hg/store/fncache
79 00660 ./.hg/store/undo
79 00660 ./.hg/store/undo
80 00660 ./.hg/undo.bookmarks
80 00660 ./.hg/undo.branch
81 00660 ./.hg/undo.branch
81 00660 ./.hg/undo.desc
82 00660 ./.hg/undo.desc
82 00660 ./.hg/undo.dirstate
83 00660 ./.hg/undo.dirstate
83 00700 ./dir/
84 00700 ./dir/
84 00600 ./dir/bar
85 00600 ./dir/bar
85 00600 ./foo
86 00600 ./foo
86
87
87 $ umask 007
88 $ umask 007
88 $ hg init ../push
89 $ hg init ../push
89
90
90 before push
91 before push
91 group can write everything
92 group can write everything
92
93
93 $ python ../printmodes.py ../push
94 $ python ../printmodes.py ../push
94 00770 ../push/.hg/
95 00770 ../push/.hg/
95 00660 ../push/.hg/00changelog.i
96 00660 ../push/.hg/00changelog.i
96 00660 ../push/.hg/requires
97 00660 ../push/.hg/requires
97 00770 ../push/.hg/store/
98 00770 ../push/.hg/store/
98
99
99 $ umask 077
100 $ umask 077
100 $ hg -q push ../push
101 $ hg -q push ../push
101
102
102 after push
103 after push
103 group can still write everything
104 group can still write everything
104
105
105 $ python ../printmodes.py ../push
106 $ python ../printmodes.py ../push
106 00770 ../push/.hg/
107 00770 ../push/.hg/
107 00660 ../push/.hg/00changelog.i
108 00660 ../push/.hg/00changelog.i
108 00770 ../push/.hg/cache/
109 00770 ../push/.hg/cache/
109 00660 ../push/.hg/cache/branchheads
110 00660 ../push/.hg/cache/branchheads
110 00660 ../push/.hg/requires
111 00660 ../push/.hg/requires
111 00770 ../push/.hg/store/
112 00770 ../push/.hg/store/
112 00660 ../push/.hg/store/00changelog.i
113 00660 ../push/.hg/store/00changelog.i
113 00660 ../push/.hg/store/00manifest.i
114 00660 ../push/.hg/store/00manifest.i
114 00770 ../push/.hg/store/data/
115 00770 ../push/.hg/store/data/
115 00770 ../push/.hg/store/data/dir/
116 00770 ../push/.hg/store/data/dir/
116 00660 ../push/.hg/store/data/dir/bar.i
117 00660 ../push/.hg/store/data/dir/bar.i
117 00660 ../push/.hg/store/data/foo.i
118 00660 ../push/.hg/store/data/foo.i
118 00660 ../push/.hg/store/fncache
119 00660 ../push/.hg/store/fncache
119 00660 ../push/.hg/store/undo
120 00660 ../push/.hg/store/undo
121 00660 ../push/.hg/undo.bookmarks
120 00660 ../push/.hg/undo.branch
122 00660 ../push/.hg/undo.branch
121 00660 ../push/.hg/undo.desc
123 00660 ../push/.hg/undo.desc
122 00660 ../push/.hg/undo.dirstate
124 00660 ../push/.hg/undo.dirstate
123
125
124
126
125 Test that we don't lose the setgid bit when we call chmod.
127 Test that we don't lose the setgid bit when we call chmod.
126 Not all systems support setgid directories (e.g. HFS+), so
128 Not all systems support setgid directories (e.g. HFS+), so
127 just check that directories have the same mode.
129 just check that directories have the same mode.
128
130
129 $ cd ..
131 $ cd ..
130 $ hg init setgid
132 $ hg init setgid
131 $ cd setgid
133 $ cd setgid
132 $ chmod g+rwx .hg/store
134 $ chmod g+rwx .hg/store
133 $ chmod g+s .hg/store 2> /dev/null
135 $ chmod g+s .hg/store 2> /dev/null
134 $ mkdir dir
136 $ mkdir dir
135 $ touch dir/file
137 $ touch dir/file
136 $ hg ci -qAm 'add dir/file'
138 $ hg ci -qAm 'add dir/file'
137 $ storemode=`python ../mode.py .hg/store`
139 $ storemode=`python ../mode.py .hg/store`
138 $ dirmode=`python ../mode.py .hg/store/data/dir`
140 $ dirmode=`python ../mode.py .hg/store/data/dir`
139 $ if [ "$storemode" != "$dirmode" ]; then
141 $ if [ "$storemode" != "$dirmode" ]; then
140 > echo "$storemode != $dirmode"
142 > echo "$storemode != $dirmode"
141 $ fi
143 $ fi
General Comments 0
You need to be logged in to leave comments. Login now