bookmarks: allow existing remote bookmarks to become heads when pushing
Levi Bard
r16835:4267c840 default
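For orientation before the diff: checkheads() is the guard that makes "hg push" abort when the push would add heads on the remote. A minimal sketch of how the functions touched by this changeset are typically wired together during a push follows; the actual call site (localrepo.push) is not part of this diff, and names such as force, revs and newbranch stand in for the push command's arguments, so read it as an outline under those assumptions rather than Mercurial's exact API.

    # sketch only: the real push path lives outside this changeset
    commoninc = findcommonincoming(repo, remote, force=force)
    outgoing = findcommonoutgoing(repo, remote, onlyheads=revs,
                                  commoninc=commoninc, force=force)
    if not force:
        # raises util.Abort("push creates new remote head ...") unless every
        # extra head is explained by a fast-forwarded remote bookmark
        checkheads(repo, remote, outgoing, remote.heads(),
                   newbranch=newbranch, inc=bool(commoninc[1]))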
@@ -1,256 +1,265 @@
1 1 # discovery.py - protocol changeset discovery functions
2 2 #
3 3 # Copyright 2010 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from node import nullid, short
9 9 from i18n import _
10 10 import util, setdiscovery, treediscovery, phases
11 11
12 12 def findcommonincoming(repo, remote, heads=None, force=False):
13 13 """Return a tuple (common, anyincoming, heads) used to identify the common
14 14 subset of nodes between repo and remote.
15 15
16 16 "common" is a list of (at least) the heads of the common subset.
17 17 "anyincoming" is testable as a boolean indicating if any nodes are missing
18 18 locally. If remote does not support getbundle, this actually is a list of
19 19 roots of the nodes that would be incoming, to be supplied to
20 20 changegroupsubset. No code except for pull should be relying on this fact
21 21 any longer.
22 22 "heads" is either the supplied heads, or else the remote's heads.
23 23
24 24 If you pass heads and they are all known locally, the response lists just
25 25 these heads in "common" and in "heads".
26 26
27 27 Please use findcommonoutgoing to compute the set of outgoing nodes to give
28 28 extensions a good hook into outgoing.
29 29 """
30 30
31 31 if not remote.capable('getbundle'):
32 32 return treediscovery.findcommonincoming(repo, remote, heads, force)
33 33
34 34 if heads:
35 35 allknown = True
36 36 nm = repo.changelog.nodemap
37 37 for h in heads:
38 38 if nm.get(h) is None:
39 39 allknown = False
40 40 break
41 41 if allknown:
42 42 return (heads, False, heads)
43 43
44 44 res = setdiscovery.findcommonheads(repo.ui, repo, remote,
45 45 abortwhenunrelated=not force)
46 46 common, anyinc, srvheads = res
47 47 return (list(common), anyinc, heads or list(srvheads))
48 48
49 49 class outgoing(object):
50 50 '''Represents the set of nodes present in a local repo but not in a
51 51 (possibly) remote one.
52 52
53 53 Members:
54 54
55 55 missing is a list of all nodes present in local but not in remote.
56 56 common is a list of all nodes shared between the two repos.
57 57 excluded is the list of missing changeset that shouldn't be sent remotely.
58 58 missingheads is the list of heads of missing.
59 59 commonheads is the list of heads of common.
60 60
61 61 The sets are computed on demand from the heads, unless provided upfront
62 62 by discovery.'''
63 63
64 64 def __init__(self, revlog, commonheads, missingheads):
65 65 self.commonheads = commonheads
66 66 self.missingheads = missingheads
67 67 self._revlog = revlog
68 68 self._common = None
69 69 self._missing = None
70 70 self.excluded = []
71 71
72 72 def _computecommonmissing(self):
73 73 sets = self._revlog.findcommonmissing(self.commonheads,
74 74 self.missingheads)
75 75 self._common, self._missing = sets
76 76
77 77 @util.propertycache
78 78 def common(self):
79 79 if self._common is None:
80 80 self._computecommonmissing()
81 81 return self._common
82 82
83 83 @util.propertycache
84 84 def missing(self):
85 85 if self._missing is None:
86 86 self._computecommonmissing()
87 87 return self._missing
88 88
89 89 def findcommonoutgoing(repo, other, onlyheads=None, force=False,
90 90 commoninc=None, portable=False):
91 91 '''Return an outgoing instance to identify the nodes present in repo but
92 92 not in other.
93 93
94 94 If onlyheads is given, only nodes ancestral to nodes in onlyheads
95 95 (inclusive) are included. If you already know the local repo's heads,
96 96 passing them in onlyheads is faster than letting them be recomputed here.
97 97
98 98 If commoninc is given, it must be the result of a prior call to
99 99 findcommonincoming(repo, other, force) to avoid recomputing it here.
100 100
101 101 If portable is given, compute more conservative common and missingheads,
102 102 to make bundles created from the instance more portable.'''
103 103 # declare an empty outgoing object to be filled later
104 104 og = outgoing(repo.changelog, None, None)
105 105
106 106 # get common set if not provided
107 107 if commoninc is None:
108 108 commoninc = findcommonincoming(repo, other, force=force)
109 109 og.commonheads, _any, _hds = commoninc
110 110
111 111 # compute outgoing
112 112 if not repo._phasecache.phaseroots[phases.secret]:
113 113 og.missingheads = onlyheads or repo.heads()
114 114 elif onlyheads is None:
115 115 # use visible heads as it should be cached
116 116 og.missingheads = phases.visibleheads(repo)
117 117 og.excluded = [ctx.node() for ctx in repo.set('secret()')]
118 118 else:
119 119 # compute common, missing and exclude secret stuff
120 120 sets = repo.changelog.findcommonmissing(og.commonheads, onlyheads)
121 121 og._common, allmissing = sets
122 122 og._missing = missing = []
123 123 og.excluded = excluded = []
124 124 for node in allmissing:
125 125 if repo[node].phase() >= phases.secret:
126 126 excluded.append(node)
127 127 else:
128 128 missing.append(node)
129 129 if excluded:
130 130 # update missing heads
131 131 missingheads = phases.newheads(repo, onlyheads, excluded)
132 132 else:
133 133 missingheads = onlyheads
134 134 og.missingheads = missingheads
135 135
136 136 if portable:
137 137 # recompute common and missingheads as if -r<rev> had been given for
138 138 # each head of missing, and --base <rev> for each head of the proper
139 139 # ancestors of missing
140 140 og._computecommonmissing()
141 141 cl = repo.changelog
142 142 missingrevs = set(cl.rev(n) for n in og._missing)
143 143 og._common = set(cl.ancestors(*missingrevs)) - missingrevs
144 144 commonheads = set(og.commonheads)
145 145 og.missingheads = [h for h in og.missingheads if h not in commonheads]
146 146
147 147 return og
148 148
149 149 def checkheads(repo, remote, outgoing, remoteheads, newbranch=False, inc=False):
150 150 """Check that a push won't add any outgoing head
151 151
152 152 raise Abort error and display ui message as needed.
153 153 """
154 154 if remoteheads == [nullid]:
155 155 # remote is empty, nothing to check.
156 156 return
157 157
158 158 cl = repo.changelog
159 159 if remote.capable('branchmap'):
160 160 # Check for each named branch if we're creating new remote heads.
161 161 # To be a remote head after push, node must be either:
162 162 # - unknown locally
163 163 # - a local outgoing head descended from update
164 164 # - a remote head that's known locally and not
165 165 # ancestral to an outgoing head
166 166
167 167 # 1. Create set of branches involved in the push.
168 168 branches = set(repo[n].branch() for n in outgoing.missing)
169 169
170 170 # 2. Check for new branches on the remote.
171 171 if remote.local():
172 172 remotemap = phases.visiblebranchmap(remote)
173 173 else:
174 174 remotemap = remote.branchmap()
175 175 newbranches = branches - set(remotemap)
176 176 if newbranches and not newbranch: # new branch requires --new-branch
177 177 branchnames = ', '.join(sorted(newbranches))
178 178 raise util.Abort(_("push creates new remote branches: %s!")
179 179 % branchnames,
180 180 hint=_("use 'hg push --new-branch' to create"
181 181 " new remote branches"))
182 182 branches.difference_update(newbranches)
183 183
184 184 # 3. Construct the initial oldmap and newmap dicts.
185 185 # They contain information about the remote heads before and
186 186 # after the push, respectively.
187 187 # Heads not found locally are not included in either dict,
188 188 # since they won't be affected by the push.
189 189 # unsynced contains all branches with incoming changesets.
190 190 oldmap = {}
191 191 newmap = {}
192 192 unsynced = set()
193 193 for branch in branches:
194 194 remotebrheads = remotemap[branch]
195 195 prunedbrheads = [h for h in remotebrheads if h in cl.nodemap]
196 196 oldmap[branch] = prunedbrheads
197 197 newmap[branch] = list(prunedbrheads)
198 198 if len(remotebrheads) > len(prunedbrheads):
199 199 unsynced.add(branch)
200 200
201 201 # 4. Update newmap with outgoing changes.
202 202 # This will possibly add new heads and remove existing ones.
203 203 ctxgen = (repo[n] for n in outgoing.missing)
204 204 repo._updatebranchcache(newmap, ctxgen)
205 205
206 206 else:
207 207 # 1-4b. old servers: Check for new topological heads.
208 208 # Construct {old,new}map with branch = None (topological branch).
209 209 # (code based on _updatebranchcache)
210 210 oldheads = set(h for h in remoteheads if h in cl.nodemap)
211 211 newheads = oldheads.union(outgoing.missing)
212 212 if len(newheads) > 1:
213 213 for latest in reversed(outgoing.missing):
214 214 if latest not in newheads:
215 215 continue
216 216 minhrev = min(cl.rev(h) for h in newheads)
217 217 reachable = cl.reachable(latest, cl.node(minhrev))
218 218 reachable.remove(latest)
219 219 newheads.difference_update(reachable)
220 220 branches = set([None])
221 221 newmap = {None: newheads}
222 222 oldmap = {None: oldheads}
223 223 unsynced = inc and branches or set()
224 224
225 225 # 5. Check for new heads.
226 226 # If there are more heads after the push than before, a suitable
227 227 # error message, depending on unsynced status, is displayed.
228 228 error = None
229 remotebookmarks = remote.listkeys('bookmarks')
230 localbookmarks = repo._bookmarks
231
229 232 for branch in branches:
230 233 newhs = set(newmap[branch])
231 234 oldhs = set(oldmap[branch])
235 dhs = None
232 236 if len(newhs) > len(oldhs):
233 dhs = list(newhs - oldhs)
237 # strip updates to existing remote heads from the new heads list
238 bookmarkedheads = set([repo[bm].node() for bm in localbookmarks
239 if bm in remotebookmarks and
240 remote[bm] == repo[bm].ancestor(remote[bm])])
241 dhs = list(newhs - bookmarkedheads - oldhs)
242 if dhs:
234 243 if error is None:
235 244 if branch not in ('default', None):
236 245 error = _("push creates new remote head %s "
237 246 "on branch '%s'!") % (short(dhs[0]), branch)
238 247 else:
239 248 error = _("push creates new remote head %s!"
240 249 ) % short(dhs[0])
241 250 if branch in unsynced:
242 251 hint = _("you should pull and merge or "
243 252 "use push -f to force")
244 253 else:
245 254 hint = _("did you forget to merge? "
246 255 "use push -f to force")
247 256 if branch is not None:
248 257 repo.ui.note(_("new remote heads on branch '%s'\n") % branch)
249 258 for h in dhs:
250 259 repo.ui.note(_("new remote head %s\n") % short(h))
251 260 if error:
252 261 raise util.Abort(error, hint=hint)
253 262
254 263 # 6. Check for unsynced changes on involved branches.
255 264 if unsynced:
256 265 repo.ui.warn(_("note: unsynced remote changes!\n"))
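The core of the change is in step 5 of checkheads(): before a candidate head counts as "new", heads that merely fast-forward a bookmark the remote already carries are subtracted out. Below is a condensed sketch of that filter, mirroring the lines added above; the standalone helper name bookmarkedheads() is illustrative only, since the diff inlines this computation inside the per-branch loop.

    # condensed restatement of the filter added above
    def bookmarkedheads(repo, remote):
        remotebookmarks = remote.listkeys('bookmarks')
        heads = set()
        for bm in repo._bookmarks:
            if bm not in remotebookmarks:
                continue
            # the remote bookmark must sit on an ancestor of the local one,
            # i.e. pushing it is a pure fast-forward
            if remote[bm] == repo[bm].ancestor(remote[bm]):
                heads.add(repo[bm].node())
        return heads

    # step 5 then computes: dhs = list(newhs - bookmarkedheads(...) - oldhs)
    # and only a non-empty dhs triggers the "new remote head" abort

In effect, advancing an existing remote bookmark onto a new head no longer requires --force, while genuinely divergent heads are still rejected; the new test cases below exercise both sides.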
@@ -1,215 +1,254 @@
1 1 $ "$TESTDIR/hghave" serve || exit 80
2 2
3 3 initialize
4 4
5 5 $ hg init a
6 6 $ cd a
7 7 $ echo 'test' > test
8 8 $ hg commit -Am'test'
9 9 adding test
10 10
11 11 set bookmarks
12 12
13 13 $ hg bookmark X
14 14 $ hg bookmark Y
15 15 $ hg bookmark Z
16 16
17 17 import bookmark by name
18 18
19 19 $ hg init ../b
20 20 $ cd ../b
21 21 $ hg book Y
22 22 $ hg book
23 23 * Y -1:000000000000
24 24 $ hg pull ../a
25 25 pulling from ../a
26 26 requesting all changes
27 27 adding changesets
28 28 adding manifests
29 29 adding file changes
30 30 added 1 changesets with 1 changes to 1 files
31 31 updating bookmark Y
32 32 adding remote bookmark X
33 33 adding remote bookmark Z
34 34 (run 'hg update' to get a working copy)
35 35 $ hg bookmarks
36 36 X 0:4e3505fd9583
37 37 Y 0:4e3505fd9583
38 38 Z 0:4e3505fd9583
39 39 $ hg debugpushkey ../a namespaces
40 40 bookmarks
41 41 phases
42 42 namespaces
43 43 $ hg debugpushkey ../a bookmarks
44 44 Y 4e3505fd95835d721066b76e75dbb8cc554d7f77
45 45 X 4e3505fd95835d721066b76e75dbb8cc554d7f77
46 46 Z 4e3505fd95835d721066b76e75dbb8cc554d7f77
47 47 $ hg pull -B X ../a
48 48 pulling from ../a
49 49 no changes found
50 50 importing bookmark X
51 51 $ hg bookmark
52 52 X 0:4e3505fd9583
53 53 Y 0:4e3505fd9583
54 54 Z 0:4e3505fd9583
55 55
56 56 export bookmark by name
57 57
58 58 $ hg bookmark W
59 59 $ hg bookmark foo
60 60 $ hg bookmark foobar
61 61 $ hg push -B W ../a
62 62 pushing to ../a
63 63 searching for changes
64 64 no changes found
65 65 exporting bookmark W
66 66 [1]
67 67 $ hg -R ../a bookmarks
68 68 W -1:000000000000
69 69 X 0:4e3505fd9583
70 70 Y 0:4e3505fd9583
71 71 * Z 0:4e3505fd9583
72 72
73 73 delete a remote bookmark
74 74
75 75 $ hg book -d W
76 76 $ hg push -B W ../a
77 77 pushing to ../a
78 78 searching for changes
79 79 no changes found
80 80 deleting remote bookmark W
81 81 [1]
82 82
83 83 push/pull name that doesn't exist
84 84
85 85 $ hg push -B badname ../a
86 86 pushing to ../a
87 87 searching for changes
88 88 no changes found
89 89 bookmark badname does not exist on the local or remote repository!
90 90 [2]
91 91 $ hg pull -B anotherbadname ../a
92 92 pulling from ../a
93 93 abort: remote bookmark anotherbadname not found!
94 94 [255]
95 95
96 96 divergent bookmarks
97 97
98 98 $ cd ../a
99 99 $ echo c1 > f1
100 100 $ hg ci -Am1
101 101 adding f1
102 102 $ hg book -f X
103 103 $ hg book
104 104 * X 1:0d2164f0ce0d
105 105 Y 0:4e3505fd9583
106 106 Z 1:0d2164f0ce0d
107 107
108 108 $ cd ../b
109 109 $ hg up
110 110 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
111 111 updating bookmark foobar
112 112 $ echo c2 > f2
113 113 $ hg ci -Am2
114 114 adding f2
115 115 $ hg book -f X
116 116 $ hg book
117 117 * X 1:9b140be10808
118 118 Y 0:4e3505fd9583
119 119 Z 0:4e3505fd9583
120 120 foo -1:000000000000
121 121 foobar 1:9b140be10808
122 122
123 123 $ hg pull --config paths.foo=../a foo
124 124 pulling from $TESTTMP/a
125 125 searching for changes
126 126 adding changesets
127 127 adding manifests
128 128 adding file changes
129 129 added 1 changesets with 1 changes to 1 files (+1 heads)
130 130 divergent bookmark X stored as X@foo
131 131 updating bookmark Z
132 132 (run 'hg heads' to see heads, 'hg merge' to merge)
133 133 $ hg book
134 134 * X 1:9b140be10808
135 135 X@foo 2:0d2164f0ce0d
136 136 Y 0:4e3505fd9583
137 137 Z 2:0d2164f0ce0d
138 138 foo -1:000000000000
139 139 foobar 1:9b140be10808
140 140 $ hg push -f ../a
141 141 pushing to ../a
142 142 searching for changes
143 143 adding changesets
144 144 adding manifests
145 145 adding file changes
146 146 added 1 changesets with 1 changes to 1 files (+1 heads)
147 147 $ hg -R ../a book
148 148 * X 1:0d2164f0ce0d
149 149 Y 0:4e3505fd9583
150 150 Z 1:0d2164f0ce0d
151 151
152 update a remote bookmark from a non-head to a head
153
154 $ hg up -q Y
155 $ echo c3 > f2
156 $ hg ci -Am3
157 adding f2
158 created new head
159 $ hg push ../a
160 pushing to ../a
161 searching for changes
162 adding changesets
163 adding manifests
164 adding file changes
165 added 1 changesets with 1 changes to 1 files (+1 heads)
166 updating bookmark Y
167 $ hg -R ../a book
168 * X 1:0d2164f0ce0d
169 Y 3:f6fc62dde3c0
170 Z 1:0d2164f0ce0d
171
172 diverging a remote bookmark fails
173
174 $ hg up -q 4e3505fd9583
175 $ echo c4 > f2
176 $ hg ci -Am4
177 adding f2
178 created new head
179 $ hg book -f Y
180 $ hg push ../a
181 pushing to ../a
182 searching for changes
183 abort: push creates new remote head 4efff6d98829!
184 (did you forget to merge? use push -f to force)
185 [255]
186 $ hg -R ../a book
187 * X 1:0d2164f0ce0d
188 Y 3:f6fc62dde3c0
189 Z 1:0d2164f0ce0d
190
152 191 hgweb
153 192
154 193 $ cat <<EOF > .hg/hgrc
155 194 > [web]
156 195 > push_ssl = false
157 196 > allow_push = *
158 197 > EOF
159 198
160 199 $ hg serve -p $HGPORT -d --pid-file=../hg.pid -E errors.log
161 200 $ cat ../hg.pid >> $DAEMON_PIDS
162 201 $ cd ../a
163 202
164 203 $ hg debugpushkey http://localhost:$HGPORT/ namespaces
165 204 bookmarks
166 205 phases
167 206 namespaces
168 207 $ hg debugpushkey http://localhost:$HGPORT/ bookmarks
169 Y 4e3505fd95835d721066b76e75dbb8cc554d7f77
208 Y 4efff6d98829d9c824c621afd6e3f01865f5439f
170 209 foobar 9b140be1080824d768c5a4691a564088eede71f9
171 210 Z 0d2164f0ce0d8f1d6f94351eba04b794909be66c
172 211 foo 0000000000000000000000000000000000000000
173 212 X 9b140be1080824d768c5a4691a564088eede71f9
174 213 $ hg out -B http://localhost:$HGPORT/
175 214 comparing with http://localhost:$HGPORT/
176 215 searching for changed bookmarks
177 216 no changed bookmarks found
178 217 [1]
179 218 $ hg push -B Z http://localhost:$HGPORT/
180 219 pushing to http://localhost:$HGPORT/
181 220 searching for changes
182 221 no changes found
183 222 exporting bookmark Z
184 223 [1]
185 224 $ hg book -d Z
186 225 $ hg in -B http://localhost:$HGPORT/
187 226 comparing with http://localhost:$HGPORT/
188 227 searching for changed bookmarks
189 228 Z 0d2164f0ce0d
190 229 foo 000000000000
191 230 foobar 9b140be10808
192 231 $ hg pull -B Z http://localhost:$HGPORT/
193 232 pulling from http://localhost:$HGPORT/
194 233 no changes found
195 234 adding remote bookmark foobar
196 235 adding remote bookmark Z
197 236 adding remote bookmark foo
198 237 divergent bookmark X stored as X@1
199 238 importing bookmark Z
200 239 $ hg clone http://localhost:$HGPORT/ cloned-bookmarks
201 240 requesting all changes
202 241 adding changesets
203 242 adding manifests
204 243 adding file changes
205 added 3 changesets with 3 changes to 3 files (+1 heads)
244 added 5 changesets with 5 changes to 3 files (+3 heads)
206 245 updating to branch default
207 246 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
208 247 $ hg -R cloned-bookmarks bookmarks
209 248 X 1:9b140be10808
210 Y 0:4e3505fd9583
249 Y 4:4efff6d98829
211 250 Z 2:0d2164f0ce0d
212 251 foo -1:000000000000
213 252 foobar 1:9b140be10808
214 253
215 254 $ kill `cat ../hg.pid`
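To make the two new test cases concrete, here is a toy walk-through of the set arithmetic from checkheads(), using plain Python sets of the short hashes printed above; it only mirrors the computation and does not drive a real repository.

    # remote heads of repo "a" before the first new test case
    oldhs = {'0d2164f0ce0d', '9b140be10808'}

    # "update a remote bookmark from a non-head to a head": the push adds
    # f6fc62dde3c0, but remote Y (4e3505fd9583) is an ancestor of local Y
    # (f6fc62dde3c0), so that head is bookmark-explained and dhs is empty
    newhs = oldhs | {'f6fc62dde3c0'}
    bookmarkedheads = {'f6fc62dde3c0'}
    assert not (newhs - bookmarkedheads - oldhs)   # push accepted

    # "diverging a remote bookmark fails": remote Y now sits on f6fc62dde3c0,
    # which is not an ancestor of local Y (4efff6d98829), so the new head is
    # not filtered and the push aborts with "push creates new remote head"
    oldhs = oldhs | {'f6fc62dde3c0'}
    newhs = oldhs | {'4efff6d98829'}
    bookmarkedheads = set()
    assert (newhs - bookmarkedheads - oldhs) == {'4efff6d98829'}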