##// END OF EJS Templates
fix push of moved bookmark when creating new branch heads...
Sune Foldager -
r17043:6f89c3f0 default
parent child Browse files
Show More
@@ -1,264 +1,268 b''
1 # discovery.py - protocol changeset discovery functions
1 # discovery.py - protocol changeset discovery functions
2 #
2 #
3 # Copyright 2010 Matt Mackall <mpm@selenic.com>
3 # Copyright 2010 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from node import nullid, short
8 from node import nullid, short
9 from i18n import _
9 from i18n import _
10 import util, setdiscovery, treediscovery, phases
10 import util, setdiscovery, treediscovery, phases
11
11
12 def findcommonincoming(repo, remote, heads=None, force=False):
12 def findcommonincoming(repo, remote, heads=None, force=False):
13 """Return a tuple (common, anyincoming, heads) used to identify the common
13 """Return a tuple (common, anyincoming, heads) used to identify the common
14 subset of nodes between repo and remote.
14 subset of nodes between repo and remote.
15
15
16 "common" is a list of (at least) the heads of the common subset.
16 "common" is a list of (at least) the heads of the common subset.
17 "anyincoming" is testable as a boolean indicating if any nodes are missing
17 "anyincoming" is testable as a boolean indicating if any nodes are missing
18 locally. If remote does not support getbundle, this actually is a list of
18 locally. If remote does not support getbundle, this actually is a list of
19 roots of the nodes that would be incoming, to be supplied to
19 roots of the nodes that would be incoming, to be supplied to
20 changegroupsubset. No code except for pull should be relying on this fact
20 changegroupsubset. No code except for pull should be relying on this fact
21 any longer.
21 any longer.
22 "heads" is either the supplied heads, or else the remote's heads.
22 "heads" is either the supplied heads, or else the remote's heads.
23
23
24 If you pass heads and they are all known locally, the response lists just
24 If you pass heads and they are all known locally, the response lists just
25 these heads in "common" and in "heads".
25 these heads in "common" and in "heads".
26
26
27 Please use findcommonoutgoing to compute the set of outgoing nodes to give
27 Please use findcommonoutgoing to compute the set of outgoing nodes to give
28 extensions a good hook into outgoing.
28 extensions a good hook into outgoing.
29 """
29 """
30
30
31 if not remote.capable('getbundle'):
31 if not remote.capable('getbundle'):
32 return treediscovery.findcommonincoming(repo, remote, heads, force)
32 return treediscovery.findcommonincoming(repo, remote, heads, force)
33
33
34 if heads:
34 if heads:
35 allknown = True
35 allknown = True
36 nm = repo.changelog.nodemap
36 nm = repo.changelog.nodemap
37 for h in heads:
37 for h in heads:
38 if nm.get(h) is None:
38 if nm.get(h) is None:
39 allknown = False
39 allknown = False
40 break
40 break
41 if allknown:
41 if allknown:
42 return (heads, False, heads)
42 return (heads, False, heads)
43
43
44 res = setdiscovery.findcommonheads(repo.ui, repo, remote,
44 res = setdiscovery.findcommonheads(repo.ui, repo, remote,
45 abortwhenunrelated=not force)
45 abortwhenunrelated=not force)
46 common, anyinc, srvheads = res
46 common, anyinc, srvheads = res
47 return (list(common), anyinc, heads or list(srvheads))
47 return (list(common), anyinc, heads or list(srvheads))
48
48
49 class outgoing(object):
49 class outgoing(object):
50 '''Represents the set of nodes present in a local repo but not in a
50 '''Represents the set of nodes present in a local repo but not in a
51 (possibly) remote one.
51 (possibly) remote one.
52
52
53 Members:
53 Members:
54
54
55 missing is a list of all nodes present in local but not in remote.
55 missing is a list of all nodes present in local but not in remote.
56 common is a list of all nodes shared between the two repos.
56 common is a list of all nodes shared between the two repos.
57 excluded is the list of missing changesets that shouldn't be sent remotely.
57 excluded is the list of missing changesets that shouldn't be sent remotely.
58 missingheads is the list of heads of missing.
58 missingheads is the list of heads of missing.
59 commonheads is the list of heads of common.
59 commonheads is the list of heads of common.
60
60
61 The sets are computed on demand from the heads, unless provided upfront
61 The sets are computed on demand from the heads, unless provided upfront
62 by discovery.'''
62 by discovery.'''
63
63
64 def __init__(self, revlog, commonheads, missingheads):
64 def __init__(self, revlog, commonheads, missingheads):
65 self.commonheads = commonheads
65 self.commonheads = commonheads
66 self.missingheads = missingheads
66 self.missingheads = missingheads
67 self._revlog = revlog
67 self._revlog = revlog
68 self._common = None
68 self._common = None
69 self._missing = None
69 self._missing = None
70 self.excluded = []
70 self.excluded = []
71
71
72 def _computecommonmissing(self):
72 def _computecommonmissing(self):
73 sets = self._revlog.findcommonmissing(self.commonheads,
73 sets = self._revlog.findcommonmissing(self.commonheads,
74 self.missingheads)
74 self.missingheads)
75 self._common, self._missing = sets
75 self._common, self._missing = sets
76
76
77 @util.propertycache
77 @util.propertycache
78 def common(self):
78 def common(self):
79 if self._common is None:
79 if self._common is None:
80 self._computecommonmissing()
80 self._computecommonmissing()
81 return self._common
81 return self._common
82
82
83 @util.propertycache
83 @util.propertycache
84 def missing(self):
84 def missing(self):
85 if self._missing is None:
85 if self._missing is None:
86 self._computecommonmissing()
86 self._computecommonmissing()
87 return self._missing
87 return self._missing
88
88
89 def findcommonoutgoing(repo, other, onlyheads=None, force=False,
89 def findcommonoutgoing(repo, other, onlyheads=None, force=False,
90 commoninc=None, portable=False):
90 commoninc=None, portable=False):
91 '''Return an outgoing instance to identify the nodes present in repo but
91 '''Return an outgoing instance to identify the nodes present in repo but
92 not in other.
92 not in other.
93
93
94 If onlyheads is given, only nodes ancestral to nodes in onlyheads
94 If onlyheads is given, only nodes ancestral to nodes in onlyheads
95 (inclusive) are included. If you already know the local repo's heads,
95 (inclusive) are included. If you already know the local repo's heads,
96 passing them in onlyheads is faster than letting them be recomputed here.
96 passing them in onlyheads is faster than letting them be recomputed here.
97
97
98 If commoninc is given, it must be the result of a prior call to
98 If commoninc is given, it must be the result of a prior call to
99 findcommonincoming(repo, other, force) to avoid recomputing it here.
99 findcommonincoming(repo, other, force) to avoid recomputing it here.
100
100
101 If portable is given, compute more conservative common and missingheads,
101 If portable is given, compute more conservative common and missingheads,
102 to make bundles created from the instance more portable.'''
102 to make bundles created from the instance more portable.'''
103 # declare an empty outgoing object to be filled later
103 # declare an empty outgoing object to be filled later
104 og = outgoing(repo.changelog, None, None)
104 og = outgoing(repo.changelog, None, None)
105
105
106 # get common set if not provided
106 # get common set if not provided
107 if commoninc is None:
107 if commoninc is None:
108 commoninc = findcommonincoming(repo, other, force=force)
108 commoninc = findcommonincoming(repo, other, force=force)
109 og.commonheads, _any, _hds = commoninc
109 og.commonheads, _any, _hds = commoninc
110
110
111 # compute outgoing
111 # compute outgoing
112 if not repo._phasecache.phaseroots[phases.secret]:
112 if not repo._phasecache.phaseroots[phases.secret]:
113 og.missingheads = onlyheads or repo.heads()
113 og.missingheads = onlyheads or repo.heads()
114 elif onlyheads is None:
114 elif onlyheads is None:
115 # use visible heads as it should be cached
115 # use visible heads as it should be cached
116 og.missingheads = phases.visibleheads(repo)
116 og.missingheads = phases.visibleheads(repo)
117 og.excluded = [ctx.node() for ctx in repo.set('secret()')]
117 og.excluded = [ctx.node() for ctx in repo.set('secret()')]
118 else:
118 else:
119 # compute common, missing and exclude secret stuff
119 # compute common, missing and exclude secret stuff
120 sets = repo.changelog.findcommonmissing(og.commonheads, onlyheads)
120 sets = repo.changelog.findcommonmissing(og.commonheads, onlyheads)
121 og._common, allmissing = sets
121 og._common, allmissing = sets
122 og._missing = missing = []
122 og._missing = missing = []
123 og.excluded = excluded = []
123 og.excluded = excluded = []
124 for node in allmissing:
124 for node in allmissing:
125 if repo[node].phase() >= phases.secret:
125 if repo[node].phase() >= phases.secret:
126 excluded.append(node)
126 excluded.append(node)
127 else:
127 else:
128 missing.append(node)
128 missing.append(node)
129 if excluded:
129 if excluded:
130 # update missing heads
130 # update missing heads
131 missingheads = phases.newheads(repo, onlyheads, excluded)
131 missingheads = phases.newheads(repo, onlyheads, excluded)
132 else:
132 else:
133 missingheads = onlyheads
133 missingheads = onlyheads
134 og.missingheads = missingheads
134 og.missingheads = missingheads
135
135
136 if portable:
136 if portable:
137 # recompute common and missingheads as if -r<rev> had been given for
137 # recompute common and missingheads as if -r<rev> had been given for
138 # each head of missing, and --base <rev> for each head of the proper
138 # each head of missing, and --base <rev> for each head of the proper
139 # ancestors of missing
139 # ancestors of missing
140 og._computecommonmissing()
140 og._computecommonmissing()
141 cl = repo.changelog
141 cl = repo.changelog
142 missingrevs = set(cl.rev(n) for n in og._missing)
142 missingrevs = set(cl.rev(n) for n in og._missing)
143 og._common = set(cl.ancestors(missingrevs)) - missingrevs
143 og._common = set(cl.ancestors(missingrevs)) - missingrevs
144 commonheads = set(og.commonheads)
144 commonheads = set(og.commonheads)
145 og.missingheads = [h for h in og.missingheads if h not in commonheads]
145 og.missingheads = [h for h in og.missingheads if h not in commonheads]
146
146
147 return og
147 return og
148
148
149 def checkheads(repo, remote, outgoing, remoteheads, newbranch=False, inc=False):
149 def checkheads(repo, remote, outgoing, remoteheads, newbranch=False, inc=False):
150 """Check that a push won't add any outgoing head
150 """Check that a push won't add any outgoing head
151
151
152 raise Abort error and display ui message as needed.
152 raise Abort error and display ui message as needed.
153 """
153 """
154 if remoteheads == [nullid]:
154 if remoteheads == [nullid]:
155 # remote is empty, nothing to check.
155 # remote is empty, nothing to check.
156 return
156 return
157
157
158 cl = repo.changelog
158 cl = repo.changelog
159 if remote.capable('branchmap'):
159 if remote.capable('branchmap'):
160 # Check for each named branch if we're creating new remote heads.
160 # Check for each named branch if we're creating new remote heads.
161 # To be a remote head after push, node must be either:
161 # To be a remote head after push, node must be either:
162 # - unknown locally
162 # - unknown locally
163 # - a local outgoing head descended from update
163 # - a local outgoing head descended from update
164 # - a remote head that's known locally and not
164 # - a remote head that's known locally and not
165 # ancestral to an outgoing head
165 # ancestral to an outgoing head
166
166
167 # 1. Create set of branches involved in the push.
167 # 1. Create set of branches involved in the push.
168 branches = set(repo[n].branch() for n in outgoing.missing)
168 branches = set(repo[n].branch() for n in outgoing.missing)
169
169
170 # 2. Check for new branches on the remote.
170 # 2. Check for new branches on the remote.
171 if remote.local():
171 if remote.local():
172 remotemap = phases.visiblebranchmap(remote)
172 remotemap = phases.visiblebranchmap(remote)
173 else:
173 else:
174 remotemap = remote.branchmap()
174 remotemap = remote.branchmap()
175 newbranches = branches - set(remotemap)
175 newbranches = branches - set(remotemap)
176 if newbranches and not newbranch: # new branch requires --new-branch
176 if newbranches and not newbranch: # new branch requires --new-branch
177 branchnames = ', '.join(sorted(newbranches))
177 branchnames = ', '.join(sorted(newbranches))
178 raise util.Abort(_("push creates new remote branches: %s!")
178 raise util.Abort(_("push creates new remote branches: %s!")
179 % branchnames,
179 % branchnames,
180 hint=_("use 'hg push --new-branch' to create"
180 hint=_("use 'hg push --new-branch' to create"
181 " new remote branches"))
181 " new remote branches"))
182 branches.difference_update(newbranches)
182 branches.difference_update(newbranches)
183
183
184 # 3. Construct the initial oldmap and newmap dicts.
184 # 3. Construct the initial oldmap and newmap dicts.
185 # They contain information about the remote heads before and
185 # They contain information about the remote heads before and
186 # after the push, respectively.
186 # after the push, respectively.
187 # Heads not found locally are not included in either dict,
187 # Heads not found locally are not included in either dict,
188 # since they won't be affected by the push.
188 # since they won't be affected by the push.
189 # unsynced contains all branches with incoming changesets.
189 # unsynced contains all branches with incoming changesets.
190 oldmap = {}
190 oldmap = {}
191 newmap = {}
191 newmap = {}
192 unsynced = set()
192 unsynced = set()
193 for branch in branches:
193 for branch in branches:
194 remotebrheads = remotemap[branch]
194 remotebrheads = remotemap[branch]
195 prunedbrheads = [h for h in remotebrheads if h in cl.nodemap]
195 prunedbrheads = [h for h in remotebrheads if h in cl.nodemap]
196 oldmap[branch] = prunedbrheads
196 oldmap[branch] = prunedbrheads
197 newmap[branch] = list(prunedbrheads)
197 newmap[branch] = list(prunedbrheads)
198 if len(remotebrheads) > len(prunedbrheads):
198 if len(remotebrheads) > len(prunedbrheads):
199 unsynced.add(branch)
199 unsynced.add(branch)
200
200
201 # 4. Update newmap with outgoing changes.
201 # 4. Update newmap with outgoing changes.
202 # This will possibly add new heads and remove existing ones.
202 # This will possibly add new heads and remove existing ones.
203 ctxgen = (repo[n] for n in outgoing.missing)
203 ctxgen = (repo[n] for n in outgoing.missing)
204 repo._updatebranchcache(newmap, ctxgen)
204 repo._updatebranchcache(newmap, ctxgen)
205
205
206 else:
206 else:
207 # 1-4b. old servers: Check for new topological heads.
207 # 1-4b. old servers: Check for new topological heads.
208 # Construct {old,new}map with branch = None (topological branch).
208 # Construct {old,new}map with branch = None (topological branch).
209 # (code based on _updatebranchcache)
209 # (code based on _updatebranchcache)
210 oldheadrevs = set(cl.rev(h) for h in remoteheads if h in cl.nodemap)
210 oldheadrevs = set(cl.rev(h) for h in remoteheads if h in cl.nodemap)
211 missingrevs = [cl.rev(node) for node in outgoing.missing]
211 missingrevs = [cl.rev(node) for node in outgoing.missing]
212 newheadrevs = oldheadrevs.union(missingrevs)
212 newheadrevs = oldheadrevs.union(missingrevs)
213 if len(newheadrevs) > 1:
213 if len(newheadrevs) > 1:
214 for latest in sorted(missingrevs, reverse=True):
214 for latest in sorted(missingrevs, reverse=True):
215 if latest not in newheadrevs:
215 if latest not in newheadrevs:
216 continue
216 continue
217 reachable = cl.ancestors([latest], min(newheadrevs))
217 reachable = cl.ancestors([latest], min(newheadrevs))
218 newheadrevs.difference_update(reachable)
218 newheadrevs.difference_update(reachable)
219 branches = set([None])
219 branches = set([None])
220 newmap = {None: [cl.node(rev) for rev in newheadrevs]}
220 newmap = {None: [cl.node(rev) for rev in newheadrevs]}
221 oldmap = {None: [cl.node(rev) for rev in oldheadrevs]}
221 oldmap = {None: [cl.node(rev) for rev in oldheadrevs]}
222 unsynced = inc and branches or set()
222 unsynced = inc and branches or set()
223
223
224 # 5. Check for new heads.
224 # 5. Check for new heads.
225 # If there are more heads after the push than before, a suitable
225 # If there are more heads after the push than before, a suitable
226 # error message, depending on unsynced status, is displayed.
226 # error message, depending on unsynced status, is displayed.
227 error = None
227 error = None
228 remotebookmarks = remote.listkeys('bookmarks')
229 localbookmarks = repo._bookmarks
228 localbookmarks = repo._bookmarks
230
229
231 for branch in branches:
230 for branch in branches:
232 newhs = set(newmap[branch])
231 newhs = set(newmap[branch])
233 oldhs = set(oldmap[branch])
232 oldhs = set(oldmap[branch])
234 dhs = None
233 dhs = None
235 if len(newhs) > len(oldhs):
234 if len(newhs) > len(oldhs):
236 # strip updates to existing remote heads from the new heads list
235 # strip updates to existing remote heads from the new heads list
237 bookmarkedheads = set([repo[bm].node() for bm in localbookmarks
236 remotebookmarks = remote.listkeys('bookmarks')
238 if bm in remotebookmarks and
237 bookmarkedheads = set()
239 remote[bm] == repo[bm].ancestor(remote[bm])])
238 for bm in localbookmarks:
239 rnode = remotebookmarks.get(bm)
240 if rnode and rnode in repo:
241 lctx, rctx = repo[bm], repo[rnode]
242 if rctx == lctx.ancestor(rctx):
243 bookmarkedheads.add(lctx.node())
240 dhs = list(newhs - bookmarkedheads - oldhs)
244 dhs = list(newhs - bookmarkedheads - oldhs)
241 if dhs:
245 if dhs:
242 if error is None:
246 if error is None:
243 if branch not in ('default', None):
247 if branch not in ('default', None):
244 error = _("push creates new remote head %s "
248 error = _("push creates new remote head %s "
245 "on branch '%s'!") % (short(dhs[0]), branch)
249 "on branch '%s'!") % (short(dhs[0]), branch)
246 else:
250 else:
247 error = _("push creates new remote head %s!"
251 error = _("push creates new remote head %s!"
248 ) % short(dhs[0])
252 ) % short(dhs[0])
249 if branch in unsynced:
253 if branch in unsynced:
250 hint = _("you should pull and merge or "
254 hint = _("you should pull and merge or "
251 "use push -f to force")
255 "use push -f to force")
252 else:
256 else:
253 hint = _("did you forget to merge? "
257 hint = _("did you forget to merge? "
254 "use push -f to force")
258 "use push -f to force")
255 if branch is not None:
259 if branch is not None:
256 repo.ui.note(_("new remote heads on branch '%s'\n") % branch)
260 repo.ui.note(_("new remote heads on branch '%s'\n") % branch)
257 for h in dhs:
261 for h in dhs:
258 repo.ui.note(_("new remote head %s\n") % short(h))
262 repo.ui.note(_("new remote head %s\n") % short(h))
259 if error:
263 if error:
260 raise util.Abort(error, hint=hint)
264 raise util.Abort(error, hint=hint)
261
265
262 # 6. Check for unsynced changes on involved branches.
266 # 6. Check for unsynced changes on involved branches.
263 if unsynced:
267 if unsynced:
264 repo.ui.warn(_("note: unsynced remote changes!\n"))
268 repo.ui.warn(_("note: unsynced remote changes!\n"))
@@ -1,254 +1,264 b''
1 $ "$TESTDIR/hghave" serve || exit 80
1 $ "$TESTDIR/hghave" serve || exit 80
2
2
3 initialize
3 initialize
4
4
5 $ hg init a
5 $ hg init a
6 $ cd a
6 $ cd a
7 $ echo 'test' > test
7 $ echo 'test' > test
8 $ hg commit -Am'test'
8 $ hg commit -Am'test'
9 adding test
9 adding test
10
10
11 set bookmarks
11 set bookmarks
12
12
13 $ hg bookmark X
13 $ hg bookmark X
14 $ hg bookmark Y
14 $ hg bookmark Y
15 $ hg bookmark Z
15 $ hg bookmark Z
16
16
17 import bookmark by name
17 import bookmark by name
18
18
19 $ hg init ../b
19 $ hg init ../b
20 $ cd ../b
20 $ cd ../b
21 $ hg book Y
21 $ hg book Y
22 $ hg book
22 $ hg book
23 * Y -1:000000000000
23 * Y -1:000000000000
24 $ hg pull ../a
24 $ hg pull ../a
25 pulling from ../a
25 pulling from ../a
26 requesting all changes
26 requesting all changes
27 adding changesets
27 adding changesets
28 adding manifests
28 adding manifests
29 adding file changes
29 adding file changes
30 added 1 changesets with 1 changes to 1 files
30 added 1 changesets with 1 changes to 1 files
31 updating bookmark Y
31 updating bookmark Y
32 adding remote bookmark X
32 adding remote bookmark X
33 adding remote bookmark Z
33 adding remote bookmark Z
34 (run 'hg update' to get a working copy)
34 (run 'hg update' to get a working copy)
35 $ hg bookmarks
35 $ hg bookmarks
36 X 0:4e3505fd9583
36 X 0:4e3505fd9583
37 Y 0:4e3505fd9583
37 Y 0:4e3505fd9583
38 Z 0:4e3505fd9583
38 Z 0:4e3505fd9583
39 $ hg debugpushkey ../a namespaces
39 $ hg debugpushkey ../a namespaces
40 bookmarks
40 bookmarks
41 phases
41 phases
42 namespaces
42 namespaces
43 $ hg debugpushkey ../a bookmarks
43 $ hg debugpushkey ../a bookmarks
44 Y 4e3505fd95835d721066b76e75dbb8cc554d7f77
44 Y 4e3505fd95835d721066b76e75dbb8cc554d7f77
45 X 4e3505fd95835d721066b76e75dbb8cc554d7f77
45 X 4e3505fd95835d721066b76e75dbb8cc554d7f77
46 Z 4e3505fd95835d721066b76e75dbb8cc554d7f77
46 Z 4e3505fd95835d721066b76e75dbb8cc554d7f77
47 $ hg pull -B X ../a
47 $ hg pull -B X ../a
48 pulling from ../a
48 pulling from ../a
49 no changes found
49 no changes found
50 importing bookmark X
50 importing bookmark X
51 $ hg bookmark
51 $ hg bookmark
52 X 0:4e3505fd9583
52 X 0:4e3505fd9583
53 Y 0:4e3505fd9583
53 Y 0:4e3505fd9583
54 Z 0:4e3505fd9583
54 Z 0:4e3505fd9583
55
55
56 export bookmark by name
56 export bookmark by name
57
57
58 $ hg bookmark W
58 $ hg bookmark W
59 $ hg bookmark foo
59 $ hg bookmark foo
60 $ hg bookmark foobar
60 $ hg bookmark foobar
61 $ hg push -B W ../a
61 $ hg push -B W ../a
62 pushing to ../a
62 pushing to ../a
63 searching for changes
63 searching for changes
64 no changes found
64 no changes found
65 exporting bookmark W
65 exporting bookmark W
66 [1]
66 [1]
67 $ hg -R ../a bookmarks
67 $ hg -R ../a bookmarks
68 W -1:000000000000
68 W -1:000000000000
69 X 0:4e3505fd9583
69 X 0:4e3505fd9583
70 Y 0:4e3505fd9583
70 Y 0:4e3505fd9583
71 * Z 0:4e3505fd9583
71 * Z 0:4e3505fd9583
72
72
73 delete a remote bookmark
73 delete a remote bookmark
74
74
75 $ hg book -d W
75 $ hg book -d W
76 $ hg push -B W ../a
76 $ hg push -B W ../a
77 pushing to ../a
77 pushing to ../a
78 searching for changes
78 searching for changes
79 no changes found
79 no changes found
80 deleting remote bookmark W
80 deleting remote bookmark W
81 [1]
81 [1]
82
82
83 push/pull name that doesn't exist
83 push/pull name that doesn't exist
84
84
85 $ hg push -B badname ../a
85 $ hg push -B badname ../a
86 pushing to ../a
86 pushing to ../a
87 searching for changes
87 searching for changes
88 no changes found
88 no changes found
89 bookmark badname does not exist on the local or remote repository!
89 bookmark badname does not exist on the local or remote repository!
90 [2]
90 [2]
91 $ hg pull -B anotherbadname ../a
91 $ hg pull -B anotherbadname ../a
92 pulling from ../a
92 pulling from ../a
93 abort: remote bookmark anotherbadname not found!
93 abort: remote bookmark anotherbadname not found!
94 [255]
94 [255]
95
95
96 divergent bookmarks
96 divergent bookmarks
97
97
98 $ cd ../a
98 $ cd ../a
99 $ echo c1 > f1
99 $ echo c1 > f1
100 $ hg ci -Am1
100 $ hg ci -Am1
101 adding f1
101 adding f1
102 $ hg book -f X
102 $ hg book -f X
103 $ hg book
103 $ hg book
104 * X 1:0d2164f0ce0d
104 * X 1:0d2164f0ce0d
105 Y 0:4e3505fd9583
105 Y 0:4e3505fd9583
106 Z 1:0d2164f0ce0d
106 Z 1:0d2164f0ce0d
107
107
108 $ cd ../b
108 $ cd ../b
109 $ hg up
109 $ hg up
110 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
110 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
111 updating bookmark foobar
111 updating bookmark foobar
112 $ echo c2 > f2
112 $ echo c2 > f2
113 $ hg ci -Am2
113 $ hg ci -Am2
114 adding f2
114 adding f2
115 $ hg book -f X
115 $ hg book -f X
116 $ hg book
116 $ hg book
117 * X 1:9b140be10808
117 * X 1:9b140be10808
118 Y 0:4e3505fd9583
118 Y 0:4e3505fd9583
119 Z 0:4e3505fd9583
119 Z 0:4e3505fd9583
120 foo -1:000000000000
120 foo -1:000000000000
121 foobar 1:9b140be10808
121 foobar 1:9b140be10808
122
122
123 $ hg pull --config paths.foo=../a foo
123 $ hg pull --config paths.foo=../a foo
124 pulling from $TESTTMP/a (glob)
124 pulling from $TESTTMP/a (glob)
125 searching for changes
125 searching for changes
126 adding changesets
126 adding changesets
127 adding manifests
127 adding manifests
128 adding file changes
128 adding file changes
129 added 1 changesets with 1 changes to 1 files (+1 heads)
129 added 1 changesets with 1 changes to 1 files (+1 heads)
130 divergent bookmark X stored as X@foo
130 divergent bookmark X stored as X@foo
131 updating bookmark Z
131 updating bookmark Z
132 (run 'hg heads' to see heads, 'hg merge' to merge)
132 (run 'hg heads' to see heads, 'hg merge' to merge)
133 $ hg book
133 $ hg book
134 * X 1:9b140be10808
134 * X 1:9b140be10808
135 X@foo 2:0d2164f0ce0d
135 X@foo 2:0d2164f0ce0d
136 Y 0:4e3505fd9583
136 Y 0:4e3505fd9583
137 Z 2:0d2164f0ce0d
137 Z 2:0d2164f0ce0d
138 foo -1:000000000000
138 foo -1:000000000000
139 foobar 1:9b140be10808
139 foobar 1:9b140be10808
140 $ hg push -f ../a
140 $ hg push -f ../a
141 pushing to ../a
141 pushing to ../a
142 searching for changes
142 searching for changes
143 adding changesets
143 adding changesets
144 adding manifests
144 adding manifests
145 adding file changes
145 adding file changes
146 added 1 changesets with 1 changes to 1 files (+1 heads)
146 added 1 changesets with 1 changes to 1 files (+1 heads)
147 $ hg -R ../a book
147 $ hg -R ../a book
148 * X 1:0d2164f0ce0d
148 * X 1:0d2164f0ce0d
149 Y 0:4e3505fd9583
149 Y 0:4e3505fd9583
150 Z 1:0d2164f0ce0d
150 Z 1:0d2164f0ce0d
151
151
152 update a remote bookmark from a non-head to a head
152 update a remote bookmark from a non-head to a head
153
153
154 $ hg up -q Y
154 $ hg up -q Y
155 $ echo c3 > f2
155 $ echo c3 > f2
156 $ hg ci -Am3
156 $ hg ci -Am3
157 adding f2
157 adding f2
158 created new head
158 created new head
159 $ hg push ../a
159 $ hg push ../a
160 pushing to ../a
160 pushing to ../a
161 searching for changes
161 searching for changes
162 adding changesets
162 adding changesets
163 adding manifests
163 adding manifests
164 adding file changes
164 adding file changes
165 added 1 changesets with 1 changes to 1 files (+1 heads)
165 added 1 changesets with 1 changes to 1 files (+1 heads)
166 updating bookmark Y
166 updating bookmark Y
167 $ hg -R ../a book
167 $ hg -R ../a book
168 * X 1:0d2164f0ce0d
168 * X 1:0d2164f0ce0d
169 Y 3:f6fc62dde3c0
169 Y 3:f6fc62dde3c0
170 Z 1:0d2164f0ce0d
170 Z 1:0d2164f0ce0d
171
171
172 diverging a remote bookmark fails
172 diverging a remote bookmark fails
173
173
174 $ hg up -q 4e3505fd9583
174 $ hg up -q 4e3505fd9583
175 $ echo c4 > f2
175 $ echo c4 > f2
176 $ hg ci -Am4
176 $ hg ci -Am4
177 adding f2
177 adding f2
178 created new head
178 created new head
179 $ hg book -f Y
179 $ hg book -f Y
180 $ hg push ../a
180
181 pushing to ../a
181 $ cat <<EOF > ../a/.hg/hgrc
182 > [web]
183 > push_ssl = false
184 > allow_push = *
185 > EOF
186
187 $ hg -R ../a serve -p $HGPORT2 -d --pid-file=../hg2.pid
188 $ cat ../hg2.pid >> $DAEMON_PIDS
189
190 $ hg push http://localhost:$HGPORT2/
191 pushing to http://localhost:$HGPORT2/
182 searching for changes
192 searching for changes
183 abort: push creates new remote head 4efff6d98829!
193 abort: push creates new remote head 4efff6d98829!
184 (did you forget to merge? use push -f to force)
194 (did you forget to merge? use push -f to force)
185 [255]
195 [255]
186 $ hg -R ../a book
196 $ hg -R ../a book
187 * X 1:0d2164f0ce0d
197 * X 1:0d2164f0ce0d
188 Y 3:f6fc62dde3c0
198 Y 3:f6fc62dde3c0
189 Z 1:0d2164f0ce0d
199 Z 1:0d2164f0ce0d
190
200
191 hgweb
201 hgweb
192
202
193 $ cat <<EOF > .hg/hgrc
203 $ cat <<EOF > .hg/hgrc
194 > [web]
204 > [web]
195 > push_ssl = false
205 > push_ssl = false
196 > allow_push = *
206 > allow_push = *
197 > EOF
207 > EOF
198
208
199 $ hg serve -p $HGPORT -d --pid-file=../hg.pid -E errors.log
209 $ hg serve -p $HGPORT -d --pid-file=../hg.pid -E errors.log
200 $ cat ../hg.pid >> $DAEMON_PIDS
210 $ cat ../hg.pid >> $DAEMON_PIDS
201 $ cd ../a
211 $ cd ../a
202
212
203 $ hg debugpushkey http://localhost:$HGPORT/ namespaces
213 $ hg debugpushkey http://localhost:$HGPORT/ namespaces
204 bookmarks
214 bookmarks
205 phases
215 phases
206 namespaces
216 namespaces
207 $ hg debugpushkey http://localhost:$HGPORT/ bookmarks
217 $ hg debugpushkey http://localhost:$HGPORT/ bookmarks
208 Y 4efff6d98829d9c824c621afd6e3f01865f5439f
218 Y 4efff6d98829d9c824c621afd6e3f01865f5439f
209 foobar 9b140be1080824d768c5a4691a564088eede71f9
219 foobar 9b140be1080824d768c5a4691a564088eede71f9
210 Z 0d2164f0ce0d8f1d6f94351eba04b794909be66c
220 Z 0d2164f0ce0d8f1d6f94351eba04b794909be66c
211 foo 0000000000000000000000000000000000000000
221 foo 0000000000000000000000000000000000000000
212 X 9b140be1080824d768c5a4691a564088eede71f9
222 X 9b140be1080824d768c5a4691a564088eede71f9
213 $ hg out -B http://localhost:$HGPORT/
223 $ hg out -B http://localhost:$HGPORT/
214 comparing with http://localhost:$HGPORT/
224 comparing with http://localhost:$HGPORT/
215 searching for changed bookmarks
225 searching for changed bookmarks
216 no changed bookmarks found
226 no changed bookmarks found
217 [1]
227 [1]
218 $ hg push -B Z http://localhost:$HGPORT/
228 $ hg push -B Z http://localhost:$HGPORT/
219 pushing to http://localhost:$HGPORT/
229 pushing to http://localhost:$HGPORT/
220 searching for changes
230 searching for changes
221 no changes found
231 no changes found
222 exporting bookmark Z
232 exporting bookmark Z
223 [1]
233 [1]
224 $ hg book -d Z
234 $ hg book -d Z
225 $ hg in -B http://localhost:$HGPORT/
235 $ hg in -B http://localhost:$HGPORT/
226 comparing with http://localhost:$HGPORT/
236 comparing with http://localhost:$HGPORT/
227 searching for changed bookmarks
237 searching for changed bookmarks
228 Z 0d2164f0ce0d
238 Z 0d2164f0ce0d
229 foo 000000000000
239 foo 000000000000
230 foobar 9b140be10808
240 foobar 9b140be10808
231 $ hg pull -B Z http://localhost:$HGPORT/
241 $ hg pull -B Z http://localhost:$HGPORT/
232 pulling from http://localhost:$HGPORT/
242 pulling from http://localhost:$HGPORT/
233 no changes found
243 no changes found
234 adding remote bookmark foobar
244 adding remote bookmark foobar
235 adding remote bookmark Z
245 adding remote bookmark Z
236 adding remote bookmark foo
246 adding remote bookmark foo
237 divergent bookmark X stored as X@1
247 divergent bookmark X stored as X@1
238 importing bookmark Z
248 importing bookmark Z
239 $ hg clone http://localhost:$HGPORT/ cloned-bookmarks
249 $ hg clone http://localhost:$HGPORT/ cloned-bookmarks
240 requesting all changes
250 requesting all changes
241 adding changesets
251 adding changesets
242 adding manifests
252 adding manifests
243 adding file changes
253 adding file changes
244 added 5 changesets with 5 changes to 3 files (+3 heads)
254 added 5 changesets with 5 changes to 3 files (+3 heads)
245 updating to branch default
255 updating to branch default
246 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
256 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
247 $ hg -R cloned-bookmarks bookmarks
257 $ hg -R cloned-bookmarks bookmarks
248 X 1:9b140be10808
258 X 1:9b140be10808
249 Y 4:4efff6d98829
259 Y 4:4efff6d98829
250 Z 2:0d2164f0ce0d
260 Z 2:0d2164f0ce0d
251 foo -1:000000000000
261 foo -1:000000000000
252 foobar 1:9b140be10808
262 foobar 1:9b140be10808
253
263
254 $ cd ..
264 $ cd ..
General Comments 0
You need to be logged in to leave comments. Login now