bookmarks: allow existing remote bookmarks to become heads when pushing
Levi Bard
r16835:4267c840 default
@@ -1,256 +1,265 @@
1 # discovery.py - protocol changeset discovery functions
1 # discovery.py - protocol changeset discovery functions
2 #
2 #
3 # Copyright 2010 Matt Mackall <mpm@selenic.com>
3 # Copyright 2010 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from node import nullid, short
8 from node import nullid, short
9 from i18n import _
9 from i18n import _
10 import util, setdiscovery, treediscovery, phases
10 import util, setdiscovery, treediscovery, phases
11
11
12 def findcommonincoming(repo, remote, heads=None, force=False):
12 def findcommonincoming(repo, remote, heads=None, force=False):
13 """Return a tuple (common, anyincoming, heads) used to identify the common
13 """Return a tuple (common, anyincoming, heads) used to identify the common
14 subset of nodes between repo and remote.
14 subset of nodes between repo and remote.
15
15
16 "common" is a list of (at least) the heads of the common subset.
16 "common" is a list of (at least) the heads of the common subset.
17 "anyincoming" is testable as a boolean indicating if any nodes are missing
17 "anyincoming" is testable as a boolean indicating if any nodes are missing
18 locally. If remote does not support getbundle, this actually is a list of
18 locally. If remote does not support getbundle, this actually is a list of
19 roots of the nodes that would be incoming, to be supplied to
19 roots of the nodes that would be incoming, to be supplied to
20 changegroupsubset. No code except for pull should be relying on this fact
20 changegroupsubset. No code except for pull should be relying on this fact
21 any longer.
21 any longer.
22 "heads" is either the supplied heads, or else the remote's heads.
22 "heads" is either the supplied heads, or else the remote's heads.
23
23
24 If you pass heads and they are all known locally, the response lists just
24 If you pass heads and they are all known locally, the response lists just
25 these heads in "common" and in "heads".
25 these heads in "common" and in "heads".
26
26
27 Please use findcommonoutgoing to compute the set of outgoing nodes to give
27 Please use findcommonoutgoing to compute the set of outgoing nodes to give
28 extensions a good hook into outgoing.
28 extensions a good hook into outgoing.
29 """
29 """
30
30
31 if not remote.capable('getbundle'):
31 if not remote.capable('getbundle'):
32 return treediscovery.findcommonincoming(repo, remote, heads, force)
32 return treediscovery.findcommonincoming(repo, remote, heads, force)
33
33
34 if heads:
34 if heads:
35 allknown = True
35 allknown = True
36 nm = repo.changelog.nodemap
36 nm = repo.changelog.nodemap
37 for h in heads:
37 for h in heads:
38 if nm.get(h) is None:
38 if nm.get(h) is None:
39 allknown = False
39 allknown = False
40 break
40 break
41 if allknown:
41 if allknown:
42 return (heads, False, heads)
42 return (heads, False, heads)
43
43
44 res = setdiscovery.findcommonheads(repo.ui, repo, remote,
44 res = setdiscovery.findcommonheads(repo.ui, repo, remote,
45 abortwhenunrelated=not force)
45 abortwhenunrelated=not force)
46 common, anyinc, srvheads = res
46 common, anyinc, srvheads = res
47 return (list(common), anyinc, heads or list(srvheads))
47 return (list(common), anyinc, heads or list(srvheads))
48
48
49 class outgoing(object):
49 class outgoing(object):
50 '''Represents the set of nodes present in a local repo but not in a
50 '''Represents the set of nodes present in a local repo but not in a
51 (possibly) remote one.
51 (possibly) remote one.
52
52
53 Members:
53 Members:
54
54
55 missing is a list of all nodes present in local but not in remote.
55 missing is a list of all nodes present in local but not in remote.
56 common is a list of all nodes shared between the two repos.
56 common is a list of all nodes shared between the two repos.
57 excluded is the list of missing changesets that shouldn't be sent remotely.
57 excluded is the list of missing changesets that shouldn't be sent remotely.
58 missingheads is the list of heads of missing.
58 missingheads is the list of heads of missing.
59 commonheads is the list of heads of common.
59 commonheads is the list of heads of common.
60
60
61 The sets are computed on demand from the heads, unless provided upfront
61 The sets are computed on demand from the heads, unless provided upfront
62 by discovery.'''
62 by discovery.'''
63
63
64 def __init__(self, revlog, commonheads, missingheads):
64 def __init__(self, revlog, commonheads, missingheads):
65 self.commonheads = commonheads
65 self.commonheads = commonheads
66 self.missingheads = missingheads
66 self.missingheads = missingheads
67 self._revlog = revlog
67 self._revlog = revlog
68 self._common = None
68 self._common = None
69 self._missing = None
69 self._missing = None
70 self.excluded = []
70 self.excluded = []
71
71
72 def _computecommonmissing(self):
72 def _computecommonmissing(self):
73 sets = self._revlog.findcommonmissing(self.commonheads,
73 sets = self._revlog.findcommonmissing(self.commonheads,
74 self.missingheads)
74 self.missingheads)
75 self._common, self._missing = sets
75 self._common, self._missing = sets
76
76
77 @util.propertycache
77 @util.propertycache
78 def common(self):
78 def common(self):
79 if self._common is None:
79 if self._common is None:
80 self._computecommonmissing()
80 self._computecommonmissing()
81 return self._common
81 return self._common
82
82
83 @util.propertycache
83 @util.propertycache
84 def missing(self):
84 def missing(self):
85 if self._missing is None:
85 if self._missing is None:
86 self._computecommonmissing()
86 self._computecommonmissing()
87 return self._missing
87 return self._missing
88
88
89 def findcommonoutgoing(repo, other, onlyheads=None, force=False,
89 def findcommonoutgoing(repo, other, onlyheads=None, force=False,
90 commoninc=None, portable=False):
90 commoninc=None, portable=False):
91 '''Return an outgoing instance to identify the nodes present in repo but
91 '''Return an outgoing instance to identify the nodes present in repo but
92 not in other.
92 not in other.
93
93
94 If onlyheads is given, only nodes ancestral to nodes in onlyheads
94 If onlyheads is given, only nodes ancestral to nodes in onlyheads
95 (inclusive) are included. If you already know the local repo's heads,
95 (inclusive) are included. If you already know the local repo's heads,
96 passing them in onlyheads is faster than letting them be recomputed here.
96 passing them in onlyheads is faster than letting them be recomputed here.
97
97
98 If commoninc is given, it must be the result of a prior call to
98 If commoninc is given, it must be the result of a prior call to
99 findcommonincoming(repo, other, force) to avoid recomputing it here.
99 findcommonincoming(repo, other, force) to avoid recomputing it here.
100
100
101 If portable is given, compute more conservative common and missingheads,
101 If portable is given, compute more conservative common and missingheads,
102 to make bundles created from the instance more portable.'''
102 to make bundles created from the instance more portable.'''
103 # declare an empty outgoing object to be filled later
103 # declare an empty outgoing object to be filled later
104 og = outgoing(repo.changelog, None, None)
104 og = outgoing(repo.changelog, None, None)
105
105
106 # get common set if not provided
106 # get common set if not provided
107 if commoninc is None:
107 if commoninc is None:
108 commoninc = findcommonincoming(repo, other, force=force)
108 commoninc = findcommonincoming(repo, other, force=force)
109 og.commonheads, _any, _hds = commoninc
109 og.commonheads, _any, _hds = commoninc
110
110
111 # compute outgoing
111 # compute outgoing
112 if not repo._phasecache.phaseroots[phases.secret]:
112 if not repo._phasecache.phaseroots[phases.secret]:
113 og.missingheads = onlyheads or repo.heads()
113 og.missingheads = onlyheads or repo.heads()
114 elif onlyheads is None:
114 elif onlyheads is None:
115 # use visible heads as it should be cached
115 # use visible heads as it should be cached
116 og.missingheads = phases.visibleheads(repo)
116 og.missingheads = phases.visibleheads(repo)
117 og.excluded = [ctx.node() for ctx in repo.set('secret()')]
117 og.excluded = [ctx.node() for ctx in repo.set('secret()')]
118 else:
118 else:
119 # compute common, missing and exclude secret stuff
119 # compute common, missing and exclude secret stuff
120 sets = repo.changelog.findcommonmissing(og.commonheads, onlyheads)
120 sets = repo.changelog.findcommonmissing(og.commonheads, onlyheads)
121 og._common, allmissing = sets
121 og._common, allmissing = sets
122 og._missing = missing = []
122 og._missing = missing = []
123 og.excluded = excluded = []
123 og.excluded = excluded = []
124 for node in allmissing:
124 for node in allmissing:
125 if repo[node].phase() >= phases.secret:
125 if repo[node].phase() >= phases.secret:
126 excluded.append(node)
126 excluded.append(node)
127 else:
127 else:
128 missing.append(node)
128 missing.append(node)
129 if excluded:
129 if excluded:
130 # update missing heads
130 # update missing heads
131 missingheads = phases.newheads(repo, onlyheads, excluded)
131 missingheads = phases.newheads(repo, onlyheads, excluded)
132 else:
132 else:
133 missingheads = onlyheads
133 missingheads = onlyheads
134 og.missingheads = missingheads
134 og.missingheads = missingheads
135
135
136 if portable:
136 if portable:
137 # recompute common and missingheads as if -r<rev> had been given for
137 # recompute common and missingheads as if -r<rev> had been given for
138 # each head of missing, and --base <rev> for each head of the proper
138 # each head of missing, and --base <rev> for each head of the proper
139 # ancestors of missing
139 # ancestors of missing
140 og._computecommonmissing()
140 og._computecommonmissing()
141 cl = repo.changelog
141 cl = repo.changelog
142 missingrevs = set(cl.rev(n) for n in og._missing)
142 missingrevs = set(cl.rev(n) for n in og._missing)
143 og._common = set(cl.ancestors(*missingrevs)) - missingrevs
143 og._common = set(cl.ancestors(*missingrevs)) - missingrevs
144 commonheads = set(og.commonheads)
144 commonheads = set(og.commonheads)
145 og.missingheads = [h for h in og.missingheads if h not in commonheads]
145 og.missingheads = [h for h in og.missingheads if h not in commonheads]
146
146
147 return og
147 return og
148
148
149 def checkheads(repo, remote, outgoing, remoteheads, newbranch=False, inc=False):
149 def checkheads(repo, remote, outgoing, remoteheads, newbranch=False, inc=False):
150 """Check that a push won't add any outgoing head
150 """Check that a push won't add any outgoing head
151
151
152 raise Abort error and display ui message as needed.
152 raise Abort error and display ui message as needed.
153 """
153 """
154 if remoteheads == [nullid]:
154 if remoteheads == [nullid]:
155 # remote is empty, nothing to check.
155 # remote is empty, nothing to check.
156 return
156 return
157
157
158 cl = repo.changelog
158 cl = repo.changelog
159 if remote.capable('branchmap'):
159 if remote.capable('branchmap'):
160 # Check for each named branch if we're creating new remote heads.
160 # Check for each named branch if we're creating new remote heads.
161 # To be a remote head after push, node must be either:
161 # To be a remote head after push, node must be either:
162 # - unknown locally
162 # - unknown locally
163 # - a local outgoing head descended from update
163 # - a local outgoing head descended from update
164 # - a remote head that's known locally and not
164 # - a remote head that's known locally and not
165 # ancestral to an outgoing head
165 # ancestral to an outgoing head
166
166
167 # 1. Create set of branches involved in the push.
167 # 1. Create set of branches involved in the push.
168 branches = set(repo[n].branch() for n in outgoing.missing)
168 branches = set(repo[n].branch() for n in outgoing.missing)
169
169
170 # 2. Check for new branches on the remote.
170 # 2. Check for new branches on the remote.
171 if remote.local():
171 if remote.local():
172 remotemap = phases.visiblebranchmap(remote)
172 remotemap = phases.visiblebranchmap(remote)
173 else:
173 else:
174 remotemap = remote.branchmap()
174 remotemap = remote.branchmap()
175 newbranches = branches - set(remotemap)
175 newbranches = branches - set(remotemap)
176 if newbranches and not newbranch: # new branch requires --new-branch
176 if newbranches and not newbranch: # new branch requires --new-branch
177 branchnames = ', '.join(sorted(newbranches))
177 branchnames = ', '.join(sorted(newbranches))
178 raise util.Abort(_("push creates new remote branches: %s!")
178 raise util.Abort(_("push creates new remote branches: %s!")
179 % branchnames,
179 % branchnames,
180 hint=_("use 'hg push --new-branch' to create"
180 hint=_("use 'hg push --new-branch' to create"
181 " new remote branches"))
181 " new remote branches"))
182 branches.difference_update(newbranches)
182 branches.difference_update(newbranches)
183
183
184 # 3. Construct the initial oldmap and newmap dicts.
184 # 3. Construct the initial oldmap and newmap dicts.
185 # They contain information about the remote heads before and
185 # They contain information about the remote heads before and
186 # after the push, respectively.
186 # after the push, respectively.
187 # Heads not found locally are not included in either dict,
187 # Heads not found locally are not included in either dict,
188 # since they won't be affected by the push.
188 # since they won't be affected by the push.
189 # unsynced contains all branches with incoming changesets.
189 # unsynced contains all branches with incoming changesets.
190 oldmap = {}
190 oldmap = {}
191 newmap = {}
191 newmap = {}
192 unsynced = set()
192 unsynced = set()
193 for branch in branches:
193 for branch in branches:
194 remotebrheads = remotemap[branch]
194 remotebrheads = remotemap[branch]
195 prunedbrheads = [h for h in remotebrheads if h in cl.nodemap]
195 prunedbrheads = [h for h in remotebrheads if h in cl.nodemap]
196 oldmap[branch] = prunedbrheads
196 oldmap[branch] = prunedbrheads
197 newmap[branch] = list(prunedbrheads)
197 newmap[branch] = list(prunedbrheads)
198 if len(remotebrheads) > len(prunedbrheads):
198 if len(remotebrheads) > len(prunedbrheads):
199 unsynced.add(branch)
199 unsynced.add(branch)
200
200
201 # 4. Update newmap with outgoing changes.
201 # 4. Update newmap with outgoing changes.
202 # This will possibly add new heads and remove existing ones.
202 # This will possibly add new heads and remove existing ones.
203 ctxgen = (repo[n] for n in outgoing.missing)
203 ctxgen = (repo[n] for n in outgoing.missing)
204 repo._updatebranchcache(newmap, ctxgen)
204 repo._updatebranchcache(newmap, ctxgen)
205
205
206 else:
206 else:
207 # 1-4b. old servers: Check for new topological heads.
207 # 1-4b. old servers: Check for new topological heads.
208 # Construct {old,new}map with branch = None (topological branch).
208 # Construct {old,new}map with branch = None (topological branch).
209 # (code based on _updatebranchcache)
209 # (code based on _updatebranchcache)
210 oldheads = set(h for h in remoteheads if h in cl.nodemap)
210 oldheads = set(h for h in remoteheads if h in cl.nodemap)
211 newheads = oldheads.union(outgoing.missing)
211 newheads = oldheads.union(outgoing.missing)
212 if len(newheads) > 1:
212 if len(newheads) > 1:
213 for latest in reversed(outgoing.missing):
213 for latest in reversed(outgoing.missing):
214 if latest not in newheads:
214 if latest not in newheads:
215 continue
215 continue
216 minhrev = min(cl.rev(h) for h in newheads)
216 minhrev = min(cl.rev(h) for h in newheads)
217 reachable = cl.reachable(latest, cl.node(minhrev))
217 reachable = cl.reachable(latest, cl.node(minhrev))
218 reachable.remove(latest)
218 reachable.remove(latest)
219 newheads.difference_update(reachable)
219 newheads.difference_update(reachable)
220 branches = set([None])
220 branches = set([None])
221 newmap = {None: newheads}
221 newmap = {None: newheads}
222 oldmap = {None: oldheads}
222 oldmap = {None: oldheads}
223 unsynced = inc and branches or set()
223 unsynced = inc and branches or set()
224
224
225 # 5. Check for new heads.
225 # 5. Check for new heads.
226 # If there are more heads after the push than before, a suitable
226 # If there are more heads after the push than before, a suitable
227 # error message, depending on unsynced status, is displayed.
227 # error message, depending on unsynced status, is displayed.
228 error = None
228 error = None
229 remotebookmarks = remote.listkeys('bookmarks')
230 localbookmarks = repo._bookmarks
231
229 for branch in branches:
232 for branch in branches:
230 newhs = set(newmap[branch])
233 newhs = set(newmap[branch])
231 oldhs = set(oldmap[branch])
234 oldhs = set(oldmap[branch])
235 dhs = None
232 if len(newhs) > len(oldhs):
236 if len(newhs) > len(oldhs):
233 dhs = list(newhs - oldhs)
237 # strip updates to existing remote heads from the new heads list
238 bookmarkedheads = set([repo[bm].node() for bm in localbookmarks
239 if bm in remotebookmarks and
240 remote[bm] == repo[bm].ancestor(remote[bm])])
241 dhs = list(newhs - bookmarkedheads - oldhs)
242 if dhs:
234 if error is None:
243 if error is None:
235 if branch not in ('default', None):
244 if branch not in ('default', None):
236 error = _("push creates new remote head %s "
245 error = _("push creates new remote head %s "
237 "on branch '%s'!") % (short(dhs[0]), branch)
246 "on branch '%s'!") % (short(dhs[0]), branch)
238 else:
247 else:
239 error = _("push creates new remote head %s!"
248 error = _("push creates new remote head %s!"
240 ) % short(dhs[0])
249 ) % short(dhs[0])
241 if branch in unsynced:
250 if branch in unsynced:
242 hint = _("you should pull and merge or "
251 hint = _("you should pull and merge or "
243 "use push -f to force")
252 "use push -f to force")
244 else:
253 else:
245 hint = _("did you forget to merge? "
254 hint = _("did you forget to merge? "
246 "use push -f to force")
255 "use push -f to force")
247 if branch is not None:
256 if branch is not None:
248 repo.ui.note(_("new remote heads on branch '%s'\n") % branch)
257 repo.ui.note(_("new remote heads on branch '%s'\n") % branch)
249 for h in dhs:
258 for h in dhs:
250 repo.ui.note(_("new remote head %s\n") % short(h))
259 repo.ui.note(_("new remote head %s\n") % short(h))
251 if error:
260 if error:
252 raise util.Abort(error, hint=hint)
261 raise util.Abort(error, hint=hint)
253
262
254 # 6. Check for unsynced changes on involved branches.
263 # 6. Check for unsynced changes on involved branches.
255 if unsynced:
264 if unsynced:
256 repo.ui.warn(_("note: unsynced remote changes!\n"))
265 repo.ui.warn(_("note: unsynced remote changes!\n"))
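
The core of this change is the head accounting in checkheads above: before the push is declared to create new remote heads, heads that merely fast-forward a bookmark known to both sides are subtracted from the candidate set (dhs = list(newhs - bookmarkedheads - oldhs), where the ancestor test is written as remote[bm] == repo[bm].ancestor(remote[bm])). The following is a minimal, self-contained sketch of that set arithmetic; the toy DAG, the bookmark dictionaries and the is_ancestor helper are illustrative stand-ins, not Mercurial's real objects.

# Sketch of the bookmark-aware "new remote head" check (toy data only).

def is_ancestor(parents, old, new):
    """Return True if `old` is an ancestor of (or equal to) `new` in the toy DAG."""
    seen, stack = set(), [new]
    while stack:
        node = stack.pop()
        if node == old:
            return True
        if node in seen:
            continue
        seen.add(node)
        stack.extend(parents.get(node, ()))
    return False

parents = {"c1": ["c0"], "c2": ["c0"]}   # two children of c0: c1 is on the remote, c2 is being pushed
localbookmarks = {"Y": "c2"}             # locally, bookmark Y was moved onto the new changeset
remotebookmarks = {"Y": "c0"}            # remotely, Y still sits on the common ancestor (a non-head)

oldhs = {"c1"}                           # remote heads before the push
newhs = {"c1", "c2"}                     # heads after adding the outgoing changeset

# Heads that only advance a bookmark present on both sides.
bookmarkedheads = set(node for bm, node in localbookmarks.items()
                      if bm in remotebookmarks
                      and is_ancestor(parents, remotebookmarks[bm], node))

if len(newhs) > len(oldhs):
    dhs = sorted(newhs - bookmarkedheads - oldhs)
    if dhs:
        raise SystemExit("push creates new remote head %s!" % dhs[0])
print("push accepted: remote bookmark Y fast-forwards to c2")

Before this change dhs was computed as list(newhs - oldhs), so the push sketched above would have aborted with "push creates new remote head" even though the new head is expected on the remote once bookmark Y is updated. The test hunk that follows exercises exactly this ("update a remote bookmark from a non-head to a head") and also checks that a truly divergent bookmark still fails.
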
@@ -1,215 +1,254 @@
1 $ "$TESTDIR/hghave" serve || exit 80
1 $ "$TESTDIR/hghave" serve || exit 80
2
2
3 initialize
3 initialize
4
4
5 $ hg init a
5 $ hg init a
6 $ cd a
6 $ cd a
7 $ echo 'test' > test
7 $ echo 'test' > test
8 $ hg commit -Am'test'
8 $ hg commit -Am'test'
9 adding test
9 adding test
10
10
11 set bookmarks
11 set bookmarks
12
12
13 $ hg bookmark X
13 $ hg bookmark X
14 $ hg bookmark Y
14 $ hg bookmark Y
15 $ hg bookmark Z
15 $ hg bookmark Z
16
16
17 import bookmark by name
17 import bookmark by name
18
18
19 $ hg init ../b
19 $ hg init ../b
20 $ cd ../b
20 $ cd ../b
21 $ hg book Y
21 $ hg book Y
22 $ hg book
22 $ hg book
23 * Y -1:000000000000
23 * Y -1:000000000000
24 $ hg pull ../a
24 $ hg pull ../a
25 pulling from ../a
25 pulling from ../a
26 requesting all changes
26 requesting all changes
27 adding changesets
27 adding changesets
28 adding manifests
28 adding manifests
29 adding file changes
29 adding file changes
30 added 1 changesets with 1 changes to 1 files
30 added 1 changesets with 1 changes to 1 files
31 updating bookmark Y
31 updating bookmark Y
32 adding remote bookmark X
32 adding remote bookmark X
33 adding remote bookmark Z
33 adding remote bookmark Z
34 (run 'hg update' to get a working copy)
34 (run 'hg update' to get a working copy)
35 $ hg bookmarks
35 $ hg bookmarks
36 X 0:4e3505fd9583
36 X 0:4e3505fd9583
37 Y 0:4e3505fd9583
37 Y 0:4e3505fd9583
38 Z 0:4e3505fd9583
38 Z 0:4e3505fd9583
39 $ hg debugpushkey ../a namespaces
39 $ hg debugpushkey ../a namespaces
40 bookmarks
40 bookmarks
41 phases
41 phases
42 namespaces
42 namespaces
43 $ hg debugpushkey ../a bookmarks
43 $ hg debugpushkey ../a bookmarks
44 Y 4e3505fd95835d721066b76e75dbb8cc554d7f77
44 Y 4e3505fd95835d721066b76e75dbb8cc554d7f77
45 X 4e3505fd95835d721066b76e75dbb8cc554d7f77
45 X 4e3505fd95835d721066b76e75dbb8cc554d7f77
46 Z 4e3505fd95835d721066b76e75dbb8cc554d7f77
46 Z 4e3505fd95835d721066b76e75dbb8cc554d7f77
47 $ hg pull -B X ../a
47 $ hg pull -B X ../a
48 pulling from ../a
48 pulling from ../a
49 no changes found
49 no changes found
50 importing bookmark X
50 importing bookmark X
51 $ hg bookmark
51 $ hg bookmark
52 X 0:4e3505fd9583
52 X 0:4e3505fd9583
53 Y 0:4e3505fd9583
53 Y 0:4e3505fd9583
54 Z 0:4e3505fd9583
54 Z 0:4e3505fd9583
55
55
56 export bookmark by name
56 export bookmark by name
57
57
58 $ hg bookmark W
58 $ hg bookmark W
59 $ hg bookmark foo
59 $ hg bookmark foo
60 $ hg bookmark foobar
60 $ hg bookmark foobar
61 $ hg push -B W ../a
61 $ hg push -B W ../a
62 pushing to ../a
62 pushing to ../a
63 searching for changes
63 searching for changes
64 no changes found
64 no changes found
65 exporting bookmark W
65 exporting bookmark W
66 [1]
66 [1]
67 $ hg -R ../a bookmarks
67 $ hg -R ../a bookmarks
68 W -1:000000000000
68 W -1:000000000000
69 X 0:4e3505fd9583
69 X 0:4e3505fd9583
70 Y 0:4e3505fd9583
70 Y 0:4e3505fd9583
71 * Z 0:4e3505fd9583
71 * Z 0:4e3505fd9583
72
72
73 delete a remote bookmark
73 delete a remote bookmark
74
74
75 $ hg book -d W
75 $ hg book -d W
76 $ hg push -B W ../a
76 $ hg push -B W ../a
77 pushing to ../a
77 pushing to ../a
78 searching for changes
78 searching for changes
79 no changes found
79 no changes found
80 deleting remote bookmark W
80 deleting remote bookmark W
81 [1]
81 [1]
82
82
83 push/pull name that doesn't exist
83 push/pull name that doesn't exist
84
84
85 $ hg push -B badname ../a
85 $ hg push -B badname ../a
86 pushing to ../a
86 pushing to ../a
87 searching for changes
87 searching for changes
88 no changes found
88 no changes found
89 bookmark badname does not exist on the local or remote repository!
89 bookmark badname does not exist on the local or remote repository!
90 [2]
90 [2]
91 $ hg pull -B anotherbadname ../a
91 $ hg pull -B anotherbadname ../a
92 pulling from ../a
92 pulling from ../a
93 abort: remote bookmark anotherbadname not found!
93 abort: remote bookmark anotherbadname not found!
94 [255]
94 [255]
95
95
96 divergent bookmarks
96 divergent bookmarks
97
97
98 $ cd ../a
98 $ cd ../a
99 $ echo c1 > f1
99 $ echo c1 > f1
100 $ hg ci -Am1
100 $ hg ci -Am1
101 adding f1
101 adding f1
102 $ hg book -f X
102 $ hg book -f X
103 $ hg book
103 $ hg book
104 * X 1:0d2164f0ce0d
104 * X 1:0d2164f0ce0d
105 Y 0:4e3505fd9583
105 Y 0:4e3505fd9583
106 Z 1:0d2164f0ce0d
106 Z 1:0d2164f0ce0d
107
107
108 $ cd ../b
108 $ cd ../b
109 $ hg up
109 $ hg up
110 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
110 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
111 updating bookmark foobar
111 updating bookmark foobar
112 $ echo c2 > f2
112 $ echo c2 > f2
113 $ hg ci -Am2
113 $ hg ci -Am2
114 adding f2
114 adding f2
115 $ hg book -f X
115 $ hg book -f X
116 $ hg book
116 $ hg book
117 * X 1:9b140be10808
117 * X 1:9b140be10808
118 Y 0:4e3505fd9583
118 Y 0:4e3505fd9583
119 Z 0:4e3505fd9583
119 Z 0:4e3505fd9583
120 foo -1:000000000000
120 foo -1:000000000000
121 foobar 1:9b140be10808
121 foobar 1:9b140be10808
122
122
123 $ hg pull --config paths.foo=../a foo
123 $ hg pull --config paths.foo=../a foo
124 pulling from $TESTTMP/a
124 pulling from $TESTTMP/a
125 searching for changes
125 searching for changes
126 adding changesets
126 adding changesets
127 adding manifests
127 adding manifests
128 adding file changes
128 adding file changes
129 added 1 changesets with 1 changes to 1 files (+1 heads)
129 added 1 changesets with 1 changes to 1 files (+1 heads)
130 divergent bookmark X stored as X@foo
130 divergent bookmark X stored as X@foo
131 updating bookmark Z
131 updating bookmark Z
132 (run 'hg heads' to see heads, 'hg merge' to merge)
132 (run 'hg heads' to see heads, 'hg merge' to merge)
133 $ hg book
133 $ hg book
134 * X 1:9b140be10808
134 * X 1:9b140be10808
135 X@foo 2:0d2164f0ce0d
135 X@foo 2:0d2164f0ce0d
136 Y 0:4e3505fd9583
136 Y 0:4e3505fd9583
137 Z 2:0d2164f0ce0d
137 Z 2:0d2164f0ce0d
138 foo -1:000000000000
138 foo -1:000000000000
139 foobar 1:9b140be10808
139 foobar 1:9b140be10808
140 $ hg push -f ../a
140 $ hg push -f ../a
141 pushing to ../a
141 pushing to ../a
142 searching for changes
142 searching for changes
143 adding changesets
143 adding changesets
144 adding manifests
144 adding manifests
145 adding file changes
145 adding file changes
146 added 1 changesets with 1 changes to 1 files (+1 heads)
146 added 1 changesets with 1 changes to 1 files (+1 heads)
147 $ hg -R ../a book
147 $ hg -R ../a book
148 * X 1:0d2164f0ce0d
148 * X 1:0d2164f0ce0d
149 Y 0:4e3505fd9583
149 Y 0:4e3505fd9583
150 Z 1:0d2164f0ce0d
150 Z 1:0d2164f0ce0d
151
151
152 update a remote bookmark from a non-head to a head
153
154 $ hg up -q Y
155 $ echo c3 > f2
156 $ hg ci -Am3
157 adding f2
158 created new head
159 $ hg push ../a
160 pushing to ../a
161 searching for changes
162 adding changesets
163 adding manifests
164 adding file changes
165 added 1 changesets with 1 changes to 1 files (+1 heads)
166 updating bookmark Y
167 $ hg -R ../a book
168 * X 1:0d2164f0ce0d
169 Y 3:f6fc62dde3c0
170 Z 1:0d2164f0ce0d
171
172 diverging a remote bookmark fails
173
174 $ hg up -q 4e3505fd9583
175 $ echo c4 > f2
176 $ hg ci -Am4
177 adding f2
178 created new head
179 $ hg book -f Y
180 $ hg push ../a
181 pushing to ../a
182 searching for changes
183 abort: push creates new remote head 4efff6d98829!
184 (did you forget to merge? use push -f to force)
185 [255]
186 $ hg -R ../a book
187 * X 1:0d2164f0ce0d
188 Y 3:f6fc62dde3c0
189 Z 1:0d2164f0ce0d
190
152 hgweb
191 hgweb
153
192
154 $ cat <<EOF > .hg/hgrc
193 $ cat <<EOF > .hg/hgrc
155 > [web]
194 > [web]
156 > push_ssl = false
195 > push_ssl = false
157 > allow_push = *
196 > allow_push = *
158 > EOF
197 > EOF
159
198
160 $ hg serve -p $HGPORT -d --pid-file=../hg.pid -E errors.log
199 $ hg serve -p $HGPORT -d --pid-file=../hg.pid -E errors.log
161 $ cat ../hg.pid >> $DAEMON_PIDS
200 $ cat ../hg.pid >> $DAEMON_PIDS
162 $ cd ../a
201 $ cd ../a
163
202
164 $ hg debugpushkey http://localhost:$HGPORT/ namespaces
203 $ hg debugpushkey http://localhost:$HGPORT/ namespaces
165 bookmarks
204 bookmarks
166 phases
205 phases
167 namespaces
206 namespaces
168 $ hg debugpushkey http://localhost:$HGPORT/ bookmarks
207 $ hg debugpushkey http://localhost:$HGPORT/ bookmarks
169 Y 4e3505fd95835d721066b76e75dbb8cc554d7f77
208 Y 4efff6d98829d9c824c621afd6e3f01865f5439f
170 foobar 9b140be1080824d768c5a4691a564088eede71f9
209 foobar 9b140be1080824d768c5a4691a564088eede71f9
171 Z 0d2164f0ce0d8f1d6f94351eba04b794909be66c
210 Z 0d2164f0ce0d8f1d6f94351eba04b794909be66c
172 foo 0000000000000000000000000000000000000000
211 foo 0000000000000000000000000000000000000000
173 X 9b140be1080824d768c5a4691a564088eede71f9
212 X 9b140be1080824d768c5a4691a564088eede71f9
174 $ hg out -B http://localhost:$HGPORT/
213 $ hg out -B http://localhost:$HGPORT/
175 comparing with http://localhost:$HGPORT/
214 comparing with http://localhost:$HGPORT/
176 searching for changed bookmarks
215 searching for changed bookmarks
177 no changed bookmarks found
216 no changed bookmarks found
178 [1]
217 [1]
179 $ hg push -B Z http://localhost:$HGPORT/
218 $ hg push -B Z http://localhost:$HGPORT/
180 pushing to http://localhost:$HGPORT/
219 pushing to http://localhost:$HGPORT/
181 searching for changes
220 searching for changes
182 no changes found
221 no changes found
183 exporting bookmark Z
222 exporting bookmark Z
184 [1]
223 [1]
185 $ hg book -d Z
224 $ hg book -d Z
186 $ hg in -B http://localhost:$HGPORT/
225 $ hg in -B http://localhost:$HGPORT/
187 comparing with http://localhost:$HGPORT/
226 comparing with http://localhost:$HGPORT/
188 searching for changed bookmarks
227 searching for changed bookmarks
189 Z 0d2164f0ce0d
228 Z 0d2164f0ce0d
190 foo 000000000000
229 foo 000000000000
191 foobar 9b140be10808
230 foobar 9b140be10808
192 $ hg pull -B Z http://localhost:$HGPORT/
231 $ hg pull -B Z http://localhost:$HGPORT/
193 pulling from http://localhost:$HGPORT/
232 pulling from http://localhost:$HGPORT/
194 no changes found
233 no changes found
195 adding remote bookmark foobar
234 adding remote bookmark foobar
196 adding remote bookmark Z
235 adding remote bookmark Z
197 adding remote bookmark foo
236 adding remote bookmark foo
198 divergent bookmark X stored as X@1
237 divergent bookmark X stored as X@1
199 importing bookmark Z
238 importing bookmark Z
200 $ hg clone http://localhost:$HGPORT/ cloned-bookmarks
239 $ hg clone http://localhost:$HGPORT/ cloned-bookmarks
201 requesting all changes
240 requesting all changes
202 adding changesets
241 adding changesets
203 adding manifests
242 adding manifests
204 adding file changes
243 adding file changes
205 added 3 changesets with 3 changes to 3 files (+1 heads)
244 added 5 changesets with 5 changes to 3 files (+3 heads)
206 updating to branch default
245 updating to branch default
207 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
246 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
208 $ hg -R cloned-bookmarks bookmarks
247 $ hg -R cloned-bookmarks bookmarks
209 X 1:9b140be10808
248 X 1:9b140be10808
210 Y 0:4e3505fd9583
249 Y 4:4efff6d98829
211 Z 2:0d2164f0ce0d
250 Z 2:0d2164f0ce0d
212 foo -1:000000000000
251 foo -1:000000000000
213 foobar 1:9b140be10808
252 foobar 1:9b140be10808
214
253
215 $ kill `cat ../hg.pid`
254 $ kill `cat ../hg.pid`
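
For orientation, the docstrings in the discovery.py hunk describe how these helpers fit together on the push path: findcommonincoming discovers the shared subset (returning a (common, anyincoming, heads) tuple), findcommonoutgoing turns that into an outgoing instance, and checkheads vetoes pushes that would add remote heads. The sketch below is an approximate reconstruction of that call sequence from those docstrings, not a verbatim excerpt of Mercurial's push code; the repo and remote peers, and the force/newbranch flags, are assumed to be supplied by the caller.

from mercurial import discovery

def checkpush(repo, remote, force=False, newbranch=False):
    # 1. Discover the common subset; the third element is the remote's heads.
    commoninc = discovery.findcommonincoming(repo, remote, force=force)
    common, anyincoming, remoteheads = commoninc

    # 2. Compute the outgoing nodes, passing commoninc so discovery
    #    is not rerun (see the findcommonoutgoing docstring).
    outgoing = discovery.findcommonoutgoing(repo, remote, force=force,
                                            commoninc=commoninc)

    # 3. Abort if the push would create unexpected remote heads; with this
    #    changeset, heads that only fast-forward a bookmark present on both
    #    sides are no longer counted as new.
    if not force:
        discovery.checkheads(repo, remote, outgoing, remoteheads,
                             newbranch=newbranch, inc=bool(anyincoming))
    return outgoing

Mercurial's actual push wraps this sequence with locking, phase handling, changegroup generation and the bookmark exchange shown in the tests above, all of which are omitted here.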