##// END OF EJS Templates
phases: do not exchange secret changesets...
Pierre-Yves David -
r15713:cff25e4b default
parent child Browse files
Show More
@@ -1,202 +1,218 b''
1 # discovery.py - protocol changeset discovery functions
1 # discovery.py - protocol changeset discovery functions
2 #
2 #
3 # Copyright 2010 Matt Mackall <mpm@selenic.com>
3 # Copyright 2010 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from node import nullid, short
8 from node import nullid, short
9 from i18n import _
9 from i18n import _
10 import util, setdiscovery, treediscovery
10 import util, setdiscovery, treediscovery
11
11
def findcommonincoming(repo, remote, heads=None, force=False):
    """Return a tuple (common, anyincoming, heads) used to identify the common
    subset of nodes between repo and remote.

    "common" is a list of (at least) the heads of the common subset.
    "anyincoming" is testable as a boolean indicating if any nodes are missing
    locally. If remote does not support getbundle, this actually is a list of
    roots of the nodes that would be incoming, to be supplied to
    changegroupsubset. No code except for pull should be relying on this fact
    any longer.
    "heads" is either the supplied heads, or else the remote's heads.

    If you pass heads and they are all known locally, the response lists just
    these heads in "common" and in "heads".

    Please use findcommonoutgoing to compute the set of outgoing nodes to give
    extensions a good hook into outgoing.
    """

    # Old servers without getbundle support fall back to tree discovery.
    if not remote.capable('getbundle'):
        return treediscovery.findcommonincoming(repo, remote, heads, force)

    if heads:
        # Short-circuit: if every requested head is already known locally,
        # there is nothing to discover.
        nm = repo.changelog.nodemap
        allknown = all(nm.get(h) is not None for h in heads)
        if allknown:
            return (heads, False, heads)

    res = setdiscovery.findcommonheads(repo.ui, repo, remote,
                                       abortwhenunrelated=not force)
    common, anyinc, srvheads = res
    return (list(common), anyinc, heads or list(srvheads))
48
48
def findcommonoutgoing(repo, other, onlyheads=None, force=False, commoninc=None):
    '''Return a tuple (common, heads) used to identify the set of nodes
    present in repo but not in other.

    If onlyheads is given, only nodes ancestral to nodes in onlyheads (inclusive)
    are included. If you already know the local repo's heads, passing them in
    onlyheads is faster than letting them be recomputed here.

    If commoninc is given, it must be the result of a prior call to
    findcommonincoming(repo, other, force) to avoid recomputing it here.

    The returned tuple is meant to be passed to changelog.findmissing.'''
    # Only the "common" element of the incoming triple is needed here.
    common, _any, _hds = commoninc or findcommonincoming(repo, other, force=force)
    return (common, onlyheads or repo.heads())
63
63
def prepush(repo, remote, force, revs, newbranch):
    '''Analyze the local and remote repositories and determine which
    changesets need to be pushed to the remote. Return value depends
    on circumstances:

    If we are not going to push anything, return a tuple (None,
    outgoing, common) where outgoing is 0 if there are no outgoing
    changesets and 1 if there are, but we refuse to push them
    (e.g. would create new remote heads). The third element "common"
    is the list of heads of the common set between local and remote.

    Otherwise, return a tuple (changegroup, remoteheads, futureheads),
    where changegroup is a readable file-like object whose read()
    returns successive changegroup chunks ready to be sent over the
    wire, remoteheads is the list of remote heads and futureheads is
    the list of heads of the common set between local and remote to
    be after push completion.
    '''
    commoninc = findcommonincoming(repo, remote, force=force)
    common, revs = findcommonoutgoing(repo, remote, onlyheads=revs,
                                      commoninc=commoninc, force=force)
    _common, inc, remoteheads = commoninc

    cl = repo.changelog
    alloutg = cl.findmissing(common, revs)
    # Split outgoing changesets by phase: changesets in phase >= 2
    # ("secret") must never be exchanged with the remote, so they are
    # filtered out of the push set here.
    outg = []
    secret = []
    for o in alloutg:
        if repo[o].phase() >= 2:
            secret.append(o)
        else:
            outg.append(o)

    if not outg:
        # Mention withheld secret changesets so the user understands why
        # "no changes" is reported despite local-only work existing.
        if secret:
            repo.ui.status(_("no changes to push but %i secret changesets\n")
                           % len(secret))
        else:
            repo.ui.status(_("no changes found\n"))
        return None, 1, common

    if secret:
        # recompute target revs: the original revs may include (or be
        # ancestors of) secret changesets that were just filtered out
        revs = [ctx.node() for ctx in repo.set('heads(::(%ld))',
                                               map(repo.changelog.rev, outg))]

    if not force and remoteheads != [nullid]:
        if remote.capable('branchmap'):
            # Check for each named branch if we're creating new remote heads.
            # To be a remote head after push, node must be either:
            # - unknown locally
            # - a local outgoing head descended from update
            # - a remote head that's known locally and not
            #   ancestral to an outgoing head

            # 1. Create set of branches involved in the push.
            branches = set(repo[n].branch() for n in outg)

            # 2. Check for new branches on the remote.
            remotemap = remote.branchmap()
            newbranches = branches - set(remotemap)
            if newbranches and not newbranch: # new branch requires --new-branch
                branchnames = ', '.join(sorted(newbranches))
                raise util.Abort(_("push creates new remote branches: %s!")
                                 % branchnames,
                                 hint=_("use 'hg push --new-branch' to create"
                                        " new remote branches"))
            branches.difference_update(newbranches)

            # 3. Construct the initial oldmap and newmap dicts.
            # They contain information about the remote heads before and
            # after the push, respectively.
            # Heads not found locally are not included in either dict,
            # since they won't be affected by the push.
            # unsynced contains all branches with incoming changesets.
            oldmap = {}
            newmap = {}
            unsynced = set()
            for branch in branches:
                remotebrheads = remotemap[branch]
                prunedbrheads = [h for h in remotebrheads if h in cl.nodemap]
                oldmap[branch] = prunedbrheads
                newmap[branch] = list(prunedbrheads)
                if len(remotebrheads) > len(prunedbrheads):
                    unsynced.add(branch)

            # 4. Update newmap with outgoing changes.
            # This will possibly add new heads and remove existing ones.
            ctxgen = (repo[n] for n in outg)
            repo._updatebranchcache(newmap, ctxgen)

        else:
            # 1-4b. old servers: Check for new topological heads.
            # Construct {old,new}map with branch = None (topological branch).
            # (code based on _updatebranchcache)
            oldheads = set(h for h in remoteheads if h in cl.nodemap)
            newheads = oldheads.union(outg)
            if len(newheads) > 1:
                for latest in reversed(outg):
                    if latest not in newheads:
                        continue
                    minhrev = min(cl.rev(h) for h in newheads)
                    reachable = cl.reachable(latest, cl.node(minhrev))
                    reachable.remove(latest)
                    newheads.difference_update(reachable)
            branches = set([None])
            newmap = {None: newheads}
            oldmap = {None: oldheads}
            unsynced = inc and branches or set()

        # 5. Check for new heads.
        # If there are more heads after the push than before, a suitable
        # error message, depending on unsynced status, is displayed.
        error = None
        for branch in branches:
            newhs = set(newmap[branch])
            oldhs = set(oldmap[branch])
            if len(newhs) > len(oldhs):
                dhs = list(newhs - oldhs)
                if error is None:
                    # only report the first offending branch; the hint is
                    # chosen based on whether that branch has incoming changes
                    if branch not in ('default', None):
                        error = _("push creates new remote head %s "
                                  "on branch '%s'!") % (short(dhs[0]), branch)
                    else:
                        error = _("push creates new remote head %s!"
                                  ) % short(dhs[0])
                    if branch in unsynced:
                        hint = _("you should pull and merge or "
                                 "use push -f to force")
                    else:
                        hint = _("did you forget to merge? "
                                 "use push -f to force")
                if branch is not None:
                    repo.ui.note(_("new remote heads on branch '%s'\n") % branch)
                for h in dhs:
                    repo.ui.note(_("new remote head %s\n") % short(h))
        if error:
            raise util.Abort(error, hint=hint)

        # 6. Check for unsynced changes on involved branches.
        if unsynced:
            repo.ui.warn(_("note: unsynced remote changes!\n"))

    if revs is None:
        # use the fast path, no race possible on push
        cg = repo._changegroup(outg, 'push')
    else:
        cg = repo.getbundle('push', heads=revs, common=common)
    # no need to compute outg ancestor. All node in outg have either:
    # - parents in outg
    # - parents in common
    # - nullid parent
    rset = repo.set('heads(%ln + %ln)', common, outg)
    futureheads = [ctx.node() for ctx in rset]
    return cg, remoteheads, futureheads
@@ -1,194 +1,195 b''
1 # setdiscovery.py - improved discovery of common nodeset for mercurial
1 # setdiscovery.py - improved discovery of common nodeset for mercurial
2 #
2 #
3 # Copyright 2010 Benoit Boissinot <bboissin@gmail.com>
3 # Copyright 2010 Benoit Boissinot <bboissin@gmail.com>
4 # and Peter Arrenbrecht <peter@arrenbrecht.ch>
4 # and Peter Arrenbrecht <peter@arrenbrecht.ch>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 from node import nullid
9 from node import nullid
10 from i18n import _
10 from i18n import _
11 import random, collections, util, dagutil
11 import random, collections, util, dagutil
12 import phases
12
13
13 def _updatesample(dag, nodes, sample, always, quicksamplesize=0):
14 def _updatesample(dag, nodes, sample, always, quicksamplesize=0):
14 # if nodes is empty we scan the entire graph
15 # if nodes is empty we scan the entire graph
15 if nodes:
16 if nodes:
16 heads = dag.headsetofconnecteds(nodes)
17 heads = dag.headsetofconnecteds(nodes)
17 else:
18 else:
18 heads = dag.heads()
19 heads = dag.heads()
19 dist = {}
20 dist = {}
20 visit = collections.deque(heads)
21 visit = collections.deque(heads)
21 seen = set()
22 seen = set()
22 factor = 1
23 factor = 1
23 while visit:
24 while visit:
24 curr = visit.popleft()
25 curr = visit.popleft()
25 if curr in seen:
26 if curr in seen:
26 continue
27 continue
27 d = dist.setdefault(curr, 1)
28 d = dist.setdefault(curr, 1)
28 if d > factor:
29 if d > factor:
29 factor *= 2
30 factor *= 2
30 if d == factor:
31 if d == factor:
31 if curr not in always: # need this check for the early exit below
32 if curr not in always: # need this check for the early exit below
32 sample.add(curr)
33 sample.add(curr)
33 if quicksamplesize and (len(sample) >= quicksamplesize):
34 if quicksamplesize and (len(sample) >= quicksamplesize):
34 return
35 return
35 seen.add(curr)
36 seen.add(curr)
36 for p in dag.parents(curr):
37 for p in dag.parents(curr):
37 if not nodes or p in nodes:
38 if not nodes or p in nodes:
38 dist.setdefault(p, d + 1)
39 dist.setdefault(p, d + 1)
39 visit.append(p)
40 visit.append(p)
40
41
41 def _setupsample(dag, nodes, size):
42 def _setupsample(dag, nodes, size):
42 if len(nodes) <= size:
43 if len(nodes) <= size:
43 return set(nodes), None, 0
44 return set(nodes), None, 0
44 always = dag.headsetofconnecteds(nodes)
45 always = dag.headsetofconnecteds(nodes)
45 desiredlen = size - len(always)
46 desiredlen = size - len(always)
46 if desiredlen <= 0:
47 if desiredlen <= 0:
47 # This could be bad if there are very many heads, all unknown to the
48 # This could be bad if there are very many heads, all unknown to the
48 # server. We're counting on long request support here.
49 # server. We're counting on long request support here.
49 return always, None, desiredlen
50 return always, None, desiredlen
50 return always, set(), desiredlen
51 return always, set(), desiredlen
51
52
def _takequicksample(dag, nodes, size, initial):
    """Return a cheap sample of *nodes* with at most *size* entries.

    When *initial* is true the underlying BFS scans the whole graph
    instead of being restricted to *nodes*.
    """
    always, sample, desiredlen = _setupsample(dag, nodes, size)
    if sample is None:
        # sample fully determined already
        return always
    fromset = None if initial else nodes
    _updatesample(dag, fromset, sample, always, quicksamplesize=desiredlen)
    sample.update(always)
    return sample
63
64
def _takefullsample(dag, nodes, size):
    """Return a sample of *nodes* of roughly *size* entries, sampled from
    both the head side and the root side of the subgraph."""
    always, sample, desiredlen = _setupsample(dag, nodes, size)
    if sample is None:
        # sample fully determined already
        return always
    # update from heads
    _updatesample(dag, nodes, sample, always)
    # update from roots
    _updatesample(dag.inverse(), nodes, sample, always)
    assert sample
    if len(sample) > desiredlen:
        # too many candidates: thin the sample at random
        sample = set(random.sample(sample, desiredlen))
    elif len(sample) < desiredlen:
        # too few: top up with random not-yet-sampled nodes
        extra = desiredlen - len(sample)
        sample.update(random.sample(list(nodes - sample - always), extra))
    sample.update(always)
    return sample
80
81
def findcommonheads(ui, local, remote,
                    initialsamplesize=100,
                    fullsamplesize=200,
                    abortwhenunrelated=True):
    '''Return a tuple (common, anyincoming, remoteheads) used to identify
    missing nodes from or in remote.

    shortcutlocal determines whether we try use direct access to localrepo if
    remote is actually local.
    '''
    roundtrips = 0
    cl = local.changelog
    dag = dagutil.revlogdag(cl)

    # early exit if we know all the specified remote heads already
    ui.debug("query 1; heads\n")
    roundtrips += 1
    ownheads = dag.heads()
    sample = ownheads
    if remote.local():
        # stopgap until we have a proper localpeer that supports batch()
        # NOTE: visibleheads (rather than remote.heads()) keeps secret
        # changesets of a local peer out of the exchange.
        srvheadhashes = phases.visibleheads(remote)
        yesno = remote.known(dag.externalizeall(sample))
    elif remote.capable('batch'):
        # one round-trip for both the heads and known queries
        batch = remote.batch()
        srvheadhashesref = batch.heads()
        yesnoref = batch.known(dag.externalizeall(sample))
        batch.submit()
        srvheadhashes = srvheadhashesref.value
        yesno = yesnoref.value
    else:
        # compatibility with pre-batch, but post-known remotes during 1.9 devel
        srvheadhashes = remote.heads()
        sample = []

    # local repository is empty: everything on the remote is incoming
    if cl.tip() == nullid:
        if srvheadhashes != [nullid]:
            return [nullid], True, srvheadhashes
        return [nullid], False, []

    # start actual discovery (we note this before the next "if" for
    # compatibility reasons)
    ui.status(_("searching for changes\n"))

    srvheads = dag.internalizeall(srvheadhashes, filterunknown=True)
    if len(srvheads) == len(srvheadhashes):
        ui.debug("all remote heads known locally\n")
        return (srvheadhashes, False, srvheadhashes,)

    if sample and util.all(yesno):
        ui.note(_("all local heads known remotely\n"))
        ownheadhashes = dag.externalizeall(ownheads)
        return (ownheadhashes, True, srvheadhashes,)

    # full blown discovery
    undecided = dag.nodeset() # own nodes where I don't know if remote knows them
    common = set() # own nodes I know we both know
    missing = set() # own nodes I know remote lacks

    # treat remote heads (and maybe own heads) as a first implicit sample response
    common.update(dag.ancestorset(srvheads))
    undecided.difference_update(common)

    full = False
    while undecided:

        if sample:
            # classify the previous sample's answers: ancestors of common
            # nodes are common, descendants of missing nodes are missing
            commoninsample = set(n for i, n in enumerate(sample) if yesno[i])
            common.update(dag.ancestorset(commoninsample, common))

            missinginsample = [n for i, n in enumerate(sample) if not yesno[i]]
            missing.update(dag.descendantset(missinginsample, missing))

            undecided.difference_update(missing)
            undecided.difference_update(common)

        if not undecided:
            break

        if full:
            ui.note(_("sampling from both directions\n"))
            sample = _takefullsample(dag, undecided, size=fullsamplesize)
        elif common:
            # use cheapish initial sample
            ui.debug("taking initial sample\n")
            sample = _takefullsample(dag, undecided, size=fullsamplesize)
        else:
            # use even cheaper initial sample
            ui.debug("taking quick initial sample\n")
            sample = _takequicksample(dag, undecided, size=initialsamplesize,
                                      initial=True)

        roundtrips += 1
        ui.progress(_('searching'), roundtrips, unit=_('queries'))
        ui.debug("query %i; still undecided: %i, sample size is: %i\n"
                 % (roundtrips, len(undecided), len(sample)))
        # indices between sample and externalized version must match
        sample = list(sample)
        yesno = remote.known(dag.externalizeall(sample))
        full = True

    result = dag.headsetofconnecteds(common)
    ui.progress(_('searching'), None)
    ui.debug("%d total queries\n" % roundtrips)

    if not result and srvheadhashes != [nullid]:
        # no common ancestry at all between the two repositories
        if abortwhenunrelated:
            raise util.Abort(_("repository is unrelated"))
        else:
            ui.warn(_("warning: repository is unrelated\n"))
        return (set([nullid]), True, srvheadhashes,)

    anyincoming = (srvheadhashes != [nullid])
    return dag.externalizeall(result), anyincoming, srvheadhashes
@@ -1,609 +1,610 b''
1 # wireproto.py - generic wire protocol support functions
1 # wireproto.py - generic wire protocol support functions
2 #
2 #
3 # Copyright 2005-2010 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2010 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 import urllib, tempfile, os, sys
8 import urllib, tempfile, os, sys
9 from i18n import _
9 from i18n import _
10 from node import bin, hex
10 from node import bin, hex
11 import changegroup as changegroupmod
11 import changegroup as changegroupmod
12 import repo, error, encoding, util, store
12 import repo, error, encoding, util, store
13 import phases
13
14
14 # abstract batching support
15 # abstract batching support
15
16
class future(object):
    '''placeholder for a value to be set later'''
    def set(self, value):
        # a future may be resolved only once; a second set() is a
        # programming error, so fail loudly instead of overwriting
        if util.safehasattr(self, 'value'):
            raise error.RepoError("future is already set")
        self.value = value
22
23
class batcher(object):
    '''base class for batches of commands submittable in a single request

    All methods invoked on instances of this class are simply queued and
    return a future for the result. Once you call submit(), all the queued
    calls are performed and the results set in their respective futures.
    '''
    def __init__(self):
        self.calls = []
    def __getattr__(self, name):
        # any attribute access yields a callable that queues the call
        # and hands back a future for its eventual result
        def call(*args, **opts):
            resref = future()
            self.calls.append((name, args, opts, resref,))
            return resref
        return call
    def submit(self):
        # subclasses decide how the queued calls are actually executed
        pass
40
41
class localbatch(batcher):
    '''performs the queued calls directly'''
    def __init__(self, local):
        batcher.__init__(self)
        self.local = local
    def submit(self):
        # no batching needed locally: dispatch each queued call in order
        # and resolve its future immediately
        for name, args, opts, resref in self.calls:
            resref.set(getattr(self.local, name)(*args, **opts))
49
50
class remotebatch(batcher):
    '''batches the queued calls; uses as few roundtrips as possible'''
    def __init__(self, remote):
        '''remote must support _submitbatch(encbatch) and _submitone(op, encargs)'''
        batcher.__init__(self)
        self.remote = remote
    def submit(self):
        req, rsp = [], []
        for name, args, opts, resref in self.calls:
            mtd = getattr(self.remote, name)
            batchablefn = getattr(mtd, 'batchable', None)
            if batchablefn is not None:
                # run the encoding half of the coroutine (see batchable())
                batchable = batchablefn(mtd.im_self, *args, **opts)
                encargsorres, encresref = batchable.next()
                if encresref:
                    # a real wire call: queue it for the batch request
                    req.append((name, encargsorres,))
                    rsp.append((batchable, encresref, resref,))
                else:
                    # the coroutine produced a locally computed result
                    resref.set(encargsorres)
            else:
                # not batchable: flush any pending batch first so call
                # ordering is preserved, then invoke directly
                if req:
                    self._submitreq(req, rsp)
                    req, rsp = [], []
                resref.set(mtd(*args, **opts))
        if req:
            self._submitreq(req, rsp)
    def _submitreq(self, req, rsp):
        # one wire roundtrip for all queued batchable calls; feed each
        # encoded result back into its coroutine to decode it
        encresults = self.remote._submitbatch(req)
        for encres, r in zip(encresults, rsp):
            batchable, encresref, resref = r
            encresref.set(encres)
            resref.set(batchable.next())
82
83
def batchable(f):
    '''annotation for batchable methods

    Such methods must implement a coroutine as follows:

    @batchable
    def sample(self, one, two=None):
        # Handle locally computable results first:
        if not one:
            yield "a local result", None
        # Build list of encoded arguments suitable for your wire protocol:
        encargs = [('one', encode(one),), ('two', encode(two),)]
        # Create future for injection of encoded result:
        encresref = future()
        # Return encoded arguments and future:
        yield encargs, encresref
        # Assuming the future to be filled with the result from the batched
        # request now. Decode it:
        yield decode(encresref.value)

    The decorator returns a function which wraps this coroutine as a plain
    method, but adds the original method as an attribute called "batchable",
    which is used by remotebatch to split the call into separate encoding
    and decoding phases.
    '''
    def plain(*args, **opts):
        batchable = f(*args, **opts)
        encargsorres, encresref = batchable.next()
        if not encresref:
            return encargsorres # a local result in this case
        self = args[0]
        # not batched: submit the single encoded call immediately
        encresref.set(self._submitone(f.func_name, encargsorres))
        return batchable.next()
    setattr(plain, 'batchable', f)
    return plain
118
119
119 # list of nodes encoding / decoding
120 # list of nodes encoding / decoding
120
121
def decodelist(l, sep=' '):
    '''decode a wire string of sep-separated hex nodes to binary nodes'''
    if l:
        return map(bin, l.split(sep))
    # empty string decodes to the empty list
    return []
125
126
def encodelist(l, sep=' '):
    '''encode a list of binary nodes as a sep-separated hex wire string'''
    return sep.join(map(hex, l))
128
129
129 # batched call argument encoding
130 # batched call argument encoding
130
131
def escapearg(plain):
    '''escape batch-command metacharacters (: , ; =) in an argument value

    ':' must be escaped first so the ':'-prefixed escapes introduced by
    the later replacements are not themselves re-escaped.
    '''
    return (plain
            .replace(':', '::')
            .replace(',', ':,')
            .replace(';', ':;')
            .replace('=', ':='))
137
138
def unescapearg(escaped):
    '''reverse escapearg(); '::' must be unescaped last, mirroring
    escapearg() which escaped ':' first'''
    return (escaped
            .replace(':=', '=')
            .replace(':;', ';')
            .replace(':,', ',')
            .replace('::', ':'))
144
145
145 # client side
146 # client side
146
147
def todict(**args):
    '''build a dict from keyword arguments (terser than a dict literal
    at the call sites below)'''
    return args
149
150
class wirerepository(repo.repository):
    '''client-side peer: implements repository methods by issuing wire
    protocol commands via self._call / self._callstream / self._callpush
    (provided by concrete subclasses)'''

    def batch(self):
        return remotebatch(self)

    def _submitbatch(self, req):
        # encode the queued (op, argsdict) pairs into a single "batch"
        # command; results come back ';'-separated in the same order
        cmds = []
        for op, argsdict in req:
            args = ','.join('%s=%s' % p for p in argsdict.iteritems())
            cmds.append('%s %s' % (op, args))
        rsp = self._call("batch", cmds=';'.join(cmds))
        return rsp.split(';')

    def _submitone(self, op, args):
        return self._call(op, **args)

    @batchable
    def lookup(self, key):
        self.requirecap('lookup', _('look up remote revision'))
        f = future()
        yield todict(key=encoding.fromlocal(key)), f
        d = f.value
        # response is "<success> <data>\n"; data is a hex node on success,
        # an error message on failure
        success, data = d[:-1].split(" ", 1)
        if int(success):
            yield bin(data)
        self._abort(error.RepoError(data))

    @batchable
    def heads(self):
        f = future()
        yield {}, f
        d = f.value
        try:
            yield decodelist(d[:-1])
        except ValueError:
            self._abort(error.ResponseError(_("unexpected response:"), d))

    @batchable
    def known(self, nodes):
        f = future()
        yield todict(nodes=encodelist(nodes)), f
        d = f.value
        try:
            # one '0'/'1' character per queried node
            yield [bool(int(f)) for f in d]
        except ValueError:
            self._abort(error.ResponseError(_("unexpected response:"), d))

    @batchable
    def branchmap(self):
        f = future()
        yield {}, f
        d = f.value
        try:
            # one line per branch: "<quoted name> <hex heads...>"
            branchmap = {}
            for branchpart in d.splitlines():
                branchname, branchheads = branchpart.split(' ', 1)
                branchname = encoding.tolocal(urllib.unquote(branchname))
                branchheads = decodelist(branchheads)
                branchmap[branchname] = branchheads
            yield branchmap
        except TypeError:
            self._abort(error.ResponseError(_("unexpected response:"), d))

    def branches(self, nodes):
        n = encodelist(nodes)
        d = self._call("branches", nodes=n)
        try:
            br = [tuple(decodelist(b)) for b in d.splitlines()]
            return br
        except ValueError:
            self._abort(error.ResponseError(_("unexpected response:"), d))

    def between(self, pairs):
        batch = 8 # avoid giant requests
        r = []
        for i in xrange(0, len(pairs), batch):
            n = " ".join([encodelist(p, '-') for p in pairs[i:i + batch]])
            d = self._call("between", pairs=n)
            try:
                r.extend(l and decodelist(l) or [] for l in d.splitlines())
            except ValueError:
                self._abort(error.ResponseError(_("unexpected response:"), d))
        return r

    @batchable
    def pushkey(self, namespace, key, old, new):
        if not self.capable('pushkey'):
            yield False, None
        f = future()
        yield todict(namespace=encoding.fromlocal(namespace),
                     key=encoding.fromlocal(key),
                     old=encoding.fromlocal(old),
                     new=encoding.fromlocal(new)), f
        d = f.value
        # first line is the integer result; the rest is server output
        d, output = d.split('\n', 1)
        try:
            d = bool(int(d))
        except ValueError:
            raise error.ResponseError(
                _('push failed (unexpected response):'), d)
        for l in output.splitlines(True):
            self.ui.status(_('remote: '), l)
        yield d

    @batchable
    def listkeys(self, namespace):
        if not self.capable('pushkey'):
            yield {}, None
        f = future()
        yield todict(namespace=encoding.fromlocal(namespace)), f
        d = f.value
        r = {}
        # one "key\tvalue" pair per line
        for l in d.splitlines():
            k, v = l.split('\t')
            r[encoding.tolocal(k)] = encoding.tolocal(v)
        yield r

    def stream_out(self):
        return self._callstream('stream_out')

    def changegroup(self, nodes, kind):
        n = encodelist(nodes)
        f = self._callstream("changegroup", roots=n)
        return changegroupmod.unbundle10(self._decompress(f), 'UN')

    def changegroupsubset(self, bases, heads, kind):
        self.requirecap('changegroupsubset', _('look up remote changes'))
        bases = encodelist(bases)
        heads = encodelist(heads)
        f = self._callstream("changegroupsubset",
                             bases=bases, heads=heads)
        return changegroupmod.unbundle10(self._decompress(f), 'UN')

    def getbundle(self, source, heads=None, common=None):
        self.requirecap('getbundle', _('look up remote changes'))
        # only send the optional arguments the caller actually supplied
        opts = {}
        if heads is not None:
            opts['heads'] = encodelist(heads)
        if common is not None:
            opts['common'] = encodelist(common)
        f = self._callstream("getbundle", **opts)
        return changegroupmod.unbundle10(self._decompress(f), 'UN')

    def unbundle(self, cg, heads, source):
        '''Send cg (a readable file-like object representing the
        changegroup to push, typically a chunkbuffer object) to the
        remote server as a bundle. Return an integer indicating the
        result of the push (see localrepository.addchangegroup()).'''

        if heads != ['force'] and self.capable('unbundlehash'):
            # send a digest of the expected heads instead of the full list
            heads = encodelist(['hashed',
                                util.sha1(''.join(sorted(heads))).digest()])
        else:
            heads = encodelist(heads)

        ret, output = self._callpush("unbundle", cg, heads=heads)
        if ret == "":
            raise error.ResponseError(
                _('push failed:'), output)
        try:
            ret = int(ret)
        except ValueError:
            raise error.ResponseError(
                _('push failed (unexpected response):'), ret)

        for l in output.splitlines(True):
            self.ui.status(_('remote: '), l)
        return ret

    def debugwireargs(self, one, two, three=None, four=None, five=None):
        # don't pass optional arguments left at their default value
        opts = {}
        if three is not None:
            opts['three'] = three
        if four is not None:
            opts['four'] = four
        return self._call('debugwireargs', one=one, two=two, **opts)
325
326
326 # server side
327 # server side
327
328
class streamres(object):
    '''server-side result wrapper: stream out the wrapped generator'''
    def __init__(self, gen):
        self.gen = gen
331
332
class pushres(object):
    '''server-side result wrapper: push result value'''
    def __init__(self, res):
        self.res = res
335
336
class pusherr(object):
    '''server-side result wrapper: push error value'''
    def __init__(self, res):
        self.res = res
339
340
class ooberror(object):
    '''server-side result wrapper: out-of-band error message'''
    def __init__(self, message):
        self.message = message
343
344
def dispatch(repo, proto, command):
    '''look up *command* in the commands table, decode its arguments from
    the protocol handler, and invoke it'''
    func, spec = commands[command]
    args = proto.getargs(spec)
    return func(repo, proto, *args)
348
349
def options(cmd, keys, others):
    '''pop the entries named in *keys* out of the *others* dict

    Mutates *others*: recognized keys are removed. Any leftover keys are
    reported on stderr (unexpected arguments from a newer client) but are
    otherwise ignored.
    '''
    opts = {}
    for k in keys:
        if k in others:
            opts[k] = others[k]
            del others[k]
    if others:
        sys.stderr.write("abort: %s got unexpected arguments %s\n"
                         % (cmd, ",".join(others)))
    return opts
359
360
def batch(repo, proto, cmds, others):
    '''server side of the "batch" command: run several ';'-separated
    sub-commands and return their escaped results joined by ';'

    Aborts the whole batch early if any sub-command returns an ooberror.
    '''
    res = []
    for pair in cmds.split(';'):
        op, args = pair.split(' ', 1)
        # decode "name=value" argument pairs (values are batch-escaped)
        vals = {}
        for a in args.split(','):
            if a:
                n, v = a.split('=')
                vals[n] = unescapearg(v)
        func, spec = commands[op]
        if spec:
            keys = spec.split()
            data = {}
            for k in keys:
                if k == '*':
                    # '*' collects every argument not named in the spec
                    star = {}
                    for key in vals.keys():
                        if key not in keys:
                            star[key] = vals[key]
                    data['*'] = star
                else:
                    data[k] = vals[k]
            result = func(repo, proto, *[data[k] for k in keys])
        else:
            result = func(repo, proto)
        if isinstance(result, ooberror):
            return result
        res.append(escapearg(result))
    return ';'.join(res)
389
390
def between(repo, proto, pairs):
    '''server side of "between": one encoded node list per requested pair'''
    pairs = [decodelist(p, '-') for p in pairs.split(" ")]
    r = []
    for b in repo.between(pairs):
        r.append(encodelist(b) + "\n")
    return "".join(r)
396
397
def branchmap(repo, proto):
    '''server side of "branchmap": one line per branch,
    "<url-quoted name> <hex heads...>"'''
    branchmap = repo.branchmap()
    heads = []
    for branch, nodes in branchmap.iteritems():
        # quote the name so spaces/newlines cannot break the line format
        branchname = urllib.quote(encoding.fromlocal(branch))
        branchnodes = encodelist(nodes)
        heads.append('%s %s' % (branchname, branchnodes))
    return '\n'.join(heads)
405
406
def branches(repo, proto, nodes):
    '''server side of "branches": one encoded node list per input node'''
    nodes = decodelist(nodes)
    r = []
    for b in repo.branches(nodes):
        r.append(encodelist(b) + "\n")
    return "".join(r)
412
413
def capabilities(repo, proto):
    '''build the space-separated capability string advertised by the server'''
    caps = ('lookup changegroupsubset branchmap pushkey known getbundle '
            'unbundlehash batch').split()
    if _allowstream(repo.ui):
        requiredformats = repo.requirements & repo.supportedformats
        # if our local revlogs are just revlogv1, add 'stream' cap
        if not requiredformats - set(('revlogv1',)):
            caps.append('stream')
        # otherwise, add 'streamreqs' detailing our local revlog format
        else:
            caps.append('streamreqs=%s' % ','.join(requiredformats))
    caps.append('unbundle=%s' % ','.join(changegroupmod.bundlepriority))
    caps.append('httpheader=1024')
    return ' '.join(caps)
427
428
def changegroup(repo, proto, roots):
    '''server side of "changegroup": stream a changegroup for the given roots'''
    nodes = decodelist(roots)
    cg = repo.changegroup(nodes, 'serve')
    return streamres(proto.groupchunks(cg))
432
433
def changegroupsubset(repo, proto, bases, heads):
    '''server side of "changegroupsubset": stream the changegroup between
    the given bases and heads'''
    bases = decodelist(bases)
    heads = decodelist(heads)
    cg = repo.changegroupsubset(bases, heads, 'serve')
    return streamres(proto.groupchunks(cg))
438
439
def debugwireargs(repo, proto, one, two, others):
    '''server side of "debugwireargs" (wire argument round-trip testing)'''
    # only accept optional args from the known set
    opts = options('debugwireargs', ['three', 'four'], others)
    return repo.debugwireargs(one, two, **opts)
443
444
def getbundle(repo, proto, others):
    '''server side of "getbundle": stream a bundle for the requested
    heads/common sets'''
    opts = options('getbundle', ['heads', 'common'], others)
    for k, v in opts.iteritems():
        opts[k] = decodelist(v)
    cg = repo.getbundle('serve', **opts)
    return streamres(proto.groupchunks(cg))
450
451
def heads(repo, proto):
    '''server side of "heads"

    Uses phases.visibleheads() rather than repo.heads() so that heads of
    secret changesets are not exposed over the wire (the "do not exchange
    secret changesets" change).
    '''
    h = phases.visibleheads(repo)
    return encodelist(h) + "\n"
454
455
def hello(repo, proto):
    '''the hello command returns a set of lines describing various
    interesting things about the server, in an RFC822-like format.
    Currently the only one defined is "capabilities", which
    consists of a line in the form:

    capabilities: space separated list of tokens
    '''
    return "capabilities: %s\n" % (capabilities(repo, proto))
464
465
def listkeys(repo, proto, namespace):
    '''server side of "listkeys": one "key\\tvalue" line per entry'''
    d = repo.listkeys(encoding.tolocal(namespace)).items()
    t = '\n'.join(['%s\t%s' % (encoding.fromlocal(k), encoding.fromlocal(v))
                   for k, v in d])
    return t
470
471
471 def lookup(repo, proto, key):
472 def lookup(repo, proto, key):
472 try:
473 try:
473 r = hex(repo.lookup(encoding.tolocal(key)))
474 r = hex(repo.lookup(encoding.tolocal(key)))
474 success = 1
475 success = 1
475 except Exception, inst:
476 except Exception, inst:
476 r = str(inst)
477 r = str(inst)
477 success = 0
478 success = 0
478 return "%s %s\n" % (success, r)
479 return "%s %s\n" % (success, r)
479
480
def known(repo, proto, nodes, others):
    '''server side of "known": one '0'/'1' character per queried node'''
    return ''.join(b and "1" or "0" for b in repo.known(decodelist(nodes)))
482
483
def pushkey(repo, proto, namespace, key, old, new):
    '''server side of "pushkey": store a key in the given namespace'''
    # compatibility with pre-1.8 clients which were accidentally
    # sending raw binary nodes rather than utf-8-encoded hex
    if len(new) == 20 and new.encode('string-escape') != new:
        # looks like it could be a binary node
        try:
            new.decode('utf-8')
            new = encoding.tolocal(new) # but cleanly decodes as UTF-8
        except UnicodeDecodeError:
            pass # binary, leave unmodified
    else:
        new = encoding.tolocal(new) # normal path

    r = repo.pushkey(encoding.tolocal(namespace), encoding.tolocal(key),
                     encoding.tolocal(old), new)
    return '%s\n' % int(r)
499
500
500 def _allowstream(ui):
501 def _allowstream(ui):
501 return ui.configbool('server', 'uncompressed', True, untrusted=True)
502 return ui.configbool('server', 'uncompressed', True, untrusted=True)
502
503
def stream(repo, proto):
    '''If the server supports streaming clone, it advertises the "stream"
    capability with a value representing the version and flags of the repo
    it is serving. Client checks to see if it understands the format.

    The format is simple: the server writes out a line with the amount
    of files, then the total amount of bytes to be transfered (separated
    by a space). Then, for each file, the server first writes the filename
    and filesize (separated by the null character), then the file contents.
    '''

    if not _allowstream(repo.ui):
        return '1\n' # error: streaming disabled by configuration

    entries = []
    total_bytes = 0
    try:
        # get consistent snapshot of repo, lock during scan
        lock = repo.lock()
        try:
            repo.ui.debug('scanning\n')
            for name, ename, size in repo.store.walk():
                entries.append((name, size))
                total_bytes += size
        finally:
            lock.release()
    except error.LockError:
        return '2\n' # error: 2

    def streamer(repo, entries, total):
        '''stream out all metadata files in repository.'''
        yield '0\n' # success
        repo.ui.debug('%d files, %d bytes to transfer\n' %
                      (len(entries), total_bytes))
        yield '%d %d\n' % (len(entries), total_bytes)
        for name, size in entries:
            repo.ui.debug('sending %s (%d bytes)\n' % (name, size))
            # partially encode name over the wire for backwards compat
            yield '%s\0%d\n' % (store.encodedir(name), size)
            for chunk in util.filechunkiter(repo.sopener(name), limit=size):
                yield chunk

    return streamres(streamer(repo, entries, total_bytes))
546
547
547 def unbundle(repo, proto, heads):
548 def unbundle(repo, proto, heads):
548 their_heads = decodelist(heads)
549 their_heads = decodelist(heads)
549
550
550 def check_heads():
551 def check_heads():
551 heads = repo.heads()
552 heads = repo.heads()
552 heads_hash = util.sha1(''.join(sorted(heads))).digest()
553 heads_hash = util.sha1(''.join(sorted(heads))).digest()
553 return (their_heads == ['force'] or their_heads == heads or
554 return (their_heads == ['force'] or their_heads == heads or
554 their_heads == ['hashed', heads_hash])
555 their_heads == ['hashed', heads_hash])
555
556
556 proto.redirect()
557 proto.redirect()
557
558
558 # fail early if possible
559 # fail early if possible
559 if not check_heads():
560 if not check_heads():
560 return pusherr('unsynced changes')
561 return pusherr('unsynced changes')
561
562
562 # write bundle data to temporary file because it can be big
563 # write bundle data to temporary file because it can be big
563 fd, tempname = tempfile.mkstemp(prefix='hg-unbundle-')
564 fd, tempname = tempfile.mkstemp(prefix='hg-unbundle-')
564 fp = os.fdopen(fd, 'wb+')
565 fp = os.fdopen(fd, 'wb+')
565 r = 0
566 r = 0
566 try:
567 try:
567 proto.getfile(fp)
568 proto.getfile(fp)
568 lock = repo.lock()
569 lock = repo.lock()
569 try:
570 try:
570 if not check_heads():
571 if not check_heads():
571 # someone else committed/pushed/unbundled while we
572 # someone else committed/pushed/unbundled while we
572 # were transferring data
573 # were transferring data
573 return pusherr('unsynced changes')
574 return pusherr('unsynced changes')
574
575
575 # push can proceed
576 # push can proceed
576 fp.seek(0)
577 fp.seek(0)
577 gen = changegroupmod.readbundle(fp, None)
578 gen = changegroupmod.readbundle(fp, None)
578
579
579 try:
580 try:
580 r = repo.addchangegroup(gen, 'serve', proto._client())
581 r = repo.addchangegroup(gen, 'serve', proto._client())
581 except util.Abort, inst:
582 except util.Abort, inst:
582 sys.stderr.write("abort: %s\n" % inst)
583 sys.stderr.write("abort: %s\n" % inst)
583 finally:
584 finally:
584 lock.release()
585 lock.release()
585 return pushres(r)
586 return pushres(r)
586
587
587 finally:
588 finally:
588 fp.close()
589 fp.close()
589 os.unlink(tempname)
590 os.unlink(tempname)
590
591
591 commands = {
592 commands = {
592 'batch': (batch, 'cmds *'),
593 'batch': (batch, 'cmds *'),
593 'between': (between, 'pairs'),
594 'between': (between, 'pairs'),
594 'branchmap': (branchmap, ''),
595 'branchmap': (branchmap, ''),
595 'branches': (branches, 'nodes'),
596 'branches': (branches, 'nodes'),
596 'capabilities': (capabilities, ''),
597 'capabilities': (capabilities, ''),
597 'changegroup': (changegroup, 'roots'),
598 'changegroup': (changegroup, 'roots'),
598 'changegroupsubset': (changegroupsubset, 'bases heads'),
599 'changegroupsubset': (changegroupsubset, 'bases heads'),
599 'debugwireargs': (debugwireargs, 'one two *'),
600 'debugwireargs': (debugwireargs, 'one two *'),
600 'getbundle': (getbundle, '*'),
601 'getbundle': (getbundle, '*'),
601 'heads': (heads, ''),
602 'heads': (heads, ''),
602 'hello': (hello, ''),
603 'hello': (hello, ''),
603 'known': (known, 'nodes *'),
604 'known': (known, 'nodes *'),
604 'listkeys': (listkeys, 'namespace'),
605 'listkeys': (listkeys, 'namespace'),
605 'lookup': (lookup, 'key'),
606 'lookup': (lookup, 'key'),
606 'pushkey': (pushkey, 'namespace key old new'),
607 'pushkey': (pushkey, 'namespace key old new'),
607 'stream_out': (stream, ''),
608 'stream_out': (stream, ''),
608 'unbundle': (unbundle, 'heads'),
609 'unbundle': (unbundle, 'heads'),
609 }
610 }
@@ -1,508 +1,570 b''
1 $ cat >> $HGRCPATH <<EOF
1 $ cat >> $HGRCPATH <<EOF
2 > [extensions]
2 > [extensions]
3 > graphlog=
3 > graphlog=
4 > EOF
4 > EOF
5 $ alias hgph='hg log --template "{rev} {phase} {desc} - {node|short}\n"'
5 $ alias hgph='hg log --template "{rev} {phase} {desc} - {node|short}\n"'
6
6
7 $ mkcommit() {
7 $ mkcommit() {
8 > echo "$1" > "$1"
8 > echo "$1" > "$1"
9 > hg add "$1"
9 > hg add "$1"
10 > hg ci -m "$1"
10 > message="$1"
11 > shift
12 > hg ci -m "$message" $*
11 > }
13 > }
12
14
13 $ hg init alpha
15 $ hg init alpha
14 $ cd alpha
16 $ cd alpha
15 $ mkcommit a-A
17 $ mkcommit a-A
16 $ mkcommit a-B
18 $ mkcommit a-B
17 $ mkcommit a-C
19 $ mkcommit a-C
18 $ mkcommit a-D
20 $ mkcommit a-D
19 $ hgph
21 $ hgph
20 3 1 a-D - b555f63b6063
22 3 1 a-D - b555f63b6063
21 2 1 a-C - 54acac6f23ab
23 2 1 a-C - 54acac6f23ab
22 1 1 a-B - 548a3d25dbf0
24 1 1 a-B - 548a3d25dbf0
23 0 1 a-A - 054250a37db4
25 0 1 a-A - 054250a37db4
24
26
25 $ hg init ../beta
27 $ hg init ../beta
26 $ hg push -r 1 ../beta
28 $ hg push -r 1 ../beta
27 pushing to ../beta
29 pushing to ../beta
28 searching for changes
30 searching for changes
29 adding changesets
31 adding changesets
30 adding manifests
32 adding manifests
31 adding file changes
33 adding file changes
32 added 2 changesets with 2 changes to 2 files
34 added 2 changesets with 2 changes to 2 files
33 $ hgph
35 $ hgph
34 3 1 a-D - b555f63b6063
36 3 1 a-D - b555f63b6063
35 2 1 a-C - 54acac6f23ab
37 2 1 a-C - 54acac6f23ab
36 1 0 a-B - 548a3d25dbf0
38 1 0 a-B - 548a3d25dbf0
37 0 0 a-A - 054250a37db4
39 0 0 a-A - 054250a37db4
38
40
39 $ cd ../beta
41 $ cd ../beta
40 $ hgph
42 $ hgph
41 1 0 a-B - 548a3d25dbf0
43 1 0 a-B - 548a3d25dbf0
42 0 0 a-A - 054250a37db4
44 0 0 a-A - 054250a37db4
43 $ hg up -q
45 $ hg up -q
44 $ mkcommit b-A
46 $ mkcommit b-A
45 $ hgph
47 $ hgph
46 2 1 b-A - f54f1bb90ff3
48 2 1 b-A - f54f1bb90ff3
47 1 0 a-B - 548a3d25dbf0
49 1 0 a-B - 548a3d25dbf0
48 0 0 a-A - 054250a37db4
50 0 0 a-A - 054250a37db4
49 $ hg pull ../alpha
51 $ hg pull ../alpha
50 pulling from ../alpha
52 pulling from ../alpha
51 searching for changes
53 searching for changes
52 adding changesets
54 adding changesets
53 adding manifests
55 adding manifests
54 adding file changes
56 adding file changes
55 added 2 changesets with 2 changes to 2 files (+1 heads)
57 added 2 changesets with 2 changes to 2 files (+1 heads)
56 (run 'hg heads' to see heads, 'hg merge' to merge)
58 (run 'hg heads' to see heads, 'hg merge' to merge)
57 $ hgph
59 $ hgph
58 4 0 a-D - b555f63b6063
60 4 0 a-D - b555f63b6063
59 3 0 a-C - 54acac6f23ab
61 3 0 a-C - 54acac6f23ab
60 2 1 b-A - f54f1bb90ff3
62 2 1 b-A - f54f1bb90ff3
61 1 0 a-B - 548a3d25dbf0
63 1 0 a-B - 548a3d25dbf0
62 0 0 a-A - 054250a37db4
64 0 0 a-A - 054250a37db4
63
65
64 pull did not updated ../alpha state.
66 pull did not updated ../alpha state.
65 push from alpha to beta should update phase even if nothing is transfered
67 push from alpha to beta should update phase even if nothing is transfered
66
68
67 $ cd ../alpha
69 $ cd ../alpha
68 $ hgph # not updated by remote pull
70 $ hgph # not updated by remote pull
69 3 1 a-D - b555f63b6063
71 3 1 a-D - b555f63b6063
70 2 1 a-C - 54acac6f23ab
72 2 1 a-C - 54acac6f23ab
71 1 0 a-B - 548a3d25dbf0
73 1 0 a-B - 548a3d25dbf0
72 0 0 a-A - 054250a37db4
74 0 0 a-A - 054250a37db4
73 $ hg push ../beta
75 $ hg push ../beta
74 pushing to ../beta
76 pushing to ../beta
75 searching for changes
77 searching for changes
76 no changes found
78 no changes found
77 $ hgph
79 $ hgph
78 3 0 a-D - b555f63b6063
80 3 0 a-D - b555f63b6063
79 2 0 a-C - 54acac6f23ab
81 2 0 a-C - 54acac6f23ab
80 1 0 a-B - 548a3d25dbf0
82 1 0 a-B - 548a3d25dbf0
81 0 0 a-A - 054250a37db4
83 0 0 a-A - 054250a37db4
82
84
83 update must update phase of common changeset too
85 update must update phase of common changeset too
84
86
85 $ hg pull ../beta # getting b-A
87 $ hg pull ../beta # getting b-A
86 pulling from ../beta
88 pulling from ../beta
87 searching for changes
89 searching for changes
88 adding changesets
90 adding changesets
89 adding manifests
91 adding manifests
90 adding file changes
92 adding file changes
91 added 1 changesets with 1 changes to 1 files (+1 heads)
93 added 1 changesets with 1 changes to 1 files (+1 heads)
92 (run 'hg heads' to see heads, 'hg merge' to merge)
94 (run 'hg heads' to see heads, 'hg merge' to merge)
93
95
94 $ cd ../beta
96 $ cd ../beta
95 $ hgph # not updated by remote pull
97 $ hgph # not updated by remote pull
96 4 0 a-D - b555f63b6063
98 4 0 a-D - b555f63b6063
97 3 0 a-C - 54acac6f23ab
99 3 0 a-C - 54acac6f23ab
98 2 1 b-A - f54f1bb90ff3
100 2 1 b-A - f54f1bb90ff3
99 1 0 a-B - 548a3d25dbf0
101 1 0 a-B - 548a3d25dbf0
100 0 0 a-A - 054250a37db4
102 0 0 a-A - 054250a37db4
101 $ hg pull ../alpha
103 $ hg pull ../alpha
102 pulling from ../alpha
104 pulling from ../alpha
103 searching for changes
105 searching for changes
104 no changes found
106 no changes found
105 $ hgph
107 $ hgph
106 4 0 a-D - b555f63b6063
108 4 0 a-D - b555f63b6063
107 3 0 a-C - 54acac6f23ab
109 3 0 a-C - 54acac6f23ab
108 2 0 b-A - f54f1bb90ff3
110 2 0 b-A - f54f1bb90ff3
109 1 0 a-B - 548a3d25dbf0
111 1 0 a-B - 548a3d25dbf0
110 0 0 a-A - 054250a37db4
112 0 0 a-A - 054250a37db4
111
113
112 Publish configuration option
114 Publish configuration option
113 ----------------------------
115 ----------------------------
114
116
115 Pull
117 Pull
116 ````
118 ````
117
119
118 changegroup are added without phase movement
120 changegroup are added without phase movement
119
121
120 $ hg bundle -a ../base.bundle
122 $ hg bundle -a ../base.bundle
121 5 changesets found
123 5 changesets found
122 $ cd ..
124 $ cd ..
123 $ hg init mu
125 $ hg init mu
124 $ cd mu
126 $ cd mu
125 $ cat > .hg/hgrc << EOF
127 $ cat > .hg/hgrc << EOF
126 > [phases]
128 > [phases]
127 > publish=0
129 > publish=0
128 > EOF
130 > EOF
129 $ hg unbundle ../base.bundle
131 $ hg unbundle ../base.bundle
130 adding changesets
132 adding changesets
131 adding manifests
133 adding manifests
132 adding file changes
134 adding file changes
133 added 5 changesets with 5 changes to 5 files (+1 heads)
135 added 5 changesets with 5 changes to 5 files (+1 heads)
134 (run 'hg heads' to see heads, 'hg merge' to merge)
136 (run 'hg heads' to see heads, 'hg merge' to merge)
135 $ hgph
137 $ hgph
136 4 1 a-D - b555f63b6063
138 4 1 a-D - b555f63b6063
137 3 1 a-C - 54acac6f23ab
139 3 1 a-C - 54acac6f23ab
138 2 1 b-A - f54f1bb90ff3
140 2 1 b-A - f54f1bb90ff3
139 1 1 a-B - 548a3d25dbf0
141 1 1 a-B - 548a3d25dbf0
140 0 1 a-A - 054250a37db4
142 0 1 a-A - 054250a37db4
141 $ cd ..
143 $ cd ..
142
144
143 Pulling from publish=False to publish=False does not move boundary.
145 Pulling from publish=False to publish=False does not move boundary.
144
146
145 $ hg init nu
147 $ hg init nu
146 $ cd nu
148 $ cd nu
147 $ cat > .hg/hgrc << EOF
149 $ cat > .hg/hgrc << EOF
148 > [phases]
150 > [phases]
149 > publish=0
151 > publish=0
150 > EOF
152 > EOF
151 $ hg pull ../mu -r 54acac6f23ab
153 $ hg pull ../mu -r 54acac6f23ab
152 pulling from ../mu
154 pulling from ../mu
153 adding changesets
155 adding changesets
154 adding manifests
156 adding manifests
155 adding file changes
157 adding file changes
156 added 3 changesets with 3 changes to 3 files
158 added 3 changesets with 3 changes to 3 files
157 (run 'hg update' to get a working copy)
159 (run 'hg update' to get a working copy)
158 $ hgph
160 $ hgph
159 2 1 a-C - 54acac6f23ab
161 2 1 a-C - 54acac6f23ab
160 1 1 a-B - 548a3d25dbf0
162 1 1 a-B - 548a3d25dbf0
161 0 1 a-A - 054250a37db4
163 0 1 a-A - 054250a37db4
162
164
163 Even for common
165 Even for common
164
166
165 $ hg pull ../mu -r f54f1bb90ff3
167 $ hg pull ../mu -r f54f1bb90ff3
166 pulling from ../mu
168 pulling from ../mu
167 searching for changes
169 searching for changes
168 adding changesets
170 adding changesets
169 adding manifests
171 adding manifests
170 adding file changes
172 adding file changes
171 added 1 changesets with 1 changes to 1 files (+1 heads)
173 added 1 changesets with 1 changes to 1 files (+1 heads)
172 (run 'hg heads' to see heads, 'hg merge' to merge)
174 (run 'hg heads' to see heads, 'hg merge' to merge)
173 $ hgph
175 $ hgph
174 3 1 b-A - f54f1bb90ff3
176 3 1 b-A - f54f1bb90ff3
175 2 1 a-C - 54acac6f23ab
177 2 1 a-C - 54acac6f23ab
176 1 1 a-B - 548a3d25dbf0
178 1 1 a-B - 548a3d25dbf0
177 0 1 a-A - 054250a37db4
179 0 1 a-A - 054250a37db4
178
180
179
181
180 Pulling from Publish=True to Publish=False move boundary in common set.
182 Pulling from Publish=True to Publish=False move boundary in common set.
181 we are in nu
183 we are in nu
182
184
183 $ hg pull ../alpha -r b555f63b6063
185 $ hg pull ../alpha -r b555f63b6063
184 pulling from ../alpha
186 pulling from ../alpha
185 searching for changes
187 searching for changes
186 adding changesets
188 adding changesets
187 adding manifests
189 adding manifests
188 adding file changes
190 adding file changes
189 added 1 changesets with 1 changes to 1 files
191 added 1 changesets with 1 changes to 1 files
190 (run 'hg update' to get a working copy)
192 (run 'hg update' to get a working copy)
191 $ hgph
193 $ hgph
192 4 0 a-D - b555f63b6063
194 4 0 a-D - b555f63b6063
193 3 0 b-A - f54f1bb90ff3
195 3 0 b-A - f54f1bb90ff3
194 2 0 a-C - 54acac6f23ab
196 2 0 a-C - 54acac6f23ab
195 1 0 a-B - 548a3d25dbf0
197 1 0 a-B - 548a3d25dbf0
196 0 0 a-A - 054250a37db4
198 0 0 a-A - 054250a37db4
197
199
198 pulling from Publish=False to publish=False with some public
200 pulling from Publish=False to publish=False with some public
199
201
200 $ hg up -q f54f1bb90ff3
202 $ hg up -q f54f1bb90ff3
201 $ mkcommit n-A
203 $ mkcommit n-A
202 $ mkcommit n-B
204 $ mkcommit n-B
203 $ hgph
205 $ hgph
204 6 1 n-B - 145e75495359
206 6 1 n-B - 145e75495359
205 5 1 n-A - d6bcb4f74035
207 5 1 n-A - d6bcb4f74035
206 4 0 a-D - b555f63b6063
208 4 0 a-D - b555f63b6063
207 3 0 b-A - f54f1bb90ff3
209 3 0 b-A - f54f1bb90ff3
208 2 0 a-C - 54acac6f23ab
210 2 0 a-C - 54acac6f23ab
209 1 0 a-B - 548a3d25dbf0
211 1 0 a-B - 548a3d25dbf0
210 0 0 a-A - 054250a37db4
212 0 0 a-A - 054250a37db4
211 $ cd ../mu
213 $ cd ../mu
212 $ hg pull ../nu
214 $ hg pull ../nu
213 pulling from ../nu
215 pulling from ../nu
214 searching for changes
216 searching for changes
215 adding changesets
217 adding changesets
216 adding manifests
218 adding manifests
217 adding file changes
219 adding file changes
218 added 2 changesets with 2 changes to 2 files
220 added 2 changesets with 2 changes to 2 files
219 (run 'hg update' to get a working copy)
221 (run 'hg update' to get a working copy)
220 $ hgph
222 $ hgph
221 6 1 n-B - 145e75495359
223 6 1 n-B - 145e75495359
222 5 1 n-A - d6bcb4f74035
224 5 1 n-A - d6bcb4f74035
223 4 0 a-D - b555f63b6063
225 4 0 a-D - b555f63b6063
224 3 0 a-C - 54acac6f23ab
226 3 0 a-C - 54acac6f23ab
225 2 0 b-A - f54f1bb90ff3
227 2 0 b-A - f54f1bb90ff3
226 1 0 a-B - 548a3d25dbf0
228 1 0 a-B - 548a3d25dbf0
227 0 0 a-A - 054250a37db4
229 0 0 a-A - 054250a37db4
228 $ cd ..
230 $ cd ..
229
231
230 pulling into publish=True
232 pulling into publish=True
231
233
232 $ cd alpha
234 $ cd alpha
233 $ hgph
235 $ hgph
234 4 0 b-A - f54f1bb90ff3
236 4 0 b-A - f54f1bb90ff3
235 3 0 a-D - b555f63b6063
237 3 0 a-D - b555f63b6063
236 2 0 a-C - 54acac6f23ab
238 2 0 a-C - 54acac6f23ab
237 1 0 a-B - 548a3d25dbf0
239 1 0 a-B - 548a3d25dbf0
238 0 0 a-A - 054250a37db4
240 0 0 a-A - 054250a37db4
239 $ hg pull ../mu
241 $ hg pull ../mu
240 pulling from ../mu
242 pulling from ../mu
241 searching for changes
243 searching for changes
242 adding changesets
244 adding changesets
243 adding manifests
245 adding manifests
244 adding file changes
246 adding file changes
245 added 2 changesets with 2 changes to 2 files
247 added 2 changesets with 2 changes to 2 files
246 (run 'hg update' to get a working copy)
248 (run 'hg update' to get a working copy)
247 $ hgph
249 $ hgph
248 6 1 n-B - 145e75495359
250 6 1 n-B - 145e75495359
249 5 1 n-A - d6bcb4f74035
251 5 1 n-A - d6bcb4f74035
250 4 0 b-A - f54f1bb90ff3
252 4 0 b-A - f54f1bb90ff3
251 3 0 a-D - b555f63b6063
253 3 0 a-D - b555f63b6063
252 2 0 a-C - 54acac6f23ab
254 2 0 a-C - 54acac6f23ab
253 1 0 a-B - 548a3d25dbf0
255 1 0 a-B - 548a3d25dbf0
254 0 0 a-A - 054250a37db4
256 0 0 a-A - 054250a37db4
255 $ cd ..
257 $ cd ..
256
258
257 pulling back into original repo
259 pulling back into original repo
258
260
259 $ cd nu
261 $ cd nu
260 $ hg pull ../alpha
262 $ hg pull ../alpha
261 pulling from ../alpha
263 pulling from ../alpha
262 searching for changes
264 searching for changes
263 no changes found
265 no changes found
264 $ hgph
266 $ hgph
265 6 0 n-B - 145e75495359
267 6 0 n-B - 145e75495359
266 5 0 n-A - d6bcb4f74035
268 5 0 n-A - d6bcb4f74035
267 4 0 a-D - b555f63b6063
269 4 0 a-D - b555f63b6063
268 3 0 b-A - f54f1bb90ff3
270 3 0 b-A - f54f1bb90ff3
269 2 0 a-C - 54acac6f23ab
271 2 0 a-C - 54acac6f23ab
270 1 0 a-B - 548a3d25dbf0
272 1 0 a-B - 548a3d25dbf0
271 0 0 a-A - 054250a37db4
273 0 0 a-A - 054250a37db4
272
274
273 Push
275 Push
274 ````
276 ````
275
277
276 (inserted)
278 (inserted)
277
279
278 Test that phase are pushed even when they are nothing to pus
280 Test that phase are pushed even when they are nothing to pus
279 (this might be tested later bu are very convenient to not alter too much test)
281 (this might be tested later bu are very convenient to not alter too much test)
280
282
281 Push back to alpha
283 Push back to alpha
282
284
283 $ hg push ../alpha # from nu
285 $ hg push ../alpha # from nu
284 pushing to ../alpha
286 pushing to ../alpha
285 searching for changes
287 searching for changes
286 no changes found
288 no changes found
287 $ cd ..
289 $ cd ..
288 $ cd alpha
290 $ cd alpha
289 $ hgph
291 $ hgph
290 6 0 n-B - 145e75495359
292 6 0 n-B - 145e75495359
291 5 0 n-A - d6bcb4f74035
293 5 0 n-A - d6bcb4f74035
292 4 0 b-A - f54f1bb90ff3
294 4 0 b-A - f54f1bb90ff3
293 3 0 a-D - b555f63b6063
295 3 0 a-D - b555f63b6063
294 2 0 a-C - 54acac6f23ab
296 2 0 a-C - 54acac6f23ab
295 1 0 a-B - 548a3d25dbf0
297 1 0 a-B - 548a3d25dbf0
296 0 0 a-A - 054250a37db4
298 0 0 a-A - 054250a37db4
297
299
298 (end insertion)
300 (end insertion)
299
301
300
302
301 initial setup
303 initial setup
302
304
303 $ hg glog # of alpha
305 $ hg glog # of alpha
304 o changeset: 6:145e75495359
306 o changeset: 6:145e75495359
305 | tag: tip
307 | tag: tip
306 | user: test
308 | user: test
307 | date: Thu Jan 01 00:00:00 1970 +0000
309 | date: Thu Jan 01 00:00:00 1970 +0000
308 | summary: n-B
310 | summary: n-B
309 |
311 |
310 o changeset: 5:d6bcb4f74035
312 o changeset: 5:d6bcb4f74035
311 | user: test
313 | user: test
312 | date: Thu Jan 01 00:00:00 1970 +0000
314 | date: Thu Jan 01 00:00:00 1970 +0000
313 | summary: n-A
315 | summary: n-A
314 |
316 |
315 o changeset: 4:f54f1bb90ff3
317 o changeset: 4:f54f1bb90ff3
316 | parent: 1:548a3d25dbf0
318 | parent: 1:548a3d25dbf0
317 | user: test
319 | user: test
318 | date: Thu Jan 01 00:00:00 1970 +0000
320 | date: Thu Jan 01 00:00:00 1970 +0000
319 | summary: b-A
321 | summary: b-A
320 |
322 |
321 | @ changeset: 3:b555f63b6063
323 | @ changeset: 3:b555f63b6063
322 | | user: test
324 | | user: test
323 | | date: Thu Jan 01 00:00:00 1970 +0000
325 | | date: Thu Jan 01 00:00:00 1970 +0000
324 | | summary: a-D
326 | | summary: a-D
325 | |
327 | |
326 | o changeset: 2:54acac6f23ab
328 | o changeset: 2:54acac6f23ab
327 |/ user: test
329 |/ user: test
328 | date: Thu Jan 01 00:00:00 1970 +0000
330 | date: Thu Jan 01 00:00:00 1970 +0000
329 | summary: a-C
331 | summary: a-C
330 |
332 |
331 o changeset: 1:548a3d25dbf0
333 o changeset: 1:548a3d25dbf0
332 | user: test
334 | user: test
333 | date: Thu Jan 01 00:00:00 1970 +0000
335 | date: Thu Jan 01 00:00:00 1970 +0000
334 | summary: a-B
336 | summary: a-B
335 |
337 |
336 o changeset: 0:054250a37db4
338 o changeset: 0:054250a37db4
337 user: test
339 user: test
338 date: Thu Jan 01 00:00:00 1970 +0000
340 date: Thu Jan 01 00:00:00 1970 +0000
339 summary: a-A
341 summary: a-A
340
342
341 $ mkcommit a-E
343 $ mkcommit a-E
342 $ mkcommit a-F
344 $ mkcommit a-F
343 $ mkcommit a-G
345 $ mkcommit a-G
344 $ hg up d6bcb4f74035 -q
346 $ hg up d6bcb4f74035 -q
345 $ mkcommit a-H
347 $ mkcommit a-H
346 created new head
348 created new head
347 $ hgph
349 $ hgph
348 10 1 a-H - 967b449fbc94
350 10 1 a-H - 967b449fbc94
349 9 1 a-G - 3e27b6f1eee1
351 9 1 a-G - 3e27b6f1eee1
350 8 1 a-F - b740e3e5c05d
352 8 1 a-F - b740e3e5c05d
351 7 1 a-E - e9f537e46dea
353 7 1 a-E - e9f537e46dea
352 6 0 n-B - 145e75495359
354 6 0 n-B - 145e75495359
353 5 0 n-A - d6bcb4f74035
355 5 0 n-A - d6bcb4f74035
354 4 0 b-A - f54f1bb90ff3
356 4 0 b-A - f54f1bb90ff3
355 3 0 a-D - b555f63b6063
357 3 0 a-D - b555f63b6063
356 2 0 a-C - 54acac6f23ab
358 2 0 a-C - 54acac6f23ab
357 1 0 a-B - 548a3d25dbf0
359 1 0 a-B - 548a3d25dbf0
358 0 0 a-A - 054250a37db4
360 0 0 a-A - 054250a37db4
359
361
360 Pushing to Publish=False (unknown changeset)
362 Pushing to Publish=False (unknown changeset)
361
363
362 $ hg push ../mu -r b740e3e5c05d # a-F
364 $ hg push ../mu -r b740e3e5c05d # a-F
363 pushing to ../mu
365 pushing to ../mu
364 searching for changes
366 searching for changes
365 adding changesets
367 adding changesets
366 adding manifests
368 adding manifests
367 adding file changes
369 adding file changes
368 added 2 changesets with 2 changes to 2 files
370 added 2 changesets with 2 changes to 2 files
369 $ hgph
371 $ hgph
370 10 1 a-H - 967b449fbc94
372 10 1 a-H - 967b449fbc94
371 9 1 a-G - 3e27b6f1eee1
373 9 1 a-G - 3e27b6f1eee1
372 8 1 a-F - b740e3e5c05d
374 8 1 a-F - b740e3e5c05d
373 7 1 a-E - e9f537e46dea
375 7 1 a-E - e9f537e46dea
374 6 0 n-B - 145e75495359
376 6 0 n-B - 145e75495359
375 5 0 n-A - d6bcb4f74035
377 5 0 n-A - d6bcb4f74035
376 4 0 b-A - f54f1bb90ff3
378 4 0 b-A - f54f1bb90ff3
377 3 0 a-D - b555f63b6063
379 3 0 a-D - b555f63b6063
378 2 0 a-C - 54acac6f23ab
380 2 0 a-C - 54acac6f23ab
379 1 0 a-B - 548a3d25dbf0
381 1 0 a-B - 548a3d25dbf0
380 0 0 a-A - 054250a37db4
382 0 0 a-A - 054250a37db4
381
383
382 $ cd ../mu
384 $ cd ../mu
383 $ hgph # d6bcb4f74035 and 145e75495359 changed because common is too smart
385 $ hgph # d6bcb4f74035 and 145e75495359 changed because common is too smart
384 8 1 a-F - b740e3e5c05d
386 8 1 a-F - b740e3e5c05d
385 7 1 a-E - e9f537e46dea
387 7 1 a-E - e9f537e46dea
386 6 0 n-B - 145e75495359
388 6 0 n-B - 145e75495359
387 5 0 n-A - d6bcb4f74035
389 5 0 n-A - d6bcb4f74035
388 4 0 a-D - b555f63b6063
390 4 0 a-D - b555f63b6063
389 3 0 a-C - 54acac6f23ab
391 3 0 a-C - 54acac6f23ab
390 2 0 b-A - f54f1bb90ff3
392 2 0 b-A - f54f1bb90ff3
391 1 0 a-B - 548a3d25dbf0
393 1 0 a-B - 548a3d25dbf0
392 0 0 a-A - 054250a37db4
394 0 0 a-A - 054250a37db4
393
395
394 Pushing to Publish=True (unknown changeset)
396 Pushing to Publish=True (unknown changeset)
395
397
396 $ hg push ../beta -r b740e3e5c05d
398 $ hg push ../beta -r b740e3e5c05d
397 pushing to ../beta
399 pushing to ../beta
398 searching for changes
400 searching for changes
399 adding changesets
401 adding changesets
400 adding manifests
402 adding manifests
401 adding file changes
403 adding file changes
402 added 2 changesets with 2 changes to 2 files
404 added 2 changesets with 2 changes to 2 files
403 $ hgph # again d6bcb4f74035 and 145e75495359 changed because common is too smart
405 $ hgph # again d6bcb4f74035 and 145e75495359 changed because common is too smart
404 8 0 a-F - b740e3e5c05d
406 8 0 a-F - b740e3e5c05d
405 7 0 a-E - e9f537e46dea
407 7 0 a-E - e9f537e46dea
406 6 0 n-B - 145e75495359
408 6 0 n-B - 145e75495359
407 5 0 n-A - d6bcb4f74035
409 5 0 n-A - d6bcb4f74035
408 4 0 a-D - b555f63b6063
410 4 0 a-D - b555f63b6063
409 3 0 a-C - 54acac6f23ab
411 3 0 a-C - 54acac6f23ab
410 2 0 b-A - f54f1bb90ff3
412 2 0 b-A - f54f1bb90ff3
411 1 0 a-B - 548a3d25dbf0
413 1 0 a-B - 548a3d25dbf0
412 0 0 a-A - 054250a37db4
414 0 0 a-A - 054250a37db4
413
415
414 Pushing to Publish=True (common changeset)
416 Pushing to Publish=True (common changeset)
415
417
416 $ cd ../beta
418 $ cd ../beta
417 $ hg push ../alpha
419 $ hg push ../alpha
418 pushing to ../alpha
420 pushing to ../alpha
419 searching for changes
421 searching for changes
420 no changes found
422 no changes found
421 $ hgph
423 $ hgph
422 6 0 a-F - b740e3e5c05d
424 6 0 a-F - b740e3e5c05d
423 5 0 a-E - e9f537e46dea
425 5 0 a-E - e9f537e46dea
424 4 0 a-D - b555f63b6063
426 4 0 a-D - b555f63b6063
425 3 0 a-C - 54acac6f23ab
427 3 0 a-C - 54acac6f23ab
426 2 0 b-A - f54f1bb90ff3
428 2 0 b-A - f54f1bb90ff3
427 1 0 a-B - 548a3d25dbf0
429 1 0 a-B - 548a3d25dbf0
428 0 0 a-A - 054250a37db4
430 0 0 a-A - 054250a37db4
429 $ cd ../alpha
431 $ cd ../alpha
430 $ hgph # e9f537e46dea and b740e3e5c05d should have been sync to 0
432 $ hgph # e9f537e46dea and b740e3e5c05d should have been sync to 0
431 10 1 a-H - 967b449fbc94
433 10 1 a-H - 967b449fbc94
432 9 1 a-G - 3e27b6f1eee1
434 9 1 a-G - 3e27b6f1eee1
433 8 0 a-F - b740e3e5c05d
435 8 0 a-F - b740e3e5c05d
434 7 0 a-E - e9f537e46dea
436 7 0 a-E - e9f537e46dea
435 6 0 n-B - 145e75495359
437 6 0 n-B - 145e75495359
436 5 0 n-A - d6bcb4f74035
438 5 0 n-A - d6bcb4f74035
437 4 0 b-A - f54f1bb90ff3
439 4 0 b-A - f54f1bb90ff3
438 3 0 a-D - b555f63b6063
440 3 0 a-D - b555f63b6063
439 2 0 a-C - 54acac6f23ab
441 2 0 a-C - 54acac6f23ab
440 1 0 a-B - 548a3d25dbf0
442 1 0 a-B - 548a3d25dbf0
441 0 0 a-A - 054250a37db4
443 0 0 a-A - 054250a37db4
442
444
443 Pushing to Publish=False (common changeset that change phase + unknown one)
445 Pushing to Publish=False (common changeset that change phase + unknown one)
444
446
445 $ hg push ../mu -r 967b449fbc94 -f
447 $ hg push ../mu -r 967b449fbc94 -f
446 pushing to ../mu
448 pushing to ../mu
447 searching for changes
449 searching for changes
448 adding changesets
450 adding changesets
449 adding manifests
451 adding manifests
450 adding file changes
452 adding file changes
451 added 1 changesets with 1 changes to 1 files (+1 heads)
453 added 1 changesets with 1 changes to 1 files (+1 heads)
452 $ hgph
454 $ hgph
453 10 1 a-H - 967b449fbc94
455 10 1 a-H - 967b449fbc94
454 9 1 a-G - 3e27b6f1eee1
456 9 1 a-G - 3e27b6f1eee1
455 8 0 a-F - b740e3e5c05d
457 8 0 a-F - b740e3e5c05d
456 7 0 a-E - e9f537e46dea
458 7 0 a-E - e9f537e46dea
457 6 0 n-B - 145e75495359
459 6 0 n-B - 145e75495359
458 5 0 n-A - d6bcb4f74035
460 5 0 n-A - d6bcb4f74035
459 4 0 b-A - f54f1bb90ff3
461 4 0 b-A - f54f1bb90ff3
460 3 0 a-D - b555f63b6063
462 3 0 a-D - b555f63b6063
461 2 0 a-C - 54acac6f23ab
463 2 0 a-C - 54acac6f23ab
462 1 0 a-B - 548a3d25dbf0
464 1 0 a-B - 548a3d25dbf0
463 0 0 a-A - 054250a37db4
465 0 0 a-A - 054250a37db4
464 $ cd ../mu
466 $ cd ../mu
465 $ hgph # d6bcb4f74035 should have changed phase
467 $ hgph # d6bcb4f74035 should have changed phase
466 > # again d6bcb4f74035 and 145e75495359 changed because common was too smart
468 > # again d6bcb4f74035 and 145e75495359 changed because common was too smart
467 9 1 a-H - 967b449fbc94
469 9 1 a-H - 967b449fbc94
468 8 0 a-F - b740e3e5c05d
470 8 0 a-F - b740e3e5c05d
469 7 0 a-E - e9f537e46dea
471 7 0 a-E - e9f537e46dea
470 6 0 n-B - 145e75495359
472 6 0 n-B - 145e75495359
471 5 0 n-A - d6bcb4f74035
473 5 0 n-A - d6bcb4f74035
472 4 0 a-D - b555f63b6063
474 4 0 a-D - b555f63b6063
473 3 0 a-C - 54acac6f23ab
475 3 0 a-C - 54acac6f23ab
474 2 0 b-A - f54f1bb90ff3
476 2 0 b-A - f54f1bb90ff3
475 1 0 a-B - 548a3d25dbf0
477 1 0 a-B - 548a3d25dbf0
476 0 0 a-A - 054250a37db4
478 0 0 a-A - 054250a37db4
477
479
478
480
479 Pushing to Publish=True (common changeset from publish=False)
481 Pushing to Publish=True (common changeset from publish=False)
480
482
483 (in mu)
481 $ hg push ../alpha
484 $ hg push ../alpha
482 pushing to ../alpha
485 pushing to ../alpha
483 searching for changes
486 searching for changes
484 no changes found
487 no changes found
485 $ hgph
488 $ hgph
486 9 0 a-H - 967b449fbc94
489 9 0 a-H - 967b449fbc94
487 8 0 a-F - b740e3e5c05d
490 8 0 a-F - b740e3e5c05d
488 7 0 a-E - e9f537e46dea
491 7 0 a-E - e9f537e46dea
489 6 0 n-B - 145e75495359
492 6 0 n-B - 145e75495359
490 5 0 n-A - d6bcb4f74035
493 5 0 n-A - d6bcb4f74035
491 4 0 a-D - b555f63b6063
494 4 0 a-D - b555f63b6063
492 3 0 a-C - 54acac6f23ab
495 3 0 a-C - 54acac6f23ab
493 2 0 b-A - f54f1bb90ff3
496 2 0 b-A - f54f1bb90ff3
494 1 0 a-B - 548a3d25dbf0
497 1 0 a-B - 548a3d25dbf0
495 0 0 a-A - 054250a37db4
498 0 0 a-A - 054250a37db4
496 $ hgph -R ../alpha # a-H should have been synced to 0
499 $ hgph -R ../alpha # a-H should have been synced to 0
497 10 0 a-H - 967b449fbc94
500 10 0 a-H - 967b449fbc94
498 9 1 a-G - 3e27b6f1eee1
501 9 1 a-G - 3e27b6f1eee1
499 8 0 a-F - b740e3e5c05d
502 8 0 a-F - b740e3e5c05d
500 7 0 a-E - e9f537e46dea
503 7 0 a-E - e9f537e46dea
501 6 0 n-B - 145e75495359
504 6 0 n-B - 145e75495359
502 5 0 n-A - d6bcb4f74035
505 5 0 n-A - d6bcb4f74035
503 4 0 b-A - f54f1bb90ff3
506 4 0 b-A - f54f1bb90ff3
504 3 0 a-D - b555f63b6063
507 3 0 a-D - b555f63b6063
505 2 0 a-C - 54acac6f23ab
508 2 0 a-C - 54acac6f23ab
506 1 0 a-B - 548a3d25dbf0
509 1 0 a-B - 548a3d25dbf0
507 0 0 a-A - 054250a37db4
510 0 0 a-A - 054250a37db4
508
511
512
513 Discovery locally secret changeset on a remote repository:
514
515 - should make it non-secret
516
517 $ cd ../alpha
518 $ mkcommit A-secret --config phases.new-commit=2
519 $ hgph
520 11 2 A-secret - 435b5d83910c
521 10 0 a-H - 967b449fbc94
522 9 1 a-G - 3e27b6f1eee1
523 8 0 a-F - b740e3e5c05d
524 7 0 a-E - e9f537e46dea
525 6 0 n-B - 145e75495359
526 5 0 n-A - d6bcb4f74035
527 4 0 b-A - f54f1bb90ff3
528 3 0 a-D - b555f63b6063
529 2 0 a-C - 54acac6f23ab
530 1 0 a-B - 548a3d25dbf0
531 0 0 a-A - 054250a37db4
532 $ hg bundle --base 'parents(.)' -r . ../secret-bundle.hg
533 1 changesets found
534 $ hg -R ../mu unbundle ../secret-bundle.hg
535 adding changesets
536 adding manifests
537 adding file changes
538 added 1 changesets with 1 changes to 1 files
539 (run 'hg update' to get a working copy)
540 $ hgph -R ../mu
541 10 1 A-secret - 435b5d83910c
542 9 0 a-H - 967b449fbc94
543 8 0 a-F - b740e3e5c05d
544 7 0 a-E - e9f537e46dea
545 6 0 n-B - 145e75495359
546 5 0 n-A - d6bcb4f74035
547 4 0 a-D - b555f63b6063
548 3 0 a-C - 54acac6f23ab
549 2 0 b-A - f54f1bb90ff3
550 1 0 a-B - 548a3d25dbf0
551 0 0 a-A - 054250a37db4
552 $ hg pull ../mu
553 pulling from ../mu
554 searching for changes
555 no changes found
556 $ hgph
557 11 1 A-secret - 435b5d83910c
558 10 0 a-H - 967b449fbc94
559 9 1 a-G - 3e27b6f1eee1
560 8 0 a-F - b740e3e5c05d
561 7 0 a-E - e9f537e46dea
562 6 0 n-B - 145e75495359
563 5 0 n-A - d6bcb4f74035
564 4 0 b-A - f54f1bb90ff3
565 3 0 a-D - b555f63b6063
566 2 0 a-C - 54acac6f23ab
567 1 0 a-B - 548a3d25dbf0
568 0 0 a-A - 054250a37db4
569
570
@@ -1,90 +1,136 b''
1 $ alias hglog='hg log --template "{rev} {phase} {desc}\n"'
1 $ alias hglog='hg log --template "{rev} {phase} {desc}\n"'
2 $ mkcommit() {
2 $ mkcommit() {
3 > echo "$1" > "$1"
3 > echo "$1" > "$1"
4 > hg add "$1"
4 > hg add "$1"
5 > message="$1"
5 > message="$1"
6 > shift
6 > shift
7 > hg ci -m "$message" $*
7 > hg ci -m "$message" $*
8 > }
8 > }
9
9
10 $ hg init initialrepo
10 $ hg init initialrepo
11 $ cd initialrepo
11 $ cd initialrepo
12 $ mkcommit A
12 $ mkcommit A
13
13
14 New commit are draft by default
14 New commit are draft by default
15
15
16 $ hglog
16 $ hglog
17 0 1 A
17 0 1 A
18
18
19 Following commit are draft too
19 Following commit are draft too
20
20
21 $ mkcommit B
21 $ mkcommit B
22
22
23 $ hglog
23 $ hglog
24 1 1 B
24 1 1 B
25 0 1 A
25 0 1 A
26
26
27 Draft commit are properly created over public one:
27 Draft commit are properly created over public one:
28
28
29 $ hg pull -q . # XXX use the dedicated phase command once available
29 $ hg pull -q . # XXX use the dedicated phase command once available
30 $ hglog
30 $ hglog
31 1 0 B
31 1 0 B
32 0 0 A
32 0 0 A
33
33
34 $ mkcommit C
34 $ mkcommit C
35 $ mkcommit D
35 $ mkcommit D
36
36
37 $ hglog
37 $ hglog
38 3 1 D
38 3 1 D
39 2 1 C
39 2 1 C
40 1 0 B
40 1 0 B
41 0 0 A
41 0 0 A
42
42
43 Test creating changeset as secret
43 Test creating changeset as secret
44
44
45 $ mkcommit E --config phases.new-commit=2
45 $ mkcommit E --config phases.new-commit=2
46 $ hglog
46 $ hglog
47 4 2 E
47 4 2 E
48 3 1 D
48 3 1 D
49 2 1 C
49 2 1 C
50 1 0 B
50 1 0 B
51 0 0 A
51 0 0 A
52
52
53 Test the secret property is inherited
53 Test the secret property is inherited
54
54
55 $ mkcommit H
55 $ mkcommit H
56 $ hglog
56 $ hglog
57 5 2 H
57 5 2 H
58 4 2 E
58 4 2 E
59 3 1 D
59 3 1 D
60 2 1 C
60 2 1 C
61 1 0 B
61 1 0 B
62 0 0 A
62 0 0 A
63
63
64 Even on merge
64 Even on merge
65
65
66 $ hg up -q 1
66 $ hg up -q 1
67 $ mkcommit "B'"
67 $ mkcommit "B'"
68 created new head
68 created new head
69 $ hglog
69 $ hglog
70 6 1 B'
70 6 1 B'
71 5 2 H
71 5 2 H
72 4 2 E
72 4 2 E
73 3 1 D
73 3 1 D
74 2 1 C
74 2 1 C
75 1 0 B
75 1 0 B
76 0 0 A
76 0 0 A
77 $ hg merge 4 # E
77 $ hg merge 4 # E
78 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
78 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
79 (branch merge, don't forget to commit)
79 (branch merge, don't forget to commit)
80 $ hg ci -m "merge B' and E"
80 $ hg ci -m "merge B' and E"
81 $ hglog
81 $ hglog
82 7 2 merge B' and E
82 7 2 merge B' and E
83 6 1 B'
83 6 1 B'
84 5 2 H
84 5 2 H
85 4 2 E
85 4 2 E
86 3 1 D
86 3 1 D
87 2 1 C
87 2 1 C
88 1 0 B
88 1 0 B
89 0 0 A
89 0 0 A
90
90
91 Test secret changeset are not pushed
92
93 $ hg init ../push-dest
94 $ hg push ../push-dest -f # force because we push multiple heads
95 pushing to ../push-dest
96 searching for changes
97 adding changesets
98 adding manifests
99 adding file changes
100 added 5 changesets with 5 changes to 5 files (+1 heads)
101 $ hglog
102 7 2 merge B' and E
103 6 0 B'
104 5 2 H
105 4 2 E
106 3 0 D
107 2 0 C
108 1 0 B
109 0 0 A
110 $ cd ../push-dest
111 $ hglog
112 4 0 B'
113 3 0 D
114 2 0 C
115 1 0 B
116 0 0 A
117 $ cd ..
118
119 Test secret changeset are not pull
120
121 $ hg init pull-dest
122 $ cd pull-dest
123 $ hg pull ../initialrepo
124 pulling from ../initialrepo
125 requesting all changes
126 adding changesets
127 adding manifests
128 adding file changes
129 added 5 changesets with 5 changes to 5 files (+1 heads)
130 (run 'hg heads' to see heads, 'hg merge' to merge)
131 $ hglog
132 4 0 B'
133 3 0 D
134 2 0 C
135 1 0 B
136 0 0 A
General Comments 0
You need to be logged in to leave comments. Login now