@@ -1,352 +1,348 @@
 # discovery.py - protocol changeset discovery functions
 #
 # Copyright 2010 Matt Mackall <mpm@selenic.com>
 #
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.

 from node import nullid, short
 from i18n import _
 import util, error

 def findincoming(repo, remote, base=None, heads=None, force=False):
     """Return list of roots of the subsets of missing nodes from remote

     If base dict is specified, assume that these nodes and their parents
     exist on the remote side and that no child of a node of base exists
     in both remote and repo.
     Furthermore base will be updated to include the nodes that exists
     in repo and remote but no children exists in repo and remote.
     If a list of heads is specified, return only nodes which are heads
     or ancestors of these heads.

     All the ancestors of base are in repo and in remote.
     All the descendants of the list returned are missing in repo.
     (and so we know that the rest of the nodes are missing in remote, see
     outgoing)
     """
     return findcommonincoming(repo, remote, base, heads, force)[1]

 def findcommonincoming(repo, remote, base=None, heads=None, force=False):
     """Return a tuple (common, missing roots, heads) used to identify
     missing nodes from remote.

     If base dict is specified, assume that these nodes and their parents
     exist on the remote side and that no child of a node of base exists
     in both remote and repo.
     Furthermore base will be updated to include the nodes that exists
     in repo and remote but no children exists in both repo and remote.
     In other words, base is the set of heads of the DAG resulting from
     the intersection of the nodes from repo and remote.
     If a list of heads is specified, return only nodes which are heads
     or ancestors of these heads.

     All the ancestors of base are in repo and in remote.
     """
     m = repo.changelog.nodemap
     search = []
     fetch = set()
     seen = set()
     seenbranch = set()
     if base is None:
         base = {}

     if not heads:
         heads = remote.heads()

     if repo.changelog.tip() == nullid:
         base[nullid] = 1
         if heads != [nullid]:
             return [nullid], [nullid], list(heads)
         return [nullid], [], []

     # assume we're closer to the tip than the root
     # and start by examining the heads
     repo.ui.status(_("searching for changes\n"))

     unknown = []
     for h in heads:
         if h not in m:
             unknown.append(h)
         else:
             base[h] = 1

     heads = unknown
     if not unknown:
         return base.keys(), [], []

     req = set(unknown)
     reqcnt = 0

     # search through remote branches
     # a 'branch' here is a linear segment of history, with four parts:
     # head, root, first parent, second parent
     # (a branch always has two parents (or none) by definition)
     unknown = remote.branches(unknown)
     while unknown:
         r = []
         while unknown:
             n = unknown.pop(0)
             if n[0] in seen:
                 continue

             repo.ui.debug("examining %s:%s\n"
                           % (short(n[0]), short(n[1])))
             if n[0] == nullid: # found the end of the branch
                 pass
             elif n in seenbranch:
                 repo.ui.debug("branch already found\n")
                 continue
             elif n[1] and n[1] in m: # do we know the base?
                 repo.ui.debug("found incomplete branch %s:%s\n"
                               % (short(n[0]), short(n[1])))
                 search.append(n[0:2]) # schedule branch range for scanning
                 seenbranch.add(n)
             else:
                 if n[1] not in seen and n[1] not in fetch:
                     if n[2] in m and n[3] in m:
                         repo.ui.debug("found new changeset %s\n" %
                                       short(n[1]))
                         fetch.add(n[1]) # earliest unknown
                     for p in n[2:4]:
                         if p in m:
                             base[p] = 1 # latest known

                 for p in n[2:4]:
                     if p not in req and p not in m:
                         r.append(p)
                         req.add(p)
             seen.add(n[0])

         if r:
             reqcnt += 1
             repo.ui.progress(_('searching'), reqcnt, unit=_('queries'))
             repo.ui.debug("request %d: %s\n" %
                           (reqcnt, " ".join(map(short, r))))
             for p in xrange(0, len(r), 10):
                 for b in remote.branches(r[p:p + 10]):
                     repo.ui.debug("received %s:%s\n" %
                                   (short(b[0]), short(b[1])))
                     unknown.append(b)

     # do binary search on the branches we found
     while search:
         newsearch = []
         reqcnt += 1
         repo.ui.progress(_('searching'), reqcnt, unit=_('queries'))
         for n, l in zip(search, remote.between(search)):
             l.append(n[1])
             p = n[0]
             f = 1
             for i in l:
                 repo.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
                 if i in m:
                     if f <= 2:
                         repo.ui.debug("found new branch changeset %s\n" %
                                       short(p))
                         fetch.add(p)
                         base[i] = 1
                     else:
                         repo.ui.debug("narrowed branch search to %s:%s\n"
                                       % (short(p), short(i)))
                         newsearch.append((p, i))
                     break
                 p, f = i, f * 2
         search = newsearch

     # sanity check our fetch list
     for f in fetch:
         if f in m:
             raise error.RepoError(_("already have changeset ")
                                   + short(f[:4]))

     if base.keys() == [nullid]:
         if force:
             repo.ui.warn(_("warning: repository is unrelated\n"))
         else:
             raise util.Abort(_("repository is unrelated"))

     repo.ui.debug("found new changesets starting at " +
                   " ".join([short(f) for f in fetch]) + "\n")

     repo.ui.progress(_('searching'), None)
     repo.ui.debug("%d total queries\n" % reqcnt)

     return base.keys(), list(fetch), heads

 def findoutgoing(repo, remote, base=None, heads=None, force=False):
     """Return list of nodes that are roots of subsets not in remote

     If base dict is specified, assume that these nodes and their parents
     exist on the remote side.
     If a list of heads is specified, return only nodes which are heads
     or ancestors of these heads, and return a second element which
     contains all remote heads which get new children.
     """
     if base is None:
         base = {}
         findincoming(repo, remote, base, heads, force=force)

     repo.ui.debug("common changesets up to "
                   + " ".join(map(short, base.keys())) + "\n")

     remain = set(repo.changelog.nodemap)

     # prune everything remote has from the tree
     remain.remove(nullid)
     remove = base.keys()
     while remove:
         n = remove.pop(0)
         if n in remain:
             remain.remove(n)
             for p in repo.changelog.parents(n):
                 remove.append(p)

     # find every node whose parents have been pruned
     subset = []
     # find every remote head that will get new children
     updated_heads = set()
     for n in remain:
         p1, p2 = repo.changelog.parents(n)
         if p1 not in remain and p2 not in remain:
             subset.append(n)
         if heads:
             if p1 in heads:
                 updated_heads.add(p1)
             if p2 in heads:
                 updated_heads.add(p2)

     # this is the set of all roots we have to push
     if heads:
         return subset, list(updated_heads)
     else:
         return subset

 def prepush(repo, remote, force, revs, newbranch):
     '''Analyze the local and remote repositories and determine which
     changesets need to be pushed to the remote. Return value depends
     on circumstances:

     If we are not going to push anything, return a tuple (None,
     outgoing) where outgoing is 0 if there are no outgoing
     changesets and 1 if there are, but we refuse to push them
     (e.g. would create new remote heads).

     Otherwise, return a tuple (changegroup, remoteheads), where
     changegroup is a readable file-like object whose read() returns
     successive changegroup chunks ready to be sent over the wire and
     remoteheads is the list of remote heads.'''
     common = {}
     remote_heads = remote.heads()
     inc = findincoming(repo, remote, common, remote_heads, force=force)

     cl = repo.changelog
     update, updated_heads = findoutgoing(repo, remote, common, remote_heads)
     outg, bases, heads = cl.nodesbetween(update, revs)

     if not bases:
         repo.ui.status(_("no changes found\n"))
         return None, 1

     if not force and remote_heads != [nullid]:

         def fail_multiple_heads(unsynced, branch=None):
             if branch:
msg = _(" |
|
255 | msg = _("push creates new remote heads " | |
256 |
" |
|
256 | "on branch '%s'!") % branch | |
257 | else: |
|
257 | else: | |
258 |
msg = _(" |
|
258 | msg = _("push creates new remote heads!") | |
259 | repo.ui.warn(msg) |
|
259 | ||
260 | if unsynced: |
|
260 | if unsynced: | |
261 |
|
|
261 | hint = _("you should pull and merge or use push -f to force") | |
262 | " use push -f to force)\n")) |
|
|||
263 | else: |
|
262 | else: | |
264 |
|
|
263 | hint = _("did you forget to merge? use push -f to force") | |
265 | " use push -f to force)\n")) |
|
264 | raise util.Abort(msg, hint=hint) | |
266 | return None, 0 |
|
|||

         if remote.capable('branchmap'):
             # Check for each named branch if we're creating new remote heads.
             # To be a remote head after push, node must be either:
             # - unknown locally
             # - a local outgoing head descended from update
             # - a remote head that's known locally and not
             #   ancestral to an outgoing head
             #
             # New named branches cannot be created without --force.

             # 1. Create set of branches involved in the push.
             branches = set(repo[n].branch() for n in outg)

             # 2. Check for new branches on the remote.
             remotemap = remote.branchmap()
             newbranches = branches - set(remotemap)
             if newbranches and not newbranch: # new branch requires --new-branch
                 branchnames = ', '.join(sorted(newbranches))
-                repo.ui.warn(_("abort: push creates "
-                               "new remote branches: %s!\n")
-                             % branchnames)
-                repo.ui.status(_("(use 'hg push --new-branch' to create new "
-                                 "remote branches)\n"))
-                return None, 0
+                raise util.Abort(_("push creates new remote branches: %s!")
+                                 % branchnames,
+                                 hint=_("use 'hg push --new-branch' to create"
+                                        " new remote branches"))
             branches.difference_update(newbranches)

             # 3. Construct the initial oldmap and newmap dicts.
             # They contain information about the remote heads before and
             # after the push, respectively.
             # Heads not found locally are not included in either dict,
             # since they won't be affected by the push.
             # unsynced contains all branches with incoming changesets.
             oldmap = {}
             newmap = {}
             unsynced = set()
             for branch in branches:
                 remoteheads = remotemap[branch]
                 prunedheads = [h for h in remoteheads if h in cl.nodemap]
                 oldmap[branch] = prunedheads
                 newmap[branch] = list(prunedheads)
                 if len(remoteheads) > len(prunedheads):
                     unsynced.add(branch)

             # 4. Update newmap with outgoing changes.
             # This will possibly add new heads and remove existing ones.
             ctxgen = (repo[n] for n in outg)
             repo._updatebranchcache(newmap, ctxgen)

             # 5. Check for new heads.
             # If there are more heads after the push than before, a suitable
             # warning, depending on unsynced status, is displayed.
             for branch in branches:
                 if len(newmap[branch]) > len(oldmap[branch]):
                     return fail_multiple_heads(branch in unsynced, branch)

             # 6. Check for unsynced changes on involved branches.
             if unsynced:
                 repo.ui.warn(_("note: unsynced remote changes!\n"))

         else:
             # Old servers: Check for new topological heads.
             # Code based on _updatebranchcache.
             newheads = set(h for h in remote_heads if h in cl.nodemap)
             oldheadcnt = len(newheads)
             newheads.update(outg)
             if len(newheads) > 1:
                 for latest in reversed(outg):
                     if latest not in newheads:
                         continue
                     minhrev = min(cl.rev(h) for h in newheads)
                     reachable = cl.reachable(latest, cl.node(minhrev))
                     reachable.remove(latest)
                     newheads.difference_update(reachable)
             if len(newheads) > oldheadcnt:
                 return fail_multiple_heads(inc)
             if inc:
                 repo.ui.warn(_("note: unsynced remote changes!\n"))

     if revs is None:
         # use the fast path, no race possible on push
         nodes = repo.changelog.findmissing(common.keys())
         cg = repo._changegroup(nodes, 'push')
     else:
         cg = repo.changegroupsubset(update, revs, 'push')
     return cg, remote_heads
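
The hunk above replaces the old warn-plus-status-plus-return error reporting in prepush with a single util.Abort that carries the advisory text as a `hint` keyword; the dispatch.py hunk that follows then prints that hint on its own line after the abort message. Below is a minimal illustrative sketch of that flow, not part of the changeset: it assumes util.Abort accepts the `hint` keyword and exposes it as an `inst.hint` attribute (which is exactly what the dispatch.py changes check for), and the helper name `push_or_abort` is hypothetical.

# Illustrative sketch only -- not part of this changeset.
from mercurial import util
from mercurial.i18n import _

def push_or_abort(unsynced):
    # mirrors fail_multiple_heads() above: one message, one optional hint
    msg = _("push creates new remote heads!")
    if unsynced:
        hint = _("you should pull and merge or use push -f to force")
    else:
        hint = _("did you forget to merge? use push -f to force")
    raise util.Abort(msg, hint=hint)

try:
    push_or_abort(unsynced=False)
except util.Abort, inst:              # Python 2 syntax, as in dispatch.py
    print "abort: %s" % inst          # dispatch.py writes this line to stderr
    if inst.hint:
        print "(%s)" % inst.hint      # the new hint line, in parentheses

Under these assumptions the sketch would print "abort: push creates new remote heads!" followed by "(did you forget to merge? use push -f to force)", matching the two-line output the dispatch.py changes below are meant to produce.
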
@@ -1,546 +1,550 @@
 # dispatch.py - command dispatching for mercurial
 #
 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
 #
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.

 from i18n import _
 import os, sys, atexit, signal, pdb, socket, errno, shlex, time, traceback
 import util, commands, hg, fancyopts, extensions, hook, error
 import cmdutil, encoding
 import ui as uimod

 def run():
     "run the command in sys.argv"
     sys.exit(dispatch(sys.argv[1:]))

 def dispatch(args):
     "run the command specified in args"
     try:
         u = uimod.ui()
         if '--traceback' in args:
             u.setconfig('ui', 'traceback', 'on')
     except util.Abort, inst:
         sys.stderr.write(_("abort: %s\n") % inst)
+        if inst.hint:
+            sys.stdout.write(_("(%s)\n") % inst.hint)
         return -1
     except error.ParseError, inst:
         if len(inst.args) > 1:
             sys.stderr.write(_("hg: parse error at %s: %s\n") %
                              (inst.args[1], inst.args[0]))
         else:
             sys.stderr.write(_("hg: parse error: %s\n") % inst.args[0])
         return -1
     return _runcatch(u, args)

 def _runcatch(ui, args):
     def catchterm(*args):
         raise error.SignalInterrupt

     try:
         for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
             num = getattr(signal, name, None)
             if num:
                 signal.signal(num, catchterm)
     except ValueError:
         pass # happens if called in a thread

     try:
         try:
             # enter the debugger before command execution
             if '--debugger' in args:
                 ui.warn(_("entering debugger - "
                           "type c to continue starting hg or h for help\n"))
                 pdb.set_trace()
             try:
                 return _dispatch(ui, args)
             finally:
                 ui.flush()
         except:
             # enter the debugger when we hit an exception
             if '--debugger' in args:
                 traceback.print_exc()
                 pdb.post_mortem(sys.exc_info()[2])
             ui.traceback()
             raise

     # Global exception handling, alphabetically
     # Mercurial-specific first, followed by built-in and library exceptions
     except error.AmbiguousCommand, inst:
         ui.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
                 (inst.args[0], " ".join(inst.args[1])))
     except error.ParseError, inst:
         if len(inst.args) > 1:
             ui.warn(_("hg: parse error at %s: %s\n") %
                     (inst.args[1], inst.args[0]))
         else:
             ui.warn(_("hg: parse error: %s\n") % inst.args[0])
         return -1
     except error.LockHeld, inst:
         if inst.errno == errno.ETIMEDOUT:
             reason = _('timed out waiting for lock held by %s') % inst.locker
         else:
             reason = _('lock held by %s') % inst.locker
         ui.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
     except error.LockUnavailable, inst:
         ui.warn(_("abort: could not lock %s: %s\n") %
                 (inst.desc or inst.filename, inst.strerror))
     except error.CommandError, inst:
         if inst.args[0]:
             ui.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
             commands.help_(ui, inst.args[0])
         else:
             ui.warn(_("hg: %s\n") % inst.args[1])
             commands.help_(ui, 'shortlist')
     except error.RepoError, inst:
         ui.warn(_("abort: %s!\n") % inst)
     except error.ResponseError, inst:
         ui.warn(_("abort: %s") % inst.args[0])
         if not isinstance(inst.args[1], basestring):
             ui.warn(" %r\n" % (inst.args[1],))
         elif not inst.args[1]:
             ui.warn(_(" empty string\n"))
         else:
             ui.warn("\n%r\n" % util.ellipsis(inst.args[1]))
     except error.RevlogError, inst:
         ui.warn(_("abort: %s!\n") % inst)
     except error.SignalInterrupt:
         ui.warn(_("killed!\n"))
     except error.UnknownCommand, inst:
         ui.warn(_("hg: unknown command '%s'\n") % inst.args[0])
         try:
             # check if the command is in a disabled extension
             # (but don't check for extensions themselves)
             commands.help_(ui, inst.args[0], unknowncmd=True)
         except error.UnknownCommand:
             commands.help_(ui, 'shortlist')
     except util.Abort, inst:
         ui.warn(_("abort: %s\n") % inst)
+        if inst.hint:
+            ui.status(_("(%s)\n") % inst.hint)
119 | except ImportError, inst: |
|
123 | except ImportError, inst: | |
120 | ui.warn(_("abort: %s!\n") % inst) |
|
124 | ui.warn(_("abort: %s!\n") % inst) | |
121 | m = str(inst).split()[-1] |
|
125 | m = str(inst).split()[-1] | |
122 | if m in "mpatch bdiff".split(): |
|
126 | if m in "mpatch bdiff".split(): | |
123 | ui.warn(_("(did you forget to compile extensions?)\n")) |
|
127 | ui.warn(_("(did you forget to compile extensions?)\n")) | |
124 | elif m in "zlib".split(): |
|
128 | elif m in "zlib".split(): | |
125 | ui.warn(_("(is your Python install correct?)\n")) |
|
129 | ui.warn(_("(is your Python install correct?)\n")) | |
126 | except IOError, inst: |
|
130 | except IOError, inst: | |
127 | if hasattr(inst, "code"): |
|
131 | if hasattr(inst, "code"): | |
128 | ui.warn(_("abort: %s\n") % inst) |
|
132 | ui.warn(_("abort: %s\n") % inst) | |
129 | elif hasattr(inst, "reason"): |
|
133 | elif hasattr(inst, "reason"): | |
130 | try: # usually it is in the form (errno, strerror) |
|
134 | try: # usually it is in the form (errno, strerror) | |
131 | reason = inst.reason.args[1] |
|
135 | reason = inst.reason.args[1] | |
132 | except: # it might be anything, for example a string |
|
136 | except: # it might be anything, for example a string | |
133 | reason = inst.reason |
|
137 | reason = inst.reason | |
134 | ui.warn(_("abort: error: %s\n") % reason) |
|
138 | ui.warn(_("abort: error: %s\n") % reason) | |
135 | elif hasattr(inst, "args") and inst.args[0] == errno.EPIPE: |
|
139 | elif hasattr(inst, "args") and inst.args[0] == errno.EPIPE: | |
136 | if ui.debugflag: |
|
140 | if ui.debugflag: | |
137 | ui.warn(_("broken pipe\n")) |
|
141 | ui.warn(_("broken pipe\n")) | |
138 | elif getattr(inst, "strerror", None): |
|
142 | elif getattr(inst, "strerror", None): | |
139 | if getattr(inst, "filename", None): |
|
143 | if getattr(inst, "filename", None): | |
140 | ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename)) |
|
144 | ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename)) | |
141 | else: |
|
145 | else: | |
142 | ui.warn(_("abort: %s\n") % inst.strerror) |
|
146 | ui.warn(_("abort: %s\n") % inst.strerror) | |
143 | else: |
|
147 | else: | |
144 | raise |
|
148 | raise | |
145 | except OSError, inst: |
|
149 | except OSError, inst: | |
146 | if getattr(inst, "filename", None): |
|
150 | if getattr(inst, "filename", None): | |
147 | ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename)) |
|
151 | ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename)) | |
148 | else: |
|
152 | else: | |
149 | ui.warn(_("abort: %s\n") % inst.strerror) |
|
153 | ui.warn(_("abort: %s\n") % inst.strerror) | |
150 | except KeyboardInterrupt: |
|
154 | except KeyboardInterrupt: | |
151 | try: |
|
155 | try: | |
152 | ui.warn(_("interrupted!\n")) |
|
156 | ui.warn(_("interrupted!\n")) | |
153 | except IOError, inst: |
|
157 | except IOError, inst: | |
154 | if inst.errno == errno.EPIPE: |
|
158 | if inst.errno == errno.EPIPE: | |
155 | if ui.debugflag: |
|
159 | if ui.debugflag: | |
156 | ui.warn(_("\nbroken pipe\n")) |
|
160 | ui.warn(_("\nbroken pipe\n")) | |
157 | else: |
|
161 | else: | |
158 | raise |
|
162 | raise | |
159 | except MemoryError: |
|
163 | except MemoryError: | |
160 | ui.warn(_("abort: out of memory\n")) |
|
164 | ui.warn(_("abort: out of memory\n")) | |
161 | except SystemExit, inst: |
|
165 | except SystemExit, inst: | |
162 | # Commands shouldn't sys.exit directly, but give a return code. |
|
166 | # Commands shouldn't sys.exit directly, but give a return code. | |
163 | # Just in case catch this and and pass exit code to caller. |
|
167 | # Just in case catch this and and pass exit code to caller. | |
164 | return inst.code |
|
168 | return inst.code | |
165 | except socket.error, inst: |
|
169 | except socket.error, inst: | |
166 | ui.warn(_("abort: %s\n") % inst.args[-1]) |
|
170 | ui.warn(_("abort: %s\n") % inst.args[-1]) | |
167 | except: |
|
171 | except: | |
168 | ui.warn(_("** unknown exception encountered, details follow\n")) |
|
172 | ui.warn(_("** unknown exception encountered, details follow\n")) | |
169 | ui.warn(_("** report bug details to " |
|
173 | ui.warn(_("** report bug details to " | |
170 | "http://mercurial.selenic.com/bts/\n")) |
|
174 | "http://mercurial.selenic.com/bts/\n")) | |
171 | ui.warn(_("** or mercurial@selenic.com\n")) |
|
175 | ui.warn(_("** or mercurial@selenic.com\n")) | |
172 | ui.warn(_("** Python %s\n") % sys.version.replace('\n', '')) |
|
176 | ui.warn(_("** Python %s\n") % sys.version.replace('\n', '')) | |
173 | ui.warn(_("** Mercurial Distributed SCM (version %s)\n") |
|
177 | ui.warn(_("** Mercurial Distributed SCM (version %s)\n") | |
174 | % util.version()) |
|
178 | % util.version()) | |
175 | ui.warn(_("** Extensions loaded: %s\n") |
|
179 | ui.warn(_("** Extensions loaded: %s\n") | |
176 | % ", ".join([x[0] for x in extensions.extensions()])) |
|
180 | % ", ".join([x[0] for x in extensions.extensions()])) | |
177 | raise |
|
181 | raise | |
178 |
|
182 | |||
179 | return -1 |
|
183 | return -1 | |
180 |
|
184 | |||
181 | def aliasargs(fn): |
|
185 | def aliasargs(fn): | |
182 | if hasattr(fn, 'args'): |
|
186 | if hasattr(fn, 'args'): | |
183 | return fn.args |
|
187 | return fn.args | |
184 | return [] |
|
188 | return [] | |
185 |
|
189 | |||
186 | class cmdalias(object): |
|
190 | class cmdalias(object): | |
187 | def __init__(self, name, definition, cmdtable): |
|
191 | def __init__(self, name, definition, cmdtable): | |
188 | self.name = name |
|
192 | self.name = name | |
189 | self.definition = definition |
|
193 | self.definition = definition | |
190 | self.args = [] |
|
194 | self.args = [] | |
191 | self.opts = [] |
|
195 | self.opts = [] | |
192 | self.help = '' |
|
196 | self.help = '' | |
193 | self.norepo = True |
|
197 | self.norepo = True | |
194 | self.badalias = False |
|
198 | self.badalias = False | |
195 |
|
199 | |||
196 | try: |
|
200 | try: | |
197 | cmdutil.findcmd(self.name, cmdtable, True) |
|
201 | cmdutil.findcmd(self.name, cmdtable, True) | |
198 | self.shadows = True |
|
202 | self.shadows = True | |
199 | except error.UnknownCommand: |
|
203 | except error.UnknownCommand: | |
200 | self.shadows = False |
|
204 | self.shadows = False | |
201 |
|
205 | |||
202 | if not self.definition: |
|
206 | if not self.definition: | |
203 | def fn(ui, *args): |
|
207 | def fn(ui, *args): | |
204 | ui.warn(_("no definition for alias '%s'\n") % self.name) |
|
208 | ui.warn(_("no definition for alias '%s'\n") % self.name) | |
205 | return 1 |
|
209 | return 1 | |
206 | self.fn = fn |
|
210 | self.fn = fn | |
207 | self.badalias = True |
|
211 | self.badalias = True | |
208 |
|
212 | |||
209 | return |
|
213 | return | |
210 |
|
214 | |||
211 | if self.definition.startswith('!'): |
|
215 | if self.definition.startswith('!'): | |
212 | def fn(ui, *args): |
|
216 | def fn(ui, *args): | |
213 | cmd = '%s %s' % (self.definition[1:], ' '.join(args)) |
|
217 | cmd = '%s %s' % (self.definition[1:], ' '.join(args)) | |
214 | return util.system(cmd) |
|
218 | return util.system(cmd) | |
215 | self.fn = fn |
|
219 | self.fn = fn | |
216 | return |
|
220 | return | |
217 |
|
221 | |||
218 | args = shlex.split(self.definition) |
|
222 | args = shlex.split(self.definition) | |
219 | cmd = args.pop(0) |
|
223 | cmd = args.pop(0) | |
220 | args = map(util.expandpath, args) |
|
224 | args = map(util.expandpath, args) | |
221 |
|
225 | |||
222 | try: |
|
226 | try: | |
223 | tableentry = cmdutil.findcmd(cmd, cmdtable, False)[1] |
|
227 | tableentry = cmdutil.findcmd(cmd, cmdtable, False)[1] | |
224 | if len(tableentry) > 2: |
|
228 | if len(tableentry) > 2: | |
225 | self.fn, self.opts, self.help = tableentry |
|
229 | self.fn, self.opts, self.help = tableentry | |
226 | else: |
|
230 | else: | |
227 | self.fn, self.opts = tableentry |
|
231 | self.fn, self.opts = tableentry | |
228 |
|
232 | |||
229 | self.args = aliasargs(self.fn) + args |
|
233 | self.args = aliasargs(self.fn) + args | |
230 | if cmd not in commands.norepo.split(' '): |
|
234 | if cmd not in commands.norepo.split(' '): | |
231 | self.norepo = False |
|
235 | self.norepo = False | |
232 | if self.help.startswith("hg " + cmd): |
|
236 | if self.help.startswith("hg " + cmd): | |
233 | # drop prefix in old-style help lines so hg shows the alias |
|
237 | # drop prefix in old-style help lines so hg shows the alias | |
234 | self.help = self.help[4 + len(cmd):] |
|
238 | self.help = self.help[4 + len(cmd):] | |
235 | self.__doc__ = self.fn.__doc__ |
|
239 | self.__doc__ = self.fn.__doc__ | |
236 |
|
240 | |||
237 | except error.UnknownCommand: |
|
241 | except error.UnknownCommand: | |
238 | def fn(ui, *args): |
|
242 | def fn(ui, *args): | |
239 | ui.warn(_("alias '%s' resolves to unknown command '%s'\n") \ |
|
243 | ui.warn(_("alias '%s' resolves to unknown command '%s'\n") \ | |
240 | % (self.name, cmd)) |
|
244 | % (self.name, cmd)) | |
241 | try: |
|
245 | try: | |
242 | # check if the command is in a disabled extension |
|
246 | # check if the command is in a disabled extension | |
243 | commands.help_(ui, cmd, unknowncmd=True) |
|
247 | commands.help_(ui, cmd, unknowncmd=True) | |
244 | except error.UnknownCommand: |
|
248 | except error.UnknownCommand: | |
245 | pass |
|
249 | pass | |
246 | return 1 |
|
250 | return 1 | |
247 | self.fn = fn |
|
251 | self.fn = fn | |
248 | self.badalias = True |
|
252 | self.badalias = True | |
249 | except error.AmbiguousCommand: |
|
253 | except error.AmbiguousCommand: | |
250 | def fn(ui, *args): |
|
254 | def fn(ui, *args): | |
251 | ui.warn(_("alias '%s' resolves to ambiguous command '%s'\n") \ |
|
255 | ui.warn(_("alias '%s' resolves to ambiguous command '%s'\n") \ | |
252 | % (self.name, cmd)) |
|
256 | % (self.name, cmd)) | |
253 | return 1 |
|
257 | return 1 | |
254 | self.fn = fn |
|
258 | self.fn = fn | |
255 | self.badalias = True |
|
259 | self.badalias = True | |
256 |
|
260 | |||
257 | def __call__(self, ui, *args, **opts): |
|
261 | def __call__(self, ui, *args, **opts): | |
258 | if self.shadows: |
|
262 | if self.shadows: | |
259 | ui.debug("alias '%s' shadows command\n" % self.name) |
|
263 | ui.debug("alias '%s' shadows command\n" % self.name) | |
260 |
|
264 | |||
261 | return self.fn(ui, *args, **opts) |
|
265 | return self.fn(ui, *args, **opts) | |
262 |
|
266 | |||
263 | def addaliases(ui, cmdtable): |
|
267 | def addaliases(ui, cmdtable): | |
264 | # aliases are processed after extensions have been loaded, so they |
|
268 | # aliases are processed after extensions have been loaded, so they | |
265 | # may use extension commands. Aliases can also use other alias definitions, |
|
269 | # may use extension commands. Aliases can also use other alias definitions, | |
266 | # but only if they have been defined prior to the current definition. |
|
270 | # but only if they have been defined prior to the current definition. | |
267 | for alias, definition in ui.configitems('alias'): |
|
271 | for alias, definition in ui.configitems('alias'): | |
268 | aliasdef = cmdalias(alias, definition, cmdtable) |
|
272 | aliasdef = cmdalias(alias, definition, cmdtable) | |
269 | cmdtable[alias] = (aliasdef, aliasdef.opts, aliasdef.help) |
|
273 | cmdtable[alias] = (aliasdef, aliasdef.opts, aliasdef.help) | |
270 | if aliasdef.norepo: |
|
274 | if aliasdef.norepo: | |
271 | commands.norepo += ' %s' % alias |
|
275 | commands.norepo += ' %s' % alias | |
272 |
|
276 | |||
273 | def _parse(ui, args): |
|
277 | def _parse(ui, args): | |
274 | options = {} |
|
278 | options = {} | |
275 | cmdoptions = {} |
|
279 | cmdoptions = {} | |
276 |
|
280 | |||
277 | try: |
|
281 | try: | |
278 | args = fancyopts.fancyopts(args, commands.globalopts, options) |
|
282 | args = fancyopts.fancyopts(args, commands.globalopts, options) | |
279 | except fancyopts.getopt.GetoptError, inst: |
|
283 | except fancyopts.getopt.GetoptError, inst: | |
280 | raise error.CommandError(None, inst) |
|
284 | raise error.CommandError(None, inst) | |
281 |
|
285 | |||
282 | if args: |
|
286 | if args: | |
283 | cmd, args = args[0], args[1:] |
|
287 | cmd, args = args[0], args[1:] | |
284 | aliases, entry = cmdutil.findcmd(cmd, commands.table, |
|
288 | aliases, entry = cmdutil.findcmd(cmd, commands.table, | |
285 | ui.config("ui", "strict")) |
|
289 | ui.config("ui", "strict")) | |
286 | cmd = aliases[0] |
|
290 | cmd = aliases[0] | |
287 | args = aliasargs(entry[0]) + args |
|
291 | args = aliasargs(entry[0]) + args | |
288 | defaults = ui.config("defaults", cmd) |
|
292 | defaults = ui.config("defaults", cmd) | |
289 | if defaults: |
|
293 | if defaults: | |
290 | args = map(util.expandpath, shlex.split(defaults)) + args |
|
294 | args = map(util.expandpath, shlex.split(defaults)) + args | |
291 | c = list(entry[1]) |
|
295 | c = list(entry[1]) | |
292 | else: |
|
296 | else: | |
293 | cmd = None |
|
297 | cmd = None | |
294 | c = [] |
|
298 | c = [] | |
295 |
|
299 | |||
296 | # combine global options into local |
|
300 | # combine global options into local | |
297 | for o in commands.globalopts: |
|
301 | for o in commands.globalopts: | |
298 | c.append((o[0], o[1], options[o[1]], o[3])) |
|
302 | c.append((o[0], o[1], options[o[1]], o[3])) | |
299 |
|
303 | |||
300 | try: |
|
304 | try: | |
301 | args = fancyopts.fancyopts(args, c, cmdoptions, True) |
|
305 | args = fancyopts.fancyopts(args, c, cmdoptions, True) | |
302 | except fancyopts.getopt.GetoptError, inst: |
|
306 | except fancyopts.getopt.GetoptError, inst: | |
303 | raise error.CommandError(cmd, inst) |
|
307 | raise error.CommandError(cmd, inst) | |
304 |
|
308 | |||
305 | # separate global options back out |
|
309 | # separate global options back out | |
306 | for o in commands.globalopts: |
|
310 | for o in commands.globalopts: | |
307 | n = o[1] |
|
311 | n = o[1] | |
308 | options[n] = cmdoptions[n] |
|
312 | options[n] = cmdoptions[n] | |
309 | del cmdoptions[n] |
|
313 | del cmdoptions[n] | |
310 |
|
314 | |||
311 | return (cmd, cmd and entry[0] or None, args, options, cmdoptions) |
|
315 | return (cmd, cmd and entry[0] or None, args, options, cmdoptions) | |
312 |
|
316 | |||
313 | def _parseconfig(ui, config): |
|
317 | def _parseconfig(ui, config): | |
314 | """parse the --config options from the command line""" |
|
318 | """parse the --config options from the command line""" | |
315 | for cfg in config: |
|
319 | for cfg in config: | |
316 | try: |
|
320 | try: | |
317 | name, value = cfg.split('=', 1) |
|
321 | name, value = cfg.split('=', 1) | |
318 | section, name = name.split('.', 1) |
|
322 | section, name = name.split('.', 1) | |
319 | if not section or not name: |
|
323 | if not section or not name: | |
320 | raise IndexError |
|
324 | raise IndexError | |
321 | ui.setconfig(section, name, value) |
|
325 | ui.setconfig(section, name, value) | |
322 | except (IndexError, ValueError): |
|
326 | except (IndexError, ValueError): | |
323 | raise util.Abort(_('malformed --config option: %r ' |
|
327 | raise util.Abort(_('malformed --config option: %r ' | |
324 | '(use --config section.name=value)') % cfg) |
|
328 | '(use --config section.name=value)') % cfg) | |
325 |
|
329 | |||
326 | def _earlygetopt(aliases, args): |
|
330 | def _earlygetopt(aliases, args): | |
327 | """Return list of values for an option (or aliases). |
|
331 | """Return list of values for an option (or aliases). | |
328 |
|
332 | |||
329 | The values are listed in the order they appear in args. |
|
333 | The values are listed in the order they appear in args. | |
330 | The options and values are removed from args. |
|
334 | The options and values are removed from args. | |
331 | """ |
|
335 | """ | |
332 | try: |
|
336 | try: | |
333 | argcount = args.index("--") |
|
337 | argcount = args.index("--") | |
334 | except ValueError: |
|
338 | except ValueError: | |
335 | argcount = len(args) |
|
339 | argcount = len(args) | |
336 | shortopts = [opt for opt in aliases if len(opt) == 2] |
|
340 | shortopts = [opt for opt in aliases if len(opt) == 2] | |
337 | values = [] |
|
341 | values = [] | |
338 | pos = 0 |
|
342 | pos = 0 | |
339 | while pos < argcount: |
|
343 | while pos < argcount: | |
340 | if args[pos] in aliases: |
|
344 | if args[pos] in aliases: | |
341 | if pos + 1 >= argcount: |
|
345 | if pos + 1 >= argcount: | |
342 | # ignore and let getopt report an error if there is no value |
|
346 | # ignore and let getopt report an error if there is no value | |
343 | break |
|
347 | break | |
344 | del args[pos] |
|
348 | del args[pos] | |
345 | values.append(args.pop(pos)) |
|
349 | values.append(args.pop(pos)) | |
346 | argcount -= 2 |
|
350 | argcount -= 2 | |
347 | elif args[pos][:2] in shortopts: |
|
351 | elif args[pos][:2] in shortopts: | |
348 | # short option can have no following space, e.g. hg log -Rfoo |
|
352 | # short option can have no following space, e.g. hg log -Rfoo | |
349 | values.append(args.pop(pos)[2:]) |
|
353 | values.append(args.pop(pos)[2:]) | |
350 | argcount -= 1 |
|
354 | argcount -= 1 | |
351 | else: |
|
355 | else: | |
352 | pos += 1 |
|
356 | pos += 1 | |
     return values

 def runcommand(lui, repo, cmd, fullargs, ui, options, d, cmdpats, cmdoptions):
     # run pre-hook, and abort if it fails
     ret = hook.hook(lui, repo, "pre-%s" % cmd, False, args=" ".join(fullargs),
                     pats=cmdpats, opts=cmdoptions)
     if ret:
         return ret
     ret = _runcommand(ui, options, cmd, d)
     # run post-hook, passing command result
     hook.hook(lui, repo, "post-%s" % cmd, False, args=" ".join(fullargs),
               result=ret, pats=cmdpats, opts=cmdoptions)
     return ret

 _loaded = set()
 def _dispatch(ui, args):
     # read --config before doing anything else
     # (e.g. to change trust settings for reading .hg/hgrc)
     _parseconfig(ui, _earlygetopt(['--config'], args))

     # check for cwd
     cwd = _earlygetopt(['--cwd'], args)
     if cwd:
         os.chdir(cwd[-1])

     # read the local repository .hgrc into a local ui object
     path = cmdutil.findrepo(os.getcwd()) or ""
     if not path:
         lui = ui
     else:
         try:
             lui = ui.copy()
             lui.readconfig(os.path.join(path, ".hg", "hgrc"))
         except IOError:
             pass

     # now we can expand paths, even ones in .hg/hgrc
     rpath = _earlygetopt(["-R", "--repository", "--repo"], args)
     if rpath:
         path = lui.expandpath(rpath[-1])
         lui = ui.copy()
         lui.readconfig(os.path.join(path, ".hg", "hgrc"))

     # Configure extensions in phases: uisetup, extsetup, cmdtable, and
     # reposetup. Programs like TortoiseHg will call _dispatch several
     # times so we keep track of configured extensions in _loaded.
     extensions.loadall(lui)
     exts = [ext for ext in extensions.extensions() if ext[0] not in _loaded]
     # Propagate any changes to lui.__class__ by extensions
     ui.__class__ = lui.__class__

     # (uisetup and extsetup are handled in extensions.loadall)

     for name, module in exts:
         cmdtable = getattr(module, 'cmdtable', {})
         overrides = [cmd for cmd in cmdtable if cmd in commands.table]
         if overrides:
             ui.warn(_("extension '%s' overrides commands: %s\n")
                     % (name, " ".join(overrides)))
         commands.table.update(cmdtable)
         _loaded.add(name)

     # (reposetup is handled in hg.repository)

     addaliases(lui, commands.table)

     # check for fallback encoding
     fallback = lui.config('ui', 'fallbackencoding')
     if fallback:
         encoding.fallbackencoding = fallback

     fullargs = args
     cmd, func, args, options, cmdoptions = _parse(lui, args)

     if options["config"]:
         raise util.Abort(_("Option --config may not be abbreviated!"))
     if options["cwd"]:
         raise util.Abort(_("Option --cwd may not be abbreviated!"))
     if options["repository"]:
         raise util.Abort(_(
             "Option -R has to be separated from other options (e.g. not -qR) "
             "and --repository may only be abbreviated as --repo!"))
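
Reviewer aside, not part of the changeset: the aborts just above exist because --config, --cwd and -R/--repo/--repository are plucked out of the raw argument list by _earlygetopt before normal option parsing, so an abbreviated spelling would be missed by the early scan. A minimal, self-contained sketch of that early scan, with a simplified earlygetopt standing in for the real _earlygetopt (which also handles more argument forms):

    def earlygetopt(aliases, args):
        # simplified stand-in: collect the values of the given options from a
        # raw argument list without doing full option parsing
        values = []
        i = 0
        while i < len(args):
            if args[i] in aliases and i + 1 < len(args):
                values.append(args[i + 1])
                i += 2
            else:
                i += 1
        return values

    args = ['--cwd', '/tmp/repo', '--config', 'ui.verbose=true', 'status']
    print(earlygetopt(['--cwd'], args))     # ['/tmp/repo']
    print(earlygetopt(['--config'], args))  # ['ui.verbose=true']
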

     if options["encoding"]:
         encoding.encoding = options["encoding"]
     if options["encodingmode"]:
         encoding.encodingmode = options["encodingmode"]
     if options["time"]:
         def get_times():
             t = os.times()
             if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
                 t = (t[0], t[1], t[2], t[3], time.clock())
             return t
         s = get_times()
         def print_time():
             t = get_times()
             ui.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
                 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
         atexit.register(print_time)

     if options['verbose'] or options['debug'] or options['quiet']:
         ui.setconfig('ui', 'verbose', str(bool(options['verbose'])))
         ui.setconfig('ui', 'debug', str(bool(options['debug'])))
         ui.setconfig('ui', 'quiet', str(bool(options['quiet'])))
     if options['traceback']:
         ui.setconfig('ui', 'traceback', 'on')
     if options['noninteractive']:
         ui.setconfig('ui', 'interactive', 'off')

     if options['help']:
         return commands.help_(ui, cmd, options['version'])
     elif options['version']:
         return commands.version_(ui)
     elif not cmd:
         return commands.help_(ui, 'shortlist')

     repo = None
     cmdpats = args[:]
     if cmd not in commands.norepo.split():
         try:
             repo = hg.repository(ui, path=path)
             ui = repo.ui
             if not repo.local():
                 raise util.Abort(_("repository '%s' is not local") % path)
             ui.setconfig("bundle", "mainreporoot", repo.root)
         except error.RepoError:
             if cmd not in commands.optionalrepo.split():
                 if args and not path: # try to infer -R from command args
                     repos = map(cmdutil.findrepo, args)
                     guess = repos[0]
                     if guess and repos.count(guess) == len(repos):
                         return _dispatch(ui, ['--repository', guess] + fullargs)
                 if not path:
                     raise error.RepoError(_("There is no Mercurial repository"
                                             " here (.hg not found)"))
                 raise
         args.insert(0, repo)
     elif rpath:
         ui.warn("warning: --repository ignored\n")

     d = lambda: util.checksignature(func)(ui, *args, **cmdoptions)
     return runcommand(lui, repo, cmd, fullargs, ui, options, d,
                       cmdpats, cmdoptions)

 def _runcommand(ui, options, cmd, cmdfunc):
     def checkargs():
         try:
             return cmdfunc()
         except error.SignatureError:
             raise error.CommandError(cmd, _("invalid arguments"))

     if options['profile']:
         format = ui.config('profiling', 'format', default='text')

         if not format in ['text', 'kcachegrind']:
             ui.warn(_("unrecognized profiling format '%s'"
                       " - Ignored\n") % format)
             format = 'text'

         output = ui.config('profiling', 'output')

         if output:
             path = ui.expandpath(output)
             ostream = open(path, 'wb')
         else:
             ostream = sys.stderr

         try:
             from mercurial import lsprof
         except ImportError:
             raise util.Abort(_(
                 'lsprof not available - install from '
                 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
         p = lsprof.Profiler()
         p.enable(subcalls=True)
         try:
             return checkargs()
         finally:
             p.disable()

             if format == 'kcachegrind':
                 import lsprofcalltree
                 calltree = lsprofcalltree.KCacheGrind(p)
                 calltree.output(ostream)
             else:
                 # format == 'text'
                 stats = lsprof.Stats(p.getstats())
                 stats.sort()
                 stats.pprint(top=10, file=ostream, climit=5)

             if output:
                 ostream.close()
     else:
         return checkargs()
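
Reviewer aside, not part of the changeset: _runcommand above drives its profiling branch from two config keys, profiling.format ('text' or 'kcachegrind') and profiling.output. A small self-contained sketch of that selection logic, using a plain dict as a stand-in for ui.config:

    import sys

    def select_profile_target(config):
        # mirror the checks above: fall back to 'text' for an unrecognized
        # format, and write to stderr unless profiling.output names a file
        format = config.get('profiling.format', 'text')
        if format not in ['text', 'kcachegrind']:
            format = 'text'  # the real code also warns before falling back
        output = config.get('profiling.output')
        ostream = open(output, 'wb') if output else sys.stderr
        return format, ostream

    fmt, stream = select_profile_target({'profiling.format': 'kcachegrind'})
    print(fmt)                    # kcachegrind
    print(stream is sys.stderr)   # True
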
@@ -1,78 +1,81 @@
 # error.py - Mercurial exceptions
 #
 # Copyright 2005-2008 Matt Mackall <mpm@selenic.com>
 #
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.

 """Mercurial exceptions.

 This allows us to catch exceptions at higher levels without forcing
 imports.
 """

 # Do not import anything here, please

 class RevlogError(Exception):
     pass

 class LookupError(RevlogError, KeyError):
     def __init__(self, name, index, message):
         self.name = name
         if isinstance(name, str) and len(name) == 20:
             from node import short
             name = short(name)
         RevlogError.__init__(self, '%s@%s: %s' % (index, name, message))

     def __str__(self):
         return RevlogError.__str__(self)

 class CommandError(Exception):
     """Exception raised on errors in parsing the command line."""

 class Abort(Exception):
     """Raised if a command needs to print an error and exit."""
+    def __init__(self, *args, **kw):
+        Exception.__init__(self, *args)
+        self.hint = kw.get('hint')

 class ConfigError(Abort):
     'Exception raised when parsing config files'

 class ParseError(Exception):
     'Exception raised when parsing config files (msg[, pos])'

 class RepoError(Exception):
     pass

 class RepoLookupError(RepoError):
     pass

 class CapabilityError(RepoError):
     pass

 class LockError(IOError):
     def __init__(self, errno, strerror, filename, desc):
         IOError.__init__(self, errno, strerror, filename)
         self.desc = desc

 class LockHeld(LockError):
     def __init__(self, errno, filename, desc, locker):
         LockError.__init__(self, errno, 'Lock held', filename, desc)
         self.locker = locker

 class LockUnavailable(LockError):
     pass

 class ResponseError(Exception):
     """Raised to print an error with part of output and exit."""

 class UnknownCommand(Exception):
     """Exception raised if command is not in the command table."""

 class AmbiguousCommand(Exception):
     """Exception raised if command shortcut matches more than one command."""

 # derived from KeyboardInterrupt to simplify some breakout code
 class SignalInterrupt(KeyboardInterrupt):
     """Exception raised on SIGTERM and SIGHUP."""

 class SignatureError(Exception):
     pass
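
The three added lines above are the substantive change in this file: Abort now accepts an optional hint keyword and exposes it as .hint. A minimal sketch of how a caller could attach and read a hint; the message and hint strings are borrowed from the test output below, and the code that actually prints the hint lives outside this hunk:

    class Abort(Exception):
        """Raised if a command needs to print an error and exit."""
        def __init__(self, *args, **kw):
            Exception.__init__(self, *args)
            self.hint = kw.get('hint')

    try:
        raise Abort("push creates new remote heads on branch 'default'!",
                    hint="did you forget to merge? use push -f to force")
    except Abort as inst:
        print("abort: %s" % inst)
        if inst.hint:
            print("(%s)" % inst.hint)
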
@@ -1,323 +1,323 @@
 updating to branch default
 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
 pushing to ../a
 searching for changes
 abort: push creates new remote heads on branch 'default'!
 (you should pull and merge or use push -f to force)
 pulling from ../a
 searching for changes
 adding changesets
 adding manifests
 adding file changes
 added 1 changesets with 1 changes to 1 files (+1 heads)
 (run 'hg heads' to see heads, 'hg merge' to merge)
 pushing to ../a
 searching for changes
 abort: push creates new remote heads on branch 'default'!
 (did you forget to merge? use push -f to force)
 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
 (branch merge, don't forget to commit)
 pushing to ../a
 searching for changes
 adding changesets
 adding manifests
 adding file changes
 added 2 changesets with 1 changes to 1 files
 adding foo
 updating to branch default
 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
 created new head
 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
 created new head
 merging foo
 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
 (branch merge, don't forget to commit)
 pushing to ../c
 searching for changes
 abort: push creates new remote heads on branch 'default'!
 (did you forget to merge? use push -f to force)
-1
+255
 pushing to ../c
 searching for changes
 no changes found
 0
 pushing to ../c
 searching for changes
 abort: push creates new remote heads on branch 'default'!
 (did you forget to merge? use push -f to force)
-1
+255
 pushing to ../c
 searching for changes
 abort: push creates new remote heads on branch 'default'!
 (did you forget to merge? use push -f to force)
-1
+255
 pushing to ../c
 searching for changes
 adding changesets
 adding manifests
 adding file changes
 added 2 changesets with 2 changes to 1 files (+2 heads)
 0
 pushing to ../c
 searching for changes
 adding changesets
 adding manifests
 adding file changes
 added 1 changesets with 1 changes to 1 files (-1 heads)
 0
 comparing with ../c
 searching for changes
 no changes found
 % issue 450
 pushing to ../e
 searching for changes
 adding changesets
 adding manifests
 adding file changes
 added 1 changesets with 1 changes to 1 files
 0
 pushing to ../e
 searching for changes
 adding changesets
 adding manifests
 adding file changes
 added 1 changesets with 1 changes to 1 files
 0
 % issue 736
 % push on existing branch and new branch
 pushing to ../f
 searching for changes
 abort: push creates new remote branches: c!
 (use 'hg push --new-branch' to create new remote branches)
-1
+255
 pushing to ../f
 searching for changes
 abort: push creates new remote branches: c!
 (use 'hg push --new-branch' to create new remote branches)
-1
+255
 % multiple new branches
 pushing to ../f
 searching for changes
 abort: push creates new remote branches: c, d!
 (use 'hg push --new-branch' to create new remote branches)
-1
+255
 pushing to ../f
 searching for changes
 abort: push creates new remote branches: c, d!
 (use 'hg push --new-branch' to create new remote branches)
-1
+255
 % fail on multiple head push
 pushing to ../f
 searching for changes
 abort: push creates new remote heads on branch 'a'!
 (did you forget to merge? use push -f to force)
-1
+255
 % push replacement head on existing branches
 pushing to ../f
 searching for changes
 adding changesets
 adding manifests
 adding file changes
 added 2 changesets with 2 changes to 1 files
 0
 % merge of branch a to other branch b followed by unrelated push on branch a
 pushing to ../f
 searching for changes
 adding changesets
 adding manifests
 adding file changes
 added 1 changesets with 1 changes to 1 files (-1 heads)
 0
 pushing to ../f
 searching for changes
 adding changesets
 adding manifests
 adding file changes
 added 1 changesets with 1 changes to 1 files (+1 heads)
 0
 % cheating the counting algorithm
 pushing to ../f
 searching for changes
 adding changesets
 adding manifests
 adding file changes
 added 2 changesets with 2 changes to 1 files
 0
 % failed push of new named branch
 pushing to ../f
 searching for changes
 abort: push creates new remote branches: e!
 (use 'hg push --new-branch' to create new remote branches)
-1
+255
 % using --new-branch to push new named branch
 pushing to ../f
 searching for changes
 adding changesets
 adding manifests
 adding file changes
 added 1 changesets with 1 changes to 1 files
 0
 % checking prepush logic does not allow silently pushing multiple new heads
 adding init
 adding a
 updating to branch default
 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
 adding b
 created new head
 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
 adding c
 created new head
 pushing to h
 searching for changes
 abort: push creates new remote heads on branch 'default'!
 (you should pull and merge or use push -f to force)

 % check prepush logic with merged branches
 marked working directory as branch a
 adding foo
 updating to branch a
 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
 marked working directory as branch b
 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
 (branch merge, don't forget to commit)
 pushing to j
 searching for changes
 abort: push creates new remote branches: b!
 (use 'hg push --new-branch' to create new remote branches)

 % prepush -r should not allow you to sneak in new heads
 pushing to ../l
 searching for changes
 abort: push creates new remote heads on branch 'a'!
 (did you forget to merge? use push -f to force)
 % check prepush with new branch head on former topo non-head
 marked working directory as branch A
 adding a
 marked working directory as branch B
 adding b
 updating to branch B
 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
 adding b1
 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
 adding a2
 %% glog of local
 @ 2: A a2
 |
 | o 1: B b
 |/
 o 0: A a

 %% glog of remote
 @ 2: B b1
 |
 o 1: B b
 |
 o 0: A a

 %% outgoing
 comparing with inner
 searching for changes
 2: A a2
 pushing to inner
 searching for changes
 adding changesets
 adding manifests
 adding file changes
 added 1 changesets with 1 changes to 1 files (+1 heads)
 % check prepush with new branch head on former topo head
 marked working directory as branch A
 adding a
 marked working directory as branch B
 adding b
 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
 adding a1
 updating to branch A
 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
 adding b1
 adding a2
 %% glog of local
 @ 3: A a2
 |
 o 2: A a1
 |
 | o 1: B b
 |/
 o 0: A a

 %% glog of remote
 @ 3: B b1
 |
 | o 2: A a1
 | |
 o | 1: B b
 |/
 o 0: A a

 %% outgoing
 comparing with inner
 searching for changes
 3: A a2
 pushing to inner
 searching for changes
 adding changesets
 adding manifests
 adding file changes
 added 1 changesets with 1 changes to 1 files
 % check prepush with new branch head and new child of former branch head
 % but child is on different branch
 marked working directory as branch A
 adding a
 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
 marked working directory as branch B
 adding b
 updating to branch B
 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
 marked working directory as branch B
 created new head
 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
 marked working directory as branch A
 created new head
 %% glog of local
 @ 5: A b3
 |
 | o 4: B a3
 | |
 o | 3: B b1
 | |
 o | 2: B b0
 /
 o 1: A a1
 |
 o 0: A a0

 %% glog of remote
 @ 3: B b1
 |
 o 2: B b0

 o 1: A a1
 |
 o 0: A a0

 %% outgoing
 comparing with inner
 searching for changes
 4: B a3
 5: A b3
 pushing to inner
 searching for changes
 abort: push creates new remote heads on branch 'A'!
 (did you forget to merge? use push -f to force)
 pushing to inner
 searching for changes
 abort: push creates new remote heads on branch 'A'!
 (did you forget to merge? use push -f to force)
 comparing with inner
 searching for changes
 no changes found
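
The only changes in this expected-output file are the echoed exit statuses after aborted pushes, which move from 1 to 255, the code Mercurial returns for aborted commands. A tiny sketch of the kind of check the test performs after each failing push; the real test is a shell script, and the subprocess invocation below is only an illustration using the ../c path from the output above:

    import subprocess

    # an aborted push is now expected to exit with status 255 rather than 1
    ret = subprocess.call(['hg', 'push', '../c'])
    print(ret)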