@@ -1,285 +1,332 b''
 # narrowbundle2.py - bundle2 extensions for narrow repository support
 #
 # Copyright 2017 Google, Inc.
 #
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
 from __future__ import absolute_import
 
 import errno
 import struct
 
 from mercurial.i18n import _
 from mercurial.node import (
     bin,
     nullid,
 )
 from mercurial import (
     bundle2,
     changegroup,
     error,
     exchange,
     extensions,
     narrowspec,
     repair,
     repository,
     util,
     wireprototypes,
 )
 from mercurial.utils import (
     stringutil,
 )
 
 NARROWCAP = 'narrow'
 _NARROWACL_SECTION = 'narrowhgacl'
 _CHANGESPECPART = NARROWCAP + ':changespec'
 _SPECPART = NARROWCAP + ':spec'
 _SPECPART_INCLUDE = 'include'
 _SPECPART_EXCLUDE = 'exclude'
 _KILLNODESIGNAL = 'KILL'
 _DONESIGNAL = 'DONE'
 _ELIDEDCSHEADER = '>20s20s20sl' # cset id, p1, p2, len(text)
 _ELIDEDMFHEADER = '>20s20s20s20sl' # manifest id, p1, p2, link id, len(text)
 _CSHEADERSIZE = struct.calcsize(_ELIDEDCSHEADER)
 _MFHEADERSIZE = struct.calcsize(_ELIDEDMFHEADER)
 
 # When advertising capabilities, always include narrow clone support.
 def getrepocaps_narrow(orig, repo, **kwargs):
     caps = orig(repo, **kwargs)
     caps[NARROWCAP] = ['v0']
     return caps
 
+def getbundlechangegrouppart_nonellipsis(bundler, repo, source, bundlecaps=None,
+                                         b2caps=None, heads=None, common=None,
+                                         **kwargs):
+    """Handling changegroup changegroup generation on the server when user
+    is widening their narrowspec"""
+
+    cgversions = b2caps.get('changegroup')
+    if cgversions: # 3.1 and 3.2 ship with an empty value
+        cgversions = [v for v in cgversions
+                      if v in changegroup.supportedoutgoingversions(repo)]
+        if not cgversions:
+            raise ValueError(_('no common changegroup version'))
+        version = max(cgversions)
+    else:
+        raise ValueError(_("server does not advertise changegroup version,"
+                           " can't negotiate support for ellipsis nodes"))
+
+    include = sorted(filter(bool, kwargs.get(r'includepats', [])))
+    exclude = sorted(filter(bool, kwargs.get(r'excludepats', [])))
+    newmatch = narrowspec.match(repo.root, include=include, exclude=exclude)
+    oldinclude = sorted(filter(bool, kwargs.get(r'oldincludepats', [])))
+    oldexclude = sorted(filter(bool, kwargs.get(r'oldexcludepats', [])))
+    common = set(common or [nullid])
+
+    if (oldinclude != include or oldexclude != exclude):
+        common = repo.revs("::%ln", common)
+        commonnodes = set()
+        cl = repo.changelog
+        for c in common:
+            commonnodes.add(cl.node(c))
+        if commonnodes:
+            # XXX: we should only send the filelogs (and treemanifest). user
+            # already has the changelog and manifest
+            packer = changegroup.getbundler(version, repo,
+                                            filematcher=newmatch,
+                                            fullnodes=commonnodes)
+            cgdata = packer.generate(set([nullid]), list(commonnodes), False,
+                                     source)
+
+            part = bundler.newpart('changegroup', data=cgdata)
+            part.addparam('version', version)
+            if 'treemanifest' in repo.requirements:
+                part.addparam('treemanifest', '1')
+
 # Serve a changegroup for a client with a narrow clone.
 def getbundlechangegrouppart_narrow(bundler, repo, source,
                                     bundlecaps=None, b2caps=None, heads=None,
                                     common=None, **kwargs):
     assert repo.ui.configbool('experimental', 'narrowservebrokenellipses')
 
     cgversions = b2caps.get('changegroup')
     if cgversions: # 3.1 and 3.2 ship with an empty value
         cgversions = [v for v in cgversions
                       if v in changegroup.supportedoutgoingversions(repo)]
         if not cgversions:
             raise ValueError(_('no common changegroup version'))
         version = max(cgversions)
     else:
         raise ValueError(_("server does not advertise changegroup version,"
                            " can't negotiate support for ellipsis nodes"))
 
     include = sorted(filter(bool, kwargs.get(r'includepats', [])))
     exclude = sorted(filter(bool, kwargs.get(r'excludepats', [])))
     newmatch = narrowspec.match(repo.root, include=include, exclude=exclude)
 
     depth = kwargs.get(r'depth', None)
     if depth is not None:
         depth = int(depth)
         if depth < 1:
             raise error.Abort(_('depth must be positive, got %d') % depth)
 
     heads = set(heads or repo.heads())
     common = set(common or [nullid])
     oldinclude = sorted(filter(bool, kwargs.get(r'oldincludepats', [])))
     oldexclude = sorted(filter(bool, kwargs.get(r'oldexcludepats', [])))
     known = {bin(n) for n in kwargs.get(r'known', [])}
     if known and (oldinclude != include or oldexclude != exclude):
         # Steps:
         # 1. Send kill for "$known & ::common"
         #
         # 2. Send changegroup for ::common
         #
         # 3. Proceed.
         #
         # In the future, we can send kills for only the specific
         # nodes we know should go away or change shape, and then
         # send a data stream that tells the client something like this:
         #
         # a) apply this changegroup
         # b) apply nodes XXX, YYY, ZZZ that you already have
         # c) goto a
         #
         # until they've built up the full new state.
         # Convert to revnums and intersect with "common". The client should
         # have made it a subset of "common" already, but let's be safe.
         known = set(repo.revs("%ln & ::%ln", known, common))
         # TODO: we could send only roots() of this set, and the
         # list of nodes in common, and the client could work out
         # what to strip, instead of us explicitly sending every
         # single node.
         deadrevs = known
         def genkills():
             for r in deadrevs:
                 yield _KILLNODESIGNAL
                 yield repo.changelog.node(r)
             yield _DONESIGNAL
         bundler.newpart(_CHANGESPECPART, data=genkills())
         newvisit, newfull, newellipsis = exchange._computeellipsis(
             repo, set(), common, known, newmatch)
         if newvisit:
             packer = changegroup.getbundler(version, repo,
                                             filematcher=newmatch,
                                             ellipses=True,
                                             shallow=depth is not None,
                                             ellipsisroots=newellipsis,
                                             fullnodes=newfull)
             cgdata = packer.generate(common, newvisit, False, source)
 
             part = bundler.newpart('changegroup', data=cgdata)
             part.addparam('version', version)
             if 'treemanifest' in repo.requirements:
                 part.addparam('treemanifest', '1')
 
     visitnodes, relevant_nodes, ellipsisroots = exchange._computeellipsis(
         repo, common, heads, set(), newmatch, depth=depth)
 
     repo.ui.debug('Found %d relevant revs\n' % len(relevant_nodes))
     if visitnodes:
         packer = changegroup.getbundler(version, repo,
                                         filematcher=newmatch,
                                         ellipses=True,
                                         shallow=depth is not None,
                                         ellipsisroots=ellipsisroots,
                                         fullnodes=relevant_nodes)
         cgdata = packer.generate(common, visitnodes, False, source)
 
         part = bundler.newpart('changegroup', data=cgdata)
         part.addparam('version', version)
         if 'treemanifest' in repo.requirements:
             part.addparam('treemanifest', '1')
 
 @bundle2.parthandler(_SPECPART, (_SPECPART_INCLUDE, _SPECPART_EXCLUDE))
 def _handlechangespec_2(op, inpart):
     includepats = set(inpart.params.get(_SPECPART_INCLUDE, '').splitlines())
     excludepats = set(inpart.params.get(_SPECPART_EXCLUDE, '').splitlines())
     if not repository.NARROW_REQUIREMENT in op.repo.requirements:
         op.repo.requirements.add(repository.NARROW_REQUIREMENT)
         op.repo._writerequirements()
     op.repo.setnarrowpats(includepats, excludepats)
 
 @bundle2.parthandler(_CHANGESPECPART)
 def _handlechangespec(op, inpart):
     repo = op.repo
     cl = repo.changelog
 
     # changesets which need to be stripped entirely. either they're no longer
     # needed in the new narrow spec, or the server is sending a replacement
     # in the changegroup part.
     clkills = set()
 
     # A changespec part contains all the updates to ellipsis nodes
     # that will happen as a result of widening or narrowing a
     # repo. All the changes that this block encounters are ellipsis
     # nodes or flags to kill an existing ellipsis.
     chunksignal = changegroup.readexactly(inpart, 4)
     while chunksignal != _DONESIGNAL:
         if chunksignal == _KILLNODESIGNAL:
             # a node used to be an ellipsis but isn't anymore
             ck = changegroup.readexactly(inpart, 20)
             if cl.hasnode(ck):
                 clkills.add(ck)
         else:
             raise error.Abort(
                 _('unexpected changespec node chunk type: %s') % chunksignal)
         chunksignal = changegroup.readexactly(inpart, 4)
 
     if clkills:
         # preserve bookmarks that repair.strip() would otherwise strip
         bmstore = repo._bookmarks
         class dummybmstore(dict):
             def applychanges(self, repo, tr, changes):
                 pass
             def recordchange(self, tr): # legacy version
                 pass
         repo._bookmarks = dummybmstore()
         chgrpfile = repair.strip(op.ui, repo, list(clkills), backup=True,
                                  topic='widen')
         repo._bookmarks = bmstore
         if chgrpfile:
             op._widen_uninterr = repo.ui.uninterruptable()
             op._widen_uninterr.__enter__()
             # presence of _widen_bundle attribute activates widen handler later
             op._widen_bundle = chgrpfile
     # Set the new narrowspec if we're widening. The setnewnarrowpats() method
     # will currently always be there when using the core+narrowhg server, but
     # other servers may include a changespec part even when not widening (e.g.
     # because we're deepening a shallow repo).
     if util.safehasattr(repo, 'setnewnarrowpats'):
         repo.setnewnarrowpats()
 
 def handlechangegroup_widen(op, inpart):
     """Changegroup exchange handler which restores temporarily-stripped nodes"""
     # We saved a bundle with stripped node data we must now restore.
     # This approach is based on mercurial/repair.py@6ee26a53c111.
     repo = op.repo
     ui = op.ui
 
     chgrpfile = op._widen_bundle
     del op._widen_bundle
     vfs = repo.vfs
 
     ui.note(_("adding branch\n"))
     f = vfs.open(chgrpfile, "rb")
     try:
         gen = exchange.readbundle(ui, f, chgrpfile, vfs)
         if not ui.verbose:
             # silence internal shuffling chatter
             ui.pushbuffer()
         if isinstance(gen, bundle2.unbundle20):
             with repo.transaction('strip') as tr:
                 bundle2.processbundle(repo, gen, lambda: tr)
         else:
             gen.apply(repo, 'strip', 'bundle:' + vfs.join(chgrpfile), True)
         if not ui.verbose:
             ui.popbuffer()
     finally:
         f.close()
 
     # remove undo files
     for undovfs, undofile in repo.undofiles():
         try:
             undovfs.unlink(undofile)
         except OSError as e:
             if e.errno != errno.ENOENT:
                 ui.warn(_('error removing %s: %s\n') %
                         (undovfs.join(undofile), stringutil.forcebytestr(e)))
 
     # Remove partial backup only if there were no exceptions
     op._widen_uninterr.__exit__(None, None, None)
     vfs.unlink(chgrpfile)
 
 def setup():
     """Enable narrow repo support in bundle2-related extension points."""
     extensions.wrapfunction(bundle2, 'getrepocaps', getrepocaps_narrow)
 
     getbundleargs = wireprototypes.GETBUNDLE_ARGUMENTS
 
     getbundleargs['narrow'] = 'boolean'
+    getbundleargs['widen'] = 'boolean'
     getbundleargs['depth'] = 'plain'
     getbundleargs['oldincludepats'] = 'csv'
     getbundleargs['oldexcludepats'] = 'csv'
     getbundleargs['includepats'] = 'csv'
     getbundleargs['excludepats'] = 'csv'
     getbundleargs['known'] = 'csv'
 
     # Extend changegroup serving to handle requests from narrow clients.
     origcgfn = exchange.getbundle2partsmapping['changegroup']
     def wrappedcgfn(*args, **kwargs):
         repo = args[1]
         if repo.ui.has_section(_NARROWACL_SECTION):
             kwargs = exchange.applynarrowacl(repo, kwargs)
 
         if (kwargs.get(r'narrow', False) and
             repo.ui.configbool('experimental', 'narrowservebrokenellipses')):
             getbundlechangegrouppart_narrow(*args, **kwargs)
+        elif kwargs.get(r'widen', False) and kwargs.get(r'narrow', False):
+            getbundlechangegrouppart_nonellipsis(*args, **kwargs)
         else:
             origcgfn(*args, **kwargs)
     exchange.getbundle2partsmapping['changegroup'] = wrappedcgfn
 
     # Extend changegroup receiver so client can fixup after widen requests.
     origcghandler = bundle2.parthandlermapping['changegroup']
     def wrappedcghandler(op, inpart):
         origcghandler(op, inpart)
         if util.safehasattr(op, '_widen_bundle'):
             handlechangegroup_widen(op, inpart)
     wrappedcghandler.params = origcghandler.params
     bundle2.parthandlermapping['changegroup'] = wrappedcghandler
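
Taken together, the hunk above advertises a new boolean getbundle argument, widen, and routes requests that carry both narrow and widen to the new getbundlechangegrouppart_nonellipsis generator. The following is an illustrative sketch only: plain Python that distills the branch order of wrappedcgfn above. The returned strings are stand-ins naming the handlers added in this patch; nothing here calls the real bundle2 API.

# Sketch: mirrors the dispatch order added to wrappedcgfn in the hunk above.
def choose_changegroup_handler(kwargs, serve_broken_ellipses):
    """Return which changegroup part generator a getbundle request would use."""
    narrow = kwargs.get('narrow', False)
    widen = kwargs.get('widen', False)
    if narrow and serve_broken_ellipses:
        # ellipsis-aware serving wins whenever the experimental config is set
        return 'getbundlechangegrouppart_narrow'
    elif widen and narrow:
        # widening request handled by the new non-ellipsis generator
        return 'getbundlechangegrouppart_nonellipsis'
    else:
        return 'origcgfn'  # stock changegroup part

# A widening pull against a server without narrowservebrokenellipses enabled
# falls through to the non-ellipsis handler.
assert choose_changegroup_handler(
    {'narrow': True, 'widen': True}, serve_broken_ellipses=False
) == 'getbundlechangegrouppart_nonellipsis'

Note that the ellipsis branch is checked first, so a server with experimental.narrowservebrokenellipses enabled still takes the ellipsis path even when the client asks to widen.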
@@ -1,428 +1,429 b'' | |||||
1 | # narrowcommands.py - command modifications for narrowhg extension |
|
1 | # narrowcommands.py - command modifications for narrowhg extension | |
2 | # |
|
2 | # | |
3 | # Copyright 2017 Google, Inc. |
|
3 | # Copyright 2017 Google, Inc. | |
4 | # |
|
4 | # | |
5 | # This software may be used and distributed according to the terms of the |
|
5 | # This software may be used and distributed according to the terms of the | |
6 | # GNU General Public License version 2 or any later version. |
|
6 | # GNU General Public License version 2 or any later version. | |
7 | from __future__ import absolute_import |
|
7 | from __future__ import absolute_import | |
8 |
|
8 | |||
9 | import itertools |
|
9 | import itertools | |
10 | import os |
|
10 | import os | |
11 |
|
11 | |||
12 | from mercurial.i18n import _ |
|
12 | from mercurial.i18n import _ | |
13 | from mercurial import ( |
|
13 | from mercurial import ( | |
14 | cmdutil, |
|
14 | cmdutil, | |
15 | commands, |
|
15 | commands, | |
16 | discovery, |
|
16 | discovery, | |
17 | error, |
|
17 | error, | |
18 | exchange, |
|
18 | exchange, | |
19 | extensions, |
|
19 | extensions, | |
20 | hg, |
|
20 | hg, | |
21 | merge, |
|
21 | merge, | |
22 | narrowspec, |
|
22 | narrowspec, | |
23 | node, |
|
23 | node, | |
24 | pycompat, |
|
24 | pycompat, | |
25 | registrar, |
|
25 | registrar, | |
26 | repair, |
|
26 | repair, | |
27 | repository, |
|
27 | repository, | |
28 | repoview, |
|
28 | repoview, | |
29 | sparse, |
|
29 | sparse, | |
30 | util, |
|
30 | util, | |
31 | ) |
|
31 | ) | |
32 |
|
32 | |||
33 | from . import ( |
|
33 | from . import ( | |
34 | narrowbundle2, |
|
34 | narrowbundle2, | |
35 | ) |
|
35 | ) | |
36 |
|
36 | |||
37 | table = {} |
|
37 | table = {} | |
38 | command = registrar.command(table) |
|
38 | command = registrar.command(table) | |
39 |
|
39 | |||
40 | def setup(): |
|
40 | def setup(): | |
41 | """Wraps user-facing mercurial commands with narrow-aware versions.""" |
|
41 | """Wraps user-facing mercurial commands with narrow-aware versions.""" | |
42 |
|
42 | |||
43 | entry = extensions.wrapcommand(commands.table, 'clone', clonenarrowcmd) |
|
43 | entry = extensions.wrapcommand(commands.table, 'clone', clonenarrowcmd) | |
44 | entry[1].append(('', 'narrow', None, |
|
44 | entry[1].append(('', 'narrow', None, | |
45 | _("create a narrow clone of select files"))) |
|
45 | _("create a narrow clone of select files"))) | |
46 | entry[1].append(('', 'depth', '', |
|
46 | entry[1].append(('', 'depth', '', | |
47 | _("limit the history fetched by distance from heads"))) |
|
47 | _("limit the history fetched by distance from heads"))) | |
48 | entry[1].append(('', 'narrowspec', '', |
|
48 | entry[1].append(('', 'narrowspec', '', | |
49 | _("read narrowspecs from file"))) |
|
49 | _("read narrowspecs from file"))) | |
50 | # TODO(durin42): unify sparse/narrow --include/--exclude logic a bit |
|
50 | # TODO(durin42): unify sparse/narrow --include/--exclude logic a bit | |
51 | if 'sparse' not in extensions.enabled(): |
|
51 | if 'sparse' not in extensions.enabled(): | |
52 | entry[1].append(('', 'include', [], |
|
52 | entry[1].append(('', 'include', [], | |
53 | _("specifically fetch this file/directory"))) |
|
53 | _("specifically fetch this file/directory"))) | |
54 | entry[1].append( |
|
54 | entry[1].append( | |
55 | ('', 'exclude', [], |
|
55 | ('', 'exclude', [], | |
56 | _("do not fetch this file/directory, even if included"))) |
|
56 | _("do not fetch this file/directory, even if included"))) | |
57 |
|
57 | |||
58 | entry = extensions.wrapcommand(commands.table, 'pull', pullnarrowcmd) |
|
58 | entry = extensions.wrapcommand(commands.table, 'pull', pullnarrowcmd) | |
59 | entry[1].append(('', 'depth', '', |
|
59 | entry[1].append(('', 'depth', '', | |
60 | _("limit the history fetched by distance from heads"))) |
|
60 | _("limit the history fetched by distance from heads"))) | |
61 |
|
61 | |||
62 | extensions.wrapcommand(commands.table, 'archive', archivenarrowcmd) |
|
62 | extensions.wrapcommand(commands.table, 'archive', archivenarrowcmd) | |
63 |
|
63 | |||
64 | def expandpull(pullop, includepats, excludepats): |
|
64 | def expandpull(pullop, includepats, excludepats): | |
65 | if not narrowspec.needsexpansion(includepats): |
|
65 | if not narrowspec.needsexpansion(includepats): | |
66 | return includepats, excludepats |
|
66 | return includepats, excludepats | |
67 |
|
67 | |||
68 | heads = pullop.heads or pullop.rheads |
|
68 | heads = pullop.heads or pullop.rheads | |
69 | includepats, excludepats = pullop.remote.expandnarrow( |
|
69 | includepats, excludepats = pullop.remote.expandnarrow( | |
70 | includepats, excludepats, heads) |
|
70 | includepats, excludepats, heads) | |
71 | pullop.repo.ui.debug('Expanded narrowspec to inc=%s, exc=%s\n' % ( |
|
71 | pullop.repo.ui.debug('Expanded narrowspec to inc=%s, exc=%s\n' % ( | |
72 | includepats, excludepats)) |
|
72 | includepats, excludepats)) | |
73 | return set(includepats), set(excludepats) |
|
73 | return set(includepats), set(excludepats) | |
74 |
|
74 | |||
75 | def clonenarrowcmd(orig, ui, repo, *args, **opts): |
|
75 | def clonenarrowcmd(orig, ui, repo, *args, **opts): | |
76 | """Wraps clone command, so 'hg clone' first wraps localrepo.clone().""" |
|
76 | """Wraps clone command, so 'hg clone' first wraps localrepo.clone().""" | |
77 | opts = pycompat.byteskwargs(opts) |
|
77 | opts = pycompat.byteskwargs(opts) | |
78 | wrappedextraprepare = util.nullcontextmanager() |
|
78 | wrappedextraprepare = util.nullcontextmanager() | |
79 | opts_narrow = opts['narrow'] |
|
79 | opts_narrow = opts['narrow'] | |
80 | narrowspecfile = opts['narrowspec'] |
|
80 | narrowspecfile = opts['narrowspec'] | |
81 |
|
81 | |||
82 | if narrowspecfile: |
|
82 | if narrowspecfile: | |
83 | filepath = os.path.join(pycompat.getcwd(), narrowspecfile) |
|
83 | filepath = os.path.join(pycompat.getcwd(), narrowspecfile) | |
84 | ui.status(_("reading narrowspec from '%s'\n") % filepath) |
|
84 | ui.status(_("reading narrowspec from '%s'\n") % filepath) | |
85 | try: |
|
85 | try: | |
86 | fp = open(filepath, 'rb') |
|
86 | fp = open(filepath, 'rb') | |
87 | except IOError: |
|
87 | except IOError: | |
88 | raise error.Abort(_("file '%s' not found") % filepath) |
|
88 | raise error.Abort(_("file '%s' not found") % filepath) | |
89 |
|
89 | |||
90 | includes, excludes, profiles = sparse.parseconfig(ui, fp.read(), |
|
90 | includes, excludes, profiles = sparse.parseconfig(ui, fp.read(), | |
91 | 'narrow') |
|
91 | 'narrow') | |
92 | if profiles: |
|
92 | if profiles: | |
93 | raise error.Abort(_("cannot specify other files using '%include' in" |
|
93 | raise error.Abort(_("cannot specify other files using '%include' in" | |
94 | " narrowspec")) |
|
94 | " narrowspec")) | |
95 |
|
95 | |||
96 | # narrowspec is passed so we should assume that user wants narrow clone |
|
96 | # narrowspec is passed so we should assume that user wants narrow clone | |
97 | opts_narrow = True |
|
97 | opts_narrow = True | |
98 | opts['include'].extend(includes) |
|
98 | opts['include'].extend(includes) | |
99 | opts['exclude'].extend(excludes) |
|
99 | opts['exclude'].extend(excludes) | |
100 |
|
100 | |||
101 | if opts_narrow: |
|
101 | if opts_narrow: | |
102 | def pullbundle2extraprepare_widen(orig, pullop, kwargs): |
|
102 | def pullbundle2extraprepare_widen(orig, pullop, kwargs): | |
103 | # Create narrow spec patterns from clone flags |
|
103 | # Create narrow spec patterns from clone flags | |
104 | includepats = narrowspec.parsepatterns(opts['include']) |
|
104 | includepats = narrowspec.parsepatterns(opts['include']) | |
105 | excludepats = narrowspec.parsepatterns(opts['exclude']) |
|
105 | excludepats = narrowspec.parsepatterns(opts['exclude']) | |
106 |
|
106 | |||
107 | # If necessary, ask the server to expand the narrowspec. |
|
107 | # If necessary, ask the server to expand the narrowspec. | |
108 | includepats, excludepats = expandpull( |
|
108 | includepats, excludepats = expandpull( | |
109 | pullop, includepats, excludepats) |
|
109 | pullop, includepats, excludepats) | |
110 |
|
110 | |||
111 | if not includepats and excludepats: |
|
111 | if not includepats and excludepats: | |
112 | # If nothing was included, we assume the user meant to include |
|
112 | # If nothing was included, we assume the user meant to include | |
113 | # everything, except what they asked to exclude. |
|
113 | # everything, except what they asked to exclude. | |
114 | includepats = {'path:.'} |
|
114 | includepats = {'path:.'} | |
115 |
|
115 | |||
116 | pullop.repo.setnarrowpats(includepats, excludepats) |
|
116 | pullop.repo.setnarrowpats(includepats, excludepats) | |
117 |
|
117 | |||
118 | # This will populate 'includepats' etc with the values from the |
|
118 | # This will populate 'includepats' etc with the values from the | |
119 | # narrowspec we just saved. |
|
119 | # narrowspec we just saved. | |
120 | orig(pullop, kwargs) |
|
120 | orig(pullop, kwargs) | |
121 |
|
121 | |||
122 | if opts.get('depth'): |
|
122 | if opts.get('depth'): | |
123 | kwargs['depth'] = opts['depth'] |
|
123 | kwargs['depth'] = opts['depth'] | |
124 | wrappedextraprepare = extensions.wrappedfunction(exchange, |
|
124 | wrappedextraprepare = extensions.wrappedfunction(exchange, | |
125 | '_pullbundle2extraprepare', pullbundle2extraprepare_widen) |
|
125 | '_pullbundle2extraprepare', pullbundle2extraprepare_widen) | |
126 |
|
126 | |||
127 | def pullnarrow(orig, repo, *args, **kwargs): |
|
127 | def pullnarrow(orig, repo, *args, **kwargs): | |
128 | if opts_narrow: |
|
128 | if opts_narrow: | |
129 | repo.requirements.add(repository.NARROW_REQUIREMENT) |
|
129 | repo.requirements.add(repository.NARROW_REQUIREMENT) | |
130 | repo._writerequirements() |
|
130 | repo._writerequirements() | |
131 |
|
131 | |||
132 | return orig(repo, *args, **kwargs) |
|
132 | return orig(repo, *args, **kwargs) | |
133 |
|
133 | |||
134 | wrappedpull = extensions.wrappedfunction(exchange, 'pull', pullnarrow) |
|
134 | wrappedpull = extensions.wrappedfunction(exchange, 'pull', pullnarrow) | |
135 |
|
135 | |||
136 | with wrappedextraprepare, wrappedpull: |
|
136 | with wrappedextraprepare, wrappedpull: | |
137 | return orig(ui, repo, *args, **pycompat.strkwargs(opts)) |
|
137 | return orig(ui, repo, *args, **pycompat.strkwargs(opts)) | |
138 |
|
138 | |||
139 | def pullnarrowcmd(orig, ui, repo, *args, **opts): |
|
139 | def pullnarrowcmd(orig, ui, repo, *args, **opts): | |
140 | """Wraps pull command to allow modifying narrow spec.""" |
|
140 | """Wraps pull command to allow modifying narrow spec.""" | |
141 | wrappedextraprepare = util.nullcontextmanager() |
|
141 | wrappedextraprepare = util.nullcontextmanager() | |
142 | if repository.NARROW_REQUIREMENT in repo.requirements: |
|
142 | if repository.NARROW_REQUIREMENT in repo.requirements: | |
143 |
|
143 | |||
144 | def pullbundle2extraprepare_widen(orig, pullop, kwargs): |
|
144 | def pullbundle2extraprepare_widen(orig, pullop, kwargs): | |
145 | orig(pullop, kwargs) |
|
145 | orig(pullop, kwargs) | |
146 | if opts.get(r'depth'): |
|
146 | if opts.get(r'depth'): | |
147 | kwargs['depth'] = opts[r'depth'] |
|
147 | kwargs['depth'] = opts[r'depth'] | |
148 | wrappedextraprepare = extensions.wrappedfunction(exchange, |
|
148 | wrappedextraprepare = extensions.wrappedfunction(exchange, | |
149 | '_pullbundle2extraprepare', pullbundle2extraprepare_widen) |
|
149 | '_pullbundle2extraprepare', pullbundle2extraprepare_widen) | |
150 |
|
150 | |||
151 | with wrappedextraprepare: |
|
151 | with wrappedextraprepare: | |
152 | return orig(ui, repo, *args, **opts) |
|
152 | return orig(ui, repo, *args, **opts) | |
153 |
|
153 | |||
154 | def archivenarrowcmd(orig, ui, repo, *args, **opts): |
|
154 | def archivenarrowcmd(orig, ui, repo, *args, **opts): | |
155 | """Wraps archive command to narrow the default includes.""" |
|
155 | """Wraps archive command to narrow the default includes.""" | |
156 | if repository.NARROW_REQUIREMENT in repo.requirements: |
|
156 | if repository.NARROW_REQUIREMENT in repo.requirements: | |
157 | repo_includes, repo_excludes = repo.narrowpats |
|
157 | repo_includes, repo_excludes = repo.narrowpats | |
158 | includes = set(opts.get(r'include', [])) |
|
158 | includes = set(opts.get(r'include', [])) | |
159 | excludes = set(opts.get(r'exclude', [])) |
|
159 | excludes = set(opts.get(r'exclude', [])) | |
160 | includes, excludes, unused_invalid = narrowspec.restrictpatterns( |
|
160 | includes, excludes, unused_invalid = narrowspec.restrictpatterns( | |
161 | includes, excludes, repo_includes, repo_excludes) |
|
161 | includes, excludes, repo_includes, repo_excludes) | |
162 | if includes: |
|
162 | if includes: | |
163 | opts[r'include'] = includes |
|
163 | opts[r'include'] = includes | |
164 | if excludes: |
|
164 | if excludes: | |
165 | opts[r'exclude'] = excludes |
|
165 | opts[r'exclude'] = excludes | |
166 | return orig(ui, repo, *args, **opts) |
|
166 | return orig(ui, repo, *args, **opts) | |
167 |
|
167 | |||
168 | def pullbundle2extraprepare(orig, pullop, kwargs): |
|
168 | def pullbundle2extraprepare(orig, pullop, kwargs): | |
169 | repo = pullop.repo |
|
169 | repo = pullop.repo | |
170 | if repository.NARROW_REQUIREMENT not in repo.requirements: |
|
170 | if repository.NARROW_REQUIREMENT not in repo.requirements: | |
171 | return orig(pullop, kwargs) |
|
171 | return orig(pullop, kwargs) | |
172 |
|
172 | |||
173 | if narrowbundle2.NARROWCAP not in pullop.remotebundle2caps: |
|
173 | if narrowbundle2.NARROWCAP not in pullop.remotebundle2caps: | |
174 | raise error.Abort(_("server doesn't support narrow clones")) |
|
174 | raise error.Abort(_("server doesn't support narrow clones")) | |
175 | orig(pullop, kwargs) |
|
175 | orig(pullop, kwargs) | |
176 | kwargs['narrow'] = True |
|
176 | kwargs['narrow'] = True | |
177 | include, exclude = repo.narrowpats |
|
177 | include, exclude = repo.narrowpats | |
178 | kwargs['oldincludepats'] = include |
|
178 | kwargs['oldincludepats'] = include | |
179 | kwargs['oldexcludepats'] = exclude |
|
179 | kwargs['oldexcludepats'] = exclude | |
180 | kwargs['includepats'] = include |
|
180 | kwargs['includepats'] = include | |
181 | kwargs['excludepats'] = exclude |
|
181 | kwargs['excludepats'] = exclude | |
182 | kwargs['known'] = [node.hex(ctx.node()) for ctx in |
|
182 | kwargs['known'] = [node.hex(ctx.node()) for ctx in | |
183 | repo.set('::%ln', pullop.common) |
|
183 | repo.set('::%ln', pullop.common) | |
184 | if ctx.node() != node.nullid] |
|
184 | if ctx.node() != node.nullid] | |
185 | if not kwargs['known']: |
|
185 | if not kwargs['known']: | |
186 | # Mercurial serialized an empty list as '' and deserializes it as |
|
186 | # Mercurial serialized an empty list as '' and deserializes it as | |
187 | # [''], so delete it instead to avoid handling the empty string on the |
|
187 | # [''], so delete it instead to avoid handling the empty string on the | |
188 | # server. |
|
188 | # server. | |
189 | del kwargs['known'] |
|
189 | del kwargs['known'] | |
190 |
|
190 | |||
191 | extensions.wrapfunction(exchange,'_pullbundle2extraprepare', |
|
191 | extensions.wrapfunction(exchange,'_pullbundle2extraprepare', | |
192 | pullbundle2extraprepare) |
|
192 | pullbundle2extraprepare) | |
193 |
|
193 | |||
194 | def _narrow(ui, repo, remote, commoninc, oldincludes, oldexcludes, |
|
194 | def _narrow(ui, repo, remote, commoninc, oldincludes, oldexcludes, | |
195 | newincludes, newexcludes, force): |
|
195 | newincludes, newexcludes, force): | |
196 | oldmatch = narrowspec.match(repo.root, oldincludes, oldexcludes) |
|
196 | oldmatch = narrowspec.match(repo.root, oldincludes, oldexcludes) | |
197 | newmatch = narrowspec.match(repo.root, newincludes, newexcludes) |
|
197 | newmatch = narrowspec.match(repo.root, newincludes, newexcludes) | |
198 |
|
198 | |||
199 | # This is essentially doing "hg outgoing" to find all local-only |
|
199 | # This is essentially doing "hg outgoing" to find all local-only | |
200 | # commits. We will then check that the local-only commits don't |
|
200 | # commits. We will then check that the local-only commits don't | |
201 | # have any changes to files that will be untracked. |
|
201 | # have any changes to files that will be untracked. | |
202 | unfi = repo.unfiltered() |
|
202 | unfi = repo.unfiltered() | |
203 | outgoing = discovery.findcommonoutgoing(unfi, remote, |
|
203 | outgoing = discovery.findcommonoutgoing(unfi, remote, | |
204 | commoninc=commoninc) |
|
204 | commoninc=commoninc) | |
205 | ui.status(_('looking for local changes to affected paths\n')) |
|
205 | ui.status(_('looking for local changes to affected paths\n')) | |
206 | localnodes = [] |
|
206 | localnodes = [] | |
207 | for n in itertools.chain(outgoing.missing, outgoing.excluded): |
|
207 | for n in itertools.chain(outgoing.missing, outgoing.excluded): | |
208 | if any(oldmatch(f) and not newmatch(f) for f in unfi[n].files()): |
|
208 | if any(oldmatch(f) and not newmatch(f) for f in unfi[n].files()): | |
209 | localnodes.append(n) |
|
209 | localnodes.append(n) | |
210 | revstostrip = unfi.revs('descendants(%ln)', localnodes) |
|
210 | revstostrip = unfi.revs('descendants(%ln)', localnodes) | |
211 | hiddenrevs = repoview.filterrevs(repo, 'visible') |
|
211 | hiddenrevs = repoview.filterrevs(repo, 'visible') | |
212 | visibletostrip = list(repo.changelog.node(r) |
|
212 | visibletostrip = list(repo.changelog.node(r) | |
213 | for r in (revstostrip - hiddenrevs)) |
|
213 | for r in (revstostrip - hiddenrevs)) | |
214 | if visibletostrip: |
|
214 | if visibletostrip: | |
215 | ui.status(_('The following changeset(s) or their ancestors have ' |
|
215 | ui.status(_('The following changeset(s) or their ancestors have ' | |
216 | 'local changes not on the remote:\n')) |
|
216 | 'local changes not on the remote:\n')) | |
217 | maxnodes = 10 |
|
217 | maxnodes = 10 | |
218 | if ui.verbose or len(visibletostrip) <= maxnodes: |
|
218 | if ui.verbose or len(visibletostrip) <= maxnodes: | |
219 | for n in visibletostrip: |
|
219 | for n in visibletostrip: | |
220 | ui.status('%s\n' % node.short(n)) |
|
220 | ui.status('%s\n' % node.short(n)) | |
221 | else: |
|
221 | else: | |
222 | for n in visibletostrip[:maxnodes]: |
|
222 | for n in visibletostrip[:maxnodes]: | |
223 | ui.status('%s\n' % node.short(n)) |
|
223 | ui.status('%s\n' % node.short(n)) | |
224 | ui.status(_('...and %d more, use --verbose to list all\n') % |
|
224 | ui.status(_('...and %d more, use --verbose to list all\n') % | |
225 | (len(visibletostrip) - maxnodes)) |
|
225 | (len(visibletostrip) - maxnodes)) | |
226 | if not force: |
|
226 | if not force: | |
227 | raise error.Abort(_('local changes found'), |
|
227 | raise error.Abort(_('local changes found'), | |
228 | hint=_('use --force-delete-local-changes to ' |
|
228 | hint=_('use --force-delete-local-changes to ' | |
229 | 'ignore')) |
|
229 | 'ignore')) | |
230 |
|
230 | |||
231 | with ui.uninterruptable(): |
|
231 | with ui.uninterruptable(): | |
232 | if revstostrip: |
|
232 | if revstostrip: | |
233 | tostrip = [unfi.changelog.node(r) for r in revstostrip] |
|
233 | tostrip = [unfi.changelog.node(r) for r in revstostrip] | |
234 | if repo['.'].node() in tostrip: |
|
234 | if repo['.'].node() in tostrip: | |
235 | # stripping working copy, so move to a different commit first |
|
235 | # stripping working copy, so move to a different commit first | |
236 | urev = max(repo.revs('(::%n) - %ln + null', |
|
236 | urev = max(repo.revs('(::%n) - %ln + null', | |
237 | repo['.'].node(), visibletostrip)) |
|
237 | repo['.'].node(), visibletostrip)) | |
238 | hg.clean(repo, urev) |
|
238 | hg.clean(repo, urev) | |
239 | repair.strip(ui, unfi, tostrip, topic='narrow') |
|
239 | repair.strip(ui, unfi, tostrip, topic='narrow') | |
240 |
|
240 | |||
241 | todelete = [] |
|
241 | todelete = [] | |
242 | for f, f2, size in repo.store.datafiles(): |
|
242 | for f, f2, size in repo.store.datafiles(): | |
243 | if f.startswith('data/'): |
|
243 | if f.startswith('data/'): | |
244 | file = f[5:-2] |
|
244 | file = f[5:-2] | |
245 | if not newmatch(file): |
|
245 | if not newmatch(file): | |
246 | todelete.append(f) |
|
246 | todelete.append(f) | |
247 | elif f.startswith('meta/'): |
|
247 | elif f.startswith('meta/'): | |
248 | dir = f[5:-13] |
|
248 | dir = f[5:-13] | |
249 | dirs = ['.'] + sorted(util.dirs({dir})) + [dir] |
|
249 | dirs = ['.'] + sorted(util.dirs({dir})) + [dir] | |
250 | include = True |
|
250 | include = True | |
251 | for d in dirs: |
|
251 | for d in dirs: | |
252 | visit = newmatch.visitdir(d) |
|
252 | visit = newmatch.visitdir(d) | |
253 | if not visit: |
|
253 | if not visit: | |
254 | include = False |
|
254 | include = False | |
255 | break |
|
255 | break | |
256 | if visit == 'all': |
|
256 | if visit == 'all': | |
257 | break |
|
257 | break | |
258 | if not include: |
|
258 | if not include: | |
259 | todelete.append(f) |
|
259 | todelete.append(f) | |
260 |
|
260 | |||
261 | repo.destroying() |
|
261 | repo.destroying() | |
262 |
|
262 | |||
263 | with repo.transaction("narrowing"): |
|
263 | with repo.transaction("narrowing"): | |
264 | for f in todelete: |
|
264 | for f in todelete: | |
265 | ui.status(_('deleting %s\n') % f) |
|
265 | ui.status(_('deleting %s\n') % f) | |
266 | util.unlinkpath(repo.svfs.join(f)) |
|
266 | util.unlinkpath(repo.svfs.join(f)) | |
267 | repo.store.markremoved(f) |
|
267 | repo.store.markremoved(f) | |
268 |
|
268 | |||
269 | for f in repo.dirstate: |
|
269 | for f in repo.dirstate: | |
270 | if not newmatch(f): |
|
270 | if not newmatch(f): | |
271 | repo.dirstate.drop(f) |
|
271 | repo.dirstate.drop(f) | |
272 | repo.wvfs.unlinkpath(f) |
|
272 | repo.wvfs.unlinkpath(f) | |
273 | repo.setnarrowpats(newincludes, newexcludes) |
|
273 | repo.setnarrowpats(newincludes, newexcludes) | |
274 |
|
274 | |||
275 | repo.destroyed() |
|
275 | repo.destroyed() | |
276 |
|
276 | |||
277 | def _widen(ui, repo, remote, commoninc, newincludes, newexcludes): |
|
277 | def _widen(ui, repo, remote, commoninc, newincludes, newexcludes): | |
278 | newmatch = narrowspec.match(repo.root, newincludes, newexcludes) |
|
278 | newmatch = narrowspec.match(repo.root, newincludes, newexcludes) | |
279 |
|
279 | |||
280 | # TODO(martinvonz): Get expansion working with widening/narrowing. |
|
280 | # TODO(martinvonz): Get expansion working with widening/narrowing. | |
281 | if narrowspec.needsexpansion(newincludes): |
|
281 | if narrowspec.needsexpansion(newincludes): | |
282 | raise error.Abort('Expansion not yet supported on pull') |
|
282 | raise error.Abort('Expansion not yet supported on pull') | |
283 |
|
283 | |||
284 | def pullbundle2extraprepare_widen(orig, pullop, kwargs): |
|
284 | def pullbundle2extraprepare_widen(orig, pullop, kwargs): | |
285 | orig(pullop, kwargs) |
|
285 | orig(pullop, kwargs) | |
286 | # The old{in,ex}cludepats have already been set by orig() |
|
286 | # The old{in,ex}cludepats have already been set by orig() | |
287 | kwargs['includepats'] = newincludes |
|
287 | kwargs['includepats'] = newincludes | |
288 | kwargs['excludepats'] = newexcludes |
|
288 | kwargs['excludepats'] = newexcludes | |
|
289 | kwargs['widen'] = True | |||
289 | wrappedextraprepare = extensions.wrappedfunction(exchange, |
|
290 | wrappedextraprepare = extensions.wrappedfunction(exchange, | |
290 | '_pullbundle2extraprepare', pullbundle2extraprepare_widen) |
|
291 | '_pullbundle2extraprepare', pullbundle2extraprepare_widen) | |
291 |
|
292 | |||
292 | # define a function that narrowbundle2 can call after creating the |
|
293 | # define a function that narrowbundle2 can call after creating the | |
293 | # backup bundle, but before applying the bundle from the server |
|
294 | # backup bundle, but before applying the bundle from the server | |
294 | def setnewnarrowpats(): |
|
295 | def setnewnarrowpats(): | |
295 | repo.setnarrowpats(newincludes, newexcludes) |
|
296 | repo.setnarrowpats(newincludes, newexcludes) | |
296 | repo.setnewnarrowpats = setnewnarrowpats |
|
297 | repo.setnewnarrowpats = setnewnarrowpats | |
297 |
|
298 | |||
298 | with ui.uninterruptable(): |
|
299 | with ui.uninterruptable(): | |
299 | ds = repo.dirstate |
|
300 | ds = repo.dirstate | |
300 | p1, p2 = ds.p1(), ds.p2() |
|
301 | p1, p2 = ds.p1(), ds.p2() | |
301 | with ds.parentchange(): |
|
302 | with ds.parentchange(): | |
302 | ds.setparents(node.nullid, node.nullid) |
|
303 | ds.setparents(node.nullid, node.nullid) | |
303 | common = commoninc[0] |
|
304 | common = commoninc[0] | |
304 | with wrappedextraprepare: |
|
305 | with wrappedextraprepare: | |
305 | exchange.pull(repo, remote, heads=common) |
|
306 | exchange.pull(repo, remote, heads=common) | |
306 | with ds.parentchange(): |
|
307 | with ds.parentchange(): | |
307 | ds.setparents(p1, p2) |
|
308 | ds.setparents(p1, p2) | |
308 |
|
309 | |||
309 | actions = {k: [] for k in 'a am f g cd dc r dm dg m e k p pr'.split()} |
|
310 | actions = {k: [] for k in 'a am f g cd dc r dm dg m e k p pr'.split()} | |
310 | addgaction = actions['g'].append |
|
311 | addgaction = actions['g'].append | |
311 |
|
312 | |||
312 | mf = repo['.'].manifest().matches(newmatch) |
|
313 | mf = repo['.'].manifest().matches(newmatch) | |
313 | for f, fn in mf.iteritems(): |
|
314 | for f, fn in mf.iteritems(): | |
314 | if f not in repo.dirstate: |
|
315 | if f not in repo.dirstate: | |
315 | addgaction((f, (mf.flags(f), False), |
|
316 | addgaction((f, (mf.flags(f), False), | |
316 | "add from widened narrow clone")) |
|
317 | "add from widened narrow clone")) | |
317 |
|
318 | |||
318 | merge.applyupdates(repo, actions, wctx=repo[None], |
|
319 | merge.applyupdates(repo, actions, wctx=repo[None], | |
319 | mctx=repo['.'], overwrite=False) |
|
320 | mctx=repo['.'], overwrite=False) | |
320 | merge.recordupdates(repo, actions, branchmerge=False) |
|
321 | merge.recordupdates(repo, actions, branchmerge=False) | |
321 |
|
322 | |||
322 | # TODO(rdamazio): Make new matcher format and update description |
|
323 | # TODO(rdamazio): Make new matcher format and update description | |
323 | @command('tracked', |
|
324 | @command('tracked', | |
324 | [('', 'addinclude', [], _('new paths to include')), |
|
325 | [('', 'addinclude', [], _('new paths to include')), | |
325 | ('', 'removeinclude', [], _('old paths to no longer include')), |
|
326 | ('', 'removeinclude', [], _('old paths to no longer include')), | |
326 | ('', 'addexclude', [], _('new paths to exclude')), |
|
327 | ('', 'addexclude', [], _('new paths to exclude')), | |
327 | ('', 'removeexclude', [], _('old paths to no longer exclude')), |
|
328 | ('', 'removeexclude', [], _('old paths to no longer exclude')), | |
328 | ('', 'clear', False, _('whether to replace the existing narrowspec')), |
|
329 | ('', 'clear', False, _('whether to replace the existing narrowspec')), | |
329 | ('', 'force-delete-local-changes', False, |
|
330 | ('', 'force-delete-local-changes', False, | |
330 | _('forces deletion of local changes when narrowing')), |
|
331 | _('forces deletion of local changes when narrowing')), | |
331 | ] + commands.remoteopts, |
|
332 | ] + commands.remoteopts, | |
332 | _('[OPTIONS]... [REMOTE]'), |
|
333 | _('[OPTIONS]... [REMOTE]'), | |
333 | inferrepo=True) |
|
334 | inferrepo=True) | |
334 | def trackedcmd(ui, repo, remotepath=None, *pats, **opts): |
|
335 | def trackedcmd(ui, repo, remotepath=None, *pats, **opts): | |
335 | """show or change the current narrowspec |
|
336 | """show or change the current narrowspec | |
336 |
|
337 | |||
337 | With no argument, shows the current narrowspec entries, one per line. Each |
|
338 | With no argument, shows the current narrowspec entries, one per line. Each | |
338 | line will be prefixed with 'I' or 'X' for included or excluded patterns, |
|
339 | line will be prefixed with 'I' or 'X' for included or excluded patterns, | |
339 | respectively. |
|
340 | respectively. | |
340 |
|
341 | |||
341 | The narrowspec is comprised of expressions to match remote files and/or |
|
342 | The narrowspec is comprised of expressions to match remote files and/or | |
342 | directories that should be pulled into your client. |
|
343 | directories that should be pulled into your client. | |
343 | The narrowspec has *include* and *exclude* expressions, with excludes always |
|
344 | The narrowspec has *include* and *exclude* expressions, with excludes always | |
344 | trumping includes: that is, if a file matches an exclude expression, it will |
|
345 | trumping includes: that is, if a file matches an exclude expression, it will | |
345 | be excluded even if it also matches an include expression. |
|
346 | be excluded even if it also matches an include expression. | |
346 | Excluding files that were never included has no effect. |
|
347 | Excluding files that were never included has no effect. | |
347 |
|
348 | |||
348 | Each included or excluded entry is in the format described by |
|
349 | Each included or excluded entry is in the format described by | |
349 | 'hg help patterns'. |
|
350 | 'hg help patterns'. | |
350 |
|
351 | |||
351 | The options allow you to add or remove included and excluded expressions. |
|
352 | The options allow you to add or remove included and excluded expressions. | |
352 |
|
353 | |||
353 | If --clear is specified, then all previous includes and excludes are DROPPED |
|
354 | If --clear is specified, then all previous includes and excludes are DROPPED | |
354 | and replaced by the new ones specified to --addinclude and --addexclude. |
|
355 | and replaced by the new ones specified to --addinclude and --addexclude. | |
355 | If --clear is specified without any further options, the narrowspec will be |
|
356 | If --clear is specified without any further options, the narrowspec will be | |
356 | empty and will not match any files. |
|
357 | empty and will not match any files. | |
357 | """ |
|
358 | """ | |
358 | opts = pycompat.byteskwargs(opts) |
|
359 | opts = pycompat.byteskwargs(opts) | |
359 | if repository.NARROW_REQUIREMENT not in repo.requirements: |
|
360 | if repository.NARROW_REQUIREMENT not in repo.requirements: | |
360 | ui.warn(_('The narrow command is only supported on respositories cloned' |
|
361 | ui.warn(_('The narrow command is only supported on respositories cloned' | |
361 | ' with --narrow.\n')) |
|
362 | ' with --narrow.\n')) | |
362 | return 1 |
|
363 | return 1 | |
363 |
|
364 | |||
364 | # Before supporting it, decide whether "hg tracked --clear" should mean |
|
365 | # Before supporting it, decide whether "hg tracked --clear" should mean | |
365 | # tracking no paths or all paths. |
|
366 | # tracking no paths or all paths. | |
366 | if opts['clear']: |
|
367 | if opts['clear']: | |
367 | ui.warn(_('The --clear option is not yet supported.\n')) |
|
368 | ui.warn(_('The --clear option is not yet supported.\n')) | |
368 | return 1 |
|
369 | return 1 | |
369 |
|
370 | |||
370 | if narrowspec.needsexpansion(opts['addinclude'] + opts['addexclude']): |
|
371 | if narrowspec.needsexpansion(opts['addinclude'] + opts['addexclude']): | |
371 | raise error.Abort('Expansion not yet supported on widen/narrow') |
|
372 | raise error.Abort('Expansion not yet supported on widen/narrow') | |
372 |
|
373 | |||
373 | addedincludes = narrowspec.parsepatterns(opts['addinclude']) |
|
374 | addedincludes = narrowspec.parsepatterns(opts['addinclude']) | |
374 | removedincludes = narrowspec.parsepatterns(opts['removeinclude']) |
|
375 | removedincludes = narrowspec.parsepatterns(opts['removeinclude']) | |
375 | addedexcludes = narrowspec.parsepatterns(opts['addexclude']) |
|
376 | addedexcludes = narrowspec.parsepatterns(opts['addexclude']) | |
376 | removedexcludes = narrowspec.parsepatterns(opts['removeexclude']) |
|
377 | removedexcludes = narrowspec.parsepatterns(opts['removeexclude']) | |
377 | widening = addedincludes or removedexcludes |
|
378 | widening = addedincludes or removedexcludes | |
378 | narrowing = removedincludes or addedexcludes |
|
379 | narrowing = removedincludes or addedexcludes | |
379 | only_show = not widening and not narrowing |
|
380 | only_show = not widening and not narrowing | |
380 |
|
381 | |||
381 | # Only print the current narrowspec. |
|
382 | # Only print the current narrowspec. | |
382 | if only_show: |
|
383 | if only_show: | |
383 | include, exclude = repo.narrowpats |
|
384 | include, exclude = repo.narrowpats | |
384 |
|
385 | |||
385 | ui.pager('tracked') |
|
386 | ui.pager('tracked') | |
386 | fm = ui.formatter('narrow', opts) |
|
387 | fm = ui.formatter('narrow', opts) | |
387 | for i in sorted(include): |
|
388 | for i in sorted(include): | |
388 | fm.startitem() |
|
389 | fm.startitem() | |
389 | fm.write('status', '%s ', 'I', label='narrow.included') |
|
390 | fm.write('status', '%s ', 'I', label='narrow.included') | |
390 | fm.write('pat', '%s\n', i, label='narrow.included') |
|
391 | fm.write('pat', '%s\n', i, label='narrow.included') | |
391 | for i in sorted(exclude): |
|
392 | for i in sorted(exclude): | |
392 | fm.startitem() |
|
393 | fm.startitem() | |
393 | fm.write('status', '%s ', 'X', label='narrow.excluded') |
|
394 | fm.write('status', '%s ', 'X', label='narrow.excluded') | |
394 | fm.write('pat', '%s\n', i, label='narrow.excluded') |
|
395 | fm.write('pat', '%s\n', i, label='narrow.excluded') | |
395 | fm.end() |
|
396 | fm.end() | |
396 | return 0 |
|
397 | return 0 | |
397 |
|
398 | |||
398 | with repo.wlock(), repo.lock(): |
|
399 | with repo.wlock(), repo.lock(): | |
399 | cmdutil.bailifchanged(repo) |
|
400 | cmdutil.bailifchanged(repo) | |
400 |
|
401 | |||
401 | # Find the revisions we have in common with the remote. These will |
|
402 | # Find the revisions we have in common with the remote. These will | |
402 | # be used for finding local-only changes for narrowing. They will |
|
403 | # be used for finding local-only changes for narrowing. They will | |
403 | # also define the set of revisions to update for widening. |
|
404 | # also define the set of revisions to update for widening. | |
404 | remotepath = ui.expandpath(remotepath or 'default') |
|
405 | remotepath = ui.expandpath(remotepath or 'default') | |
405 | url, branches = hg.parseurl(remotepath) |
|
406 | url, branches = hg.parseurl(remotepath) | |
406 | ui.status(_('comparing with %s\n') % util.hidepassword(url)) |
|
407 | ui.status(_('comparing with %s\n') % util.hidepassword(url)) | |
407 | remote = hg.peer(repo, opts, url) |
|
408 | remote = hg.peer(repo, opts, url) | |
408 | commoninc = discovery.findcommonincoming(repo, remote) |
|
409 | commoninc = discovery.findcommonincoming(repo, remote) | |
409 |
|
410 | |||
410 | oldincludes, oldexcludes = repo.narrowpats |
|
411 | oldincludes, oldexcludes = repo.narrowpats | |
411 | if narrowing: |
|
412 | if narrowing: | |
412 | newincludes = oldincludes - removedincludes |
|
413 | newincludes = oldincludes - removedincludes | |
413 | newexcludes = oldexcludes | addedexcludes |
|
414 | newexcludes = oldexcludes | addedexcludes | |
414 | _narrow(ui, repo, remote, commoninc, oldincludes, oldexcludes, |
|
415 | _narrow(ui, repo, remote, commoninc, oldincludes, oldexcludes, | |
415 | newincludes, newexcludes, |
|
416 | newincludes, newexcludes, | |
416 | opts['force_delete_local_changes']) |
|
417 | opts['force_delete_local_changes']) | |
417 | # _narrow() updated the narrowspec and _widen() below needs to |
|
418 | # _narrow() updated the narrowspec and _widen() below needs to | |
418 | # use the updated values as its base (otherwise removed includes |
|
419 | # use the updated values as its base (otherwise removed includes | |
419 | # and addedexcludes will be lost in the resulting narrowspec) |
|
420 | # and addedexcludes will be lost in the resulting narrowspec) | |
420 | oldincludes = newincludes |
|
421 | oldincludes = newincludes | |
421 | oldexcludes = newexcludes |
|
422 | oldexcludes = newexcludes | |
422 |
|
423 | |||
423 | if widening: |
|
424 | if widening: | |
424 | newincludes = oldincludes | addedincludes |
|
425 | newincludes = oldincludes | addedincludes | |
425 | newexcludes = oldexcludes - removedexcludes |
|
426 | newexcludes = oldexcludes - removedexcludes | |
426 | _widen(ui, repo, remote, commoninc, newincludes, newexcludes) |
|
427 | _widen(ui, repo, remote, commoninc, newincludes, newexcludes) | |
427 |
|
428 | |||
428 | return 0 |
|
429 | return 0 |
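
The widening and narrowing logic in trackedcmd() above reduces to set arithmetic on the parsed patterns: narrowing removes includes and adds excludes, widening adds includes and removes excludes, and when both are requested the narrow step runs first so the widen step starts from the already-narrowed spec. Below is a minimal sketch of that bookkeeping; the function name and the plain Python sets are illustrative stand-ins for narrowspec.parsepatterns() results, not part of the extension's API.

# Illustrative sketch of the include/exclude bookkeeping done by trackedcmd().
# Plain sets stand in for parsed narrowspec patterns; the helper name is invented.
def updated_narrowspec(oldincludes, oldexcludes,
                       addedincludes, removedincludes,
                       addedexcludes, removedexcludes):
    widening = bool(addedincludes or removedexcludes)
    narrowing = bool(removedincludes or addedexcludes)
    newincludes, newexcludes = set(oldincludes), set(oldexcludes)
    if narrowing:
        # Narrow first so a combined narrow+widen starts from the narrowed spec.
        newincludes -= set(removedincludes)
        newexcludes |= set(addedexcludes)
    if widening:
        newincludes |= set(addedincludes)
        newexcludes -= set(removedexcludes)
    return newincludes, newexcludes

# Example: track only 'inside', then widen to also track 'wider'.
print(updated_narrowspec({'path:inside'}, set(),
                         addedincludes={'path:wider'}, removedincludes=set(),
                         addedexcludes=set(), removedexcludes=set()))
# ({'path:inside', 'path:wider'}, set())  (set ordering may differ)
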
@@ -1,378 +1,397 b'' | |||||
1 | $ . "$TESTDIR/narrow-library.sh" |
|
1 | $ . "$TESTDIR/narrow-library.sh" | |
2 |
|
2 | |||
3 | $ cat << EOF >> $HGRCPATH |
|
3 | $ cat << EOF >> $HGRCPATH | |
4 | > [experimental] |
|
4 | > [experimental] | |
5 | > treemanifest = 1 |
|
5 | > treemanifest = 1 | |
6 | > EOF |
|
6 | > EOF | |
7 |
|
7 | |||
8 | $ hg init master |
|
8 | $ hg init master | |
9 | $ cd master |
|
9 | $ cd master | |
10 |
|
10 | |||
11 | $ mkdir inside |
|
11 | $ mkdir inside | |
12 | $ echo 'inside' > inside/f |
|
12 | $ echo 'inside' > inside/f | |
13 | $ hg add inside/f |
|
13 | $ hg add inside/f | |
14 | $ hg commit -m 'add inside' |
|
14 | $ hg commit -m 'add inside' | |
15 |
|
15 | |||
16 | $ mkdir widest |
|
16 | $ mkdir widest | |
17 | $ echo 'widest' > widest/f |
|
17 | $ echo 'widest' > widest/f | |
18 | $ hg add widest/f |
|
18 | $ hg add widest/f | |
19 | $ hg commit -m 'add widest' |
|
19 | $ hg commit -m 'add widest' | |
20 |
|
20 | |||
21 | $ mkdir outside |
|
21 | $ mkdir outside | |
22 | $ echo 'outside' > outside/f |
|
22 | $ echo 'outside' > outside/f | |
23 | $ hg add outside/f |
|
23 | $ hg add outside/f | |
24 | $ hg commit -m 'add outside' |
|
24 | $ hg commit -m 'add outside' | |
25 |
|
25 | |||
26 | $ cd .. |
|
26 | $ cd .. | |
27 |
|
27 | |||
28 | narrow clone the inside file |
|
28 | narrow clone the inside file | |
29 |
|
29 | |||
30 | $ hg clone --narrow ssh://user@dummy/master narrow --include inside |
|
30 | $ hg clone --narrow ssh://user@dummy/master narrow --include inside | |
31 | requesting all changes |
|
31 | requesting all changes | |
32 | adding changesets |
|
32 | adding changesets | |
33 | adding manifests |
|
33 | adding manifests | |
34 | adding file changes |
|
34 | adding file changes | |
35 | added 3 changesets with 1 changes to 1 files |
|
35 | added 3 changesets with 1 changes to 1 files | |
36 | new changesets *:* (glob) |
|
36 | new changesets *:* (glob) | |
37 | updating to branch default |
|
37 | updating to branch default | |
38 | 1 files updated, 0 files merged, 0 files removed, 0 files unresolved |
|
38 | 1 files updated, 0 files merged, 0 files removed, 0 files unresolved | |
39 | $ cd narrow |
|
39 | $ cd narrow | |
40 | $ hg tracked |
|
40 | $ hg tracked | |
41 | I path:inside |
|
41 | I path:inside | |
42 | $ ls |
|
42 | $ ls | |
43 | inside |
|
43 | inside | |
44 | $ cat inside/f |
|
44 | $ cat inside/f | |
45 | inside |
|
45 | inside | |
46 | $ cd .. |
|
46 | $ cd .. | |
47 |
|
47 | |||
48 | add more upstream files which we will include in a wider narrow spec |
|
48 | add more upstream files which we will include in a wider narrow spec | |
49 |
|
49 | |||
50 | $ cd master |
|
50 | $ cd master | |
51 |
|
51 | |||
52 | $ mkdir wider |
|
52 | $ mkdir wider | |
53 | $ echo 'wider' > wider/f |
|
53 | $ echo 'wider' > wider/f | |
54 | $ hg add wider/f |
|
54 | $ hg add wider/f | |
55 | $ echo 'widest v2' > widest/f |
|
55 | $ echo 'widest v2' > widest/f | |
56 | $ hg commit -m 'add wider, update widest' |
|
56 | $ hg commit -m 'add wider, update widest' | |
57 |
|
57 | |||
58 | $ echo 'widest v3' > widest/f |
|
58 | $ echo 'widest v3' > widest/f | |
59 | $ hg commit -m 'update widest v3' |
|
59 | $ hg commit -m 'update widest v3' | |
60 |
|
60 | |||
61 | $ echo 'inside v2' > inside/f |
|
61 | $ echo 'inside v2' > inside/f | |
62 | $ hg commit -m 'update inside' |
|
62 | $ hg commit -m 'update inside' | |
63 |
|
63 | |||
64 | $ mkdir outside2 |
|
64 | $ mkdir outside2 | |
65 | $ echo 'outside2' > outside2/f |
|
65 | $ echo 'outside2' > outside2/f | |
66 | $ hg add outside2/f |
|
66 | $ hg add outside2/f | |
67 | $ hg commit -m 'add outside2' |
|
67 | $ hg commit -m 'add outside2' | |
68 |
|
68 | |||
69 | $ echo 'widest v4' > widest/f |
|
69 | $ echo 'widest v4' > widest/f | |
70 | $ hg commit -m 'update widest v4' |
|
70 | $ hg commit -m 'update widest v4' | |
71 |
|
71 | |||
72 | $ hg log -T "{if(ellipsis, '...')}{node|short}: {desc}\n" |
|
72 | $ hg log -T "{if(ellipsis, '...')}{node|short}: {desc}\n" | |
73 | *: update widest v4 (glob) |
|
73 | *: update widest v4 (glob) | |
74 | *: add outside2 (glob) |
|
74 | *: add outside2 (glob) | |
75 | *: update inside (glob) |
|
75 | *: update inside (glob) | |
76 | *: update widest v3 (glob) |
|
76 | *: update widest v3 (glob) | |
77 | *: add wider, update widest (glob) |
|
77 | *: add wider, update widest (glob) | |
78 | *: add outside (glob) |
|
78 | *: add outside (glob) | |
79 | *: add widest (glob) |
|
79 | *: add widest (glob) | |
80 | *: add inside (glob) |
|
80 | *: add inside (glob) | |
81 |
|
81 | |||
82 | $ cd .. |
|
82 | $ cd .. | |
83 |
|
83 | |||
84 | Widen the narrow spec to see the wider file. This should not get the newly |
|
84 | Widen the narrow spec to see the wider file. This should not get the newly | |
85 | added upstream revisions. |
|
85 | added upstream revisions. | |
86 |
|
86 | |||
87 | $ cd narrow |
|
87 | $ cd narrow | |
88 | $ hg tracked --addinclude wider/f |
|
88 | $ hg tracked --addinclude wider/f | |
89 | comparing with ssh://user@dummy/master |
|
89 | comparing with ssh://user@dummy/master | |
90 | searching for changes |
|
90 | searching for changes | |
91 | no changes found |
|
91 | no changes found | |
|
92 | adding changesets | |||
|
93 | adding manifests | |||
|
94 | adding file changes | |||
|
95 | added 0 changesets with 0 changes to 1 files | |||
92 | 3 local changesets published |
|
96 | 3 local changesets published | |
93 | $ hg tracked |
|
97 | $ hg tracked | |
94 | I path:inside |
|
98 | I path:inside | |
95 |
|
99 | |||
96 | Pull down the newly added upstream revision. |
|
100 | Pull down the newly added upstream revision. | |
97 |
|
101 | |||
98 | $ hg pull |
|
102 | $ hg pull | |
99 | pulling from ssh://user@dummy/master |
|
103 | pulling from ssh://user@dummy/master | |
100 | searching for changes |
|
104 | searching for changes | |
101 | adding changesets |
|
105 | adding changesets | |
102 | adding manifests |
|
106 | adding manifests | |
103 | adding file changes |
|
107 | adding file changes | |
104 | added 5 changesets with 1 changes to 1 files |
|
108 | added 5 changesets with 1 changes to 1 files | |
105 | new changesets *:* (glob) |
|
109 | new changesets *:* (glob) | |
106 | (run 'hg update' to get a working copy) |
|
110 | (run 'hg update' to get a working copy) | |
107 | $ hg update -r 'desc("add wider")' |
|
111 | $ hg update -r 'desc("add wider")' | |
108 | 0 files updated, 0 files merged, 0 files removed, 0 files unresolved |
|
112 | 0 files updated, 0 files merged, 0 files removed, 0 files unresolved | |
109 | $ cat wider/f |
|
113 | $ cat wider/f | |
110 | cat: wider/f: $ENOENT$ |
|
114 | cat: wider/f: $ENOENT$ | |
111 | [1] |
|
115 | [1] | |
112 |
|
116 | |||
113 | $ hg update -r 'desc("update inside")' |
|
117 | $ hg update -r 'desc("update inside")' | |
114 | 1 files updated, 0 files merged, 0 files removed, 0 files unresolved |
|
118 | 1 files updated, 0 files merged, 0 files removed, 0 files unresolved | |
115 | $ cat wider/f |
|
119 | $ cat wider/f | |
116 | cat: wider/f: $ENOENT$ |
|
120 | cat: wider/f: $ENOENT$ | |
117 | [1] |
|
121 | [1] | |
118 | $ cat inside/f |
|
122 | $ cat inside/f | |
119 | inside v2 |
|
123 | inside v2 | |
120 |
|
124 | |||
121 | $ hg log -T "{if(ellipsis, '...')}{node|short}: {desc}\n" |
|
125 | $ hg log -T "{if(ellipsis, '...')}{node|short}: {desc}\n" | |
122 | 45662f0793c7: update widest v4 |
|
126 | 45662f0793c7: update widest v4 | |
123 | 1dd1364b566e: add outside2 |
|
127 | 1dd1364b566e: add outside2 | |
124 | *: update inside (glob) |
|
128 | *: update inside (glob) | |
125 | be0600e3ccba: update widest v3 |
|
129 | be0600e3ccba: update widest v3 | |
126 | *: add wider, update widest (glob) |
|
130 | *: add wider, update widest (glob) | |
127 | 4922ea71b958: add outside |
|
131 | 4922ea71b958: add outside | |
128 | 40e0ea6c8cd7: add widest |
|
132 | 40e0ea6c8cd7: add widest | |
129 | *: add inside (glob) |
|
133 | *: add inside (glob) | |
130 |
|
134 | |||
131 | Check that widening with a newline fails |
|
135 | Check that widening with a newline fails | |
132 |
|
136 | |||
133 | $ hg tracked --addinclude 'widest |
|
137 | $ hg tracked --addinclude 'widest | |
134 | > ' |
|
138 | > ' | |
135 | abort: newlines are not allowed in narrowspec paths |
|
139 | abort: newlines are not allowed in narrowspec paths | |
136 | [255] |
|
140 | [255] | |
137 |
|
141 | |||
138 | widen the narrow spec to include the widest file |
|
142 | widen the narrow spec to include the widest file | |
139 |
|
143 | |||
140 | $ hg tracked --addinclude widest |
|
144 | $ hg tracked --addinclude widest | |
141 | comparing with ssh://user@dummy/master |
|
145 | comparing with ssh://user@dummy/master | |
142 | searching for changes |
|
146 | searching for changes | |
143 | no changes found |
|
147 | no changes found | |
|
148 | adding changesets | |||
|
149 | adding manifests | |||
|
150 | adding file changes | |||
|
151 | added 0 changesets with 4 changes to 2 files | |||
144 | 5 local changesets published |
|
152 | 5 local changesets published | |
145 | abort: path ends in directory separator: widest/ |
|
153 | abort: path ends in directory separator: widest/ | |
146 | [255] |
|
154 | [255] | |
147 | $ hg tracked |
|
155 | $ hg tracked | |
148 | I path:inside |
|
156 | I path:inside | |
149 | $ hg update 'desc("add widest")' |
|
157 | $ hg update 'desc("add widest")' | |
150 | 1 files updated, 0 files merged, 0 files removed, 0 files unresolved |
|
158 | 1 files updated, 0 files merged, 0 files removed, 0 files unresolved | |
151 | $ cat widest/f |
|
159 | $ cat widest/f | |
152 | cat: widest/f: $ENOENT$ |
|
160 | cat: widest/f: $ENOENT$ | |
153 | [1] |
|
161 | [1] | |
154 | $ hg update 'desc("add wider, update widest")' |
|
162 | $ hg update 'desc("add wider, update widest")' | |
155 | 0 files updated, 0 files merged, 0 files removed, 0 files unresolved |
|
163 | 0 files updated, 0 files merged, 0 files removed, 0 files unresolved | |
156 | $ cat wider/f |
|
164 | $ cat wider/f | |
157 | cat: wider/f: $ENOENT$ |
|
165 | cat: wider/f: $ENOENT$ | |
158 | [1] |
|
166 | [1] | |
159 | $ cat widest/f |
|
167 | $ cat widest/f | |
160 | cat: widest/f: $ENOENT$ |
|
168 | cat: widest/f: $ENOENT$ | |
161 | [1] |
|
169 | [1] | |
162 | $ hg update 'desc("update widest v3")' |
|
170 | $ hg update 'desc("update widest v3")' | |
163 | 0 files updated, 0 files merged, 0 files removed, 0 files unresolved |
|
171 | 0 files updated, 0 files merged, 0 files removed, 0 files unresolved | |
164 | $ cat widest/f |
|
172 | $ cat widest/f | |
165 | cat: widest/f: $ENOENT$ |
|
173 | cat: widest/f: $ENOENT$ | |
166 | [1] |
|
174 | [1] | |
167 | $ hg update 'desc("update widest v4")' |
|
175 | $ hg update 'desc("update widest v4")' | |
168 | 1 files updated, 0 files merged, 0 files removed, 0 files unresolved |
|
176 | 1 files updated, 0 files merged, 0 files removed, 0 files unresolved | |
169 | $ cat widest/f |
|
177 | $ cat widest/f | |
170 | cat: widest/f: $ENOENT$ |
|
178 | cat: widest/f: $ENOENT$ | |
171 | [1] |
|
179 | [1] | |
172 |
|
180 | |||
173 | $ hg log -T "{if(ellipsis, '...')}{node|short}: {desc}\n" |
|
181 | $ hg log -T "{if(ellipsis, '...')}{node|short}: {desc}\n" | |
174 | *: update widest v4 (glob) |
|
182 | *: update widest v4 (glob) | |
175 | 1dd1364b566e: add outside2 |
|
183 | 1dd1364b566e: add outside2 | |
176 | *: update inside (glob) |
|
184 | *: update inside (glob) | |
177 | *: update widest v3 (glob) |
|
185 | *: update widest v3 (glob) | |
178 | *: add wider, update widest (glob) |
|
186 | *: add wider, update widest (glob) | |
179 | 4922ea71b958: add outside |
|
187 | 4922ea71b958: add outside | |
180 | *: add widest (glob) |
|
188 | *: add widest (glob) | |
181 | *: add inside (glob) |
|
189 | *: add inside (glob) | |
182 |
|
190 | |||
183 | separate suite of tests: files from 0-10 modified in changes 0-10. This allows |
|
191 | separate suite of tests: files from 0-10 modified in changes 0-10. This allows | |
184 | more precise tests that exercise particular corner cases. |
|
192 | more precise tests that exercise particular corner cases. | |
185 |
|
193 | |||
186 | $ cd .. |
|
194 | $ cd .. | |
187 | $ hg init upstream |
|
195 | $ hg init upstream | |
188 | $ cd upstream |
|
196 | $ cd upstream | |
189 | $ for x in `$TESTDIR/seq.py 0 10` |
|
197 | $ for x in `$TESTDIR/seq.py 0 10` | |
190 | > do |
|
198 | > do | |
191 | > mkdir d$x |
|
199 | > mkdir d$x | |
192 | > echo $x > d$x/f |
|
200 | > echo $x > d$x/f | |
193 | > hg add d$x/f |
|
201 | > hg add d$x/f | |
194 | > hg commit -m "add d$x/f" |
|
202 | > hg commit -m "add d$x/f" | |
195 | > done |
|
203 | > done | |
196 | $ hg log -T "{node|short}: {desc}\n" |
|
204 | $ hg log -T "{node|short}: {desc}\n" | |
197 | *: add d10/f (glob) |
|
205 | *: add d10/f (glob) | |
198 | *: add d9/f (glob) |
|
206 | *: add d9/f (glob) | |
199 | *: add d8/f (glob) |
|
207 | *: add d8/f (glob) | |
200 | *: add d7/f (glob) |
|
208 | *: add d7/f (glob) | |
201 | *: add d6/f (glob) |
|
209 | *: add d6/f (glob) | |
202 | *: add d5/f (glob) |
|
210 | *: add d5/f (glob) | |
203 | *: add d4/f (glob) |
|
211 | *: add d4/f (glob) | |
204 | *: add d3/f (glob) |
|
212 | *: add d3/f (glob) | |
205 | *: add d2/f (glob) |
|
213 | *: add d2/f (glob) | |
206 | *: add d1/f (glob) |
|
214 | *: add d1/f (glob) | |
207 | *: add d0/f (glob) |
|
215 | *: add d0/f (glob) | |
208 |
|
216 | |||
209 | make narrow clone with every third node. |
|
217 | make narrow clone with every third node. | |
210 |
|
218 | |||
211 | $ cd .. |
|
219 | $ cd .. | |
212 | $ hg clone --narrow ssh://user@dummy/upstream narrow2 --include d0 --include d3 --include d6 --include d9 |
|
220 | $ hg clone --narrow ssh://user@dummy/upstream narrow2 --include d0 --include d3 --include d6 --include d9 | |
213 | requesting all changes |
|
221 | requesting all changes | |
214 | adding changesets |
|
222 | adding changesets | |
215 | adding manifests |
|
223 | adding manifests | |
216 | adding file changes |
|
224 | adding file changes | |
217 | added 11 changesets with 4 changes to 4 files |
|
225 | added 11 changesets with 4 changes to 4 files | |
218 | new changesets *:* (glob) |
|
226 | new changesets *:* (glob) | |
219 | updating to branch default |
|
227 | updating to branch default | |
220 | 4 files updated, 0 files merged, 0 files removed, 0 files unresolved |
|
228 | 4 files updated, 0 files merged, 0 files removed, 0 files unresolved | |
221 | $ cd narrow2 |
|
229 | $ cd narrow2 | |
222 | $ hg tracked |
|
230 | $ hg tracked | |
223 | I path:d0 |
|
231 | I path:d0 | |
224 | I path:d3 |
|
232 | I path:d3 | |
225 | I path:d6 |
|
233 | I path:d6 | |
226 | I path:d9 |
|
234 | I path:d9 | |
227 | $ hg verify |
|
235 | $ hg verify | |
228 | checking changesets |
|
236 | checking changesets | |
229 | checking manifests |
|
237 | checking manifests | |
230 | checking directory manifests |
|
238 | checking directory manifests | |
231 | crosschecking files in changesets and manifests |
|
239 | crosschecking files in changesets and manifests | |
232 | checking files |
|
240 | checking files | |
233 | 4 files, 11 changesets, 4 total revisions |
|
241 | 4 files, 11 changesets, 4 total revisions | |
234 | $ hg log -T "{if(ellipsis, '...')}{node|short}: {desc}\n" |
|
242 | $ hg log -T "{if(ellipsis, '...')}{node|short}: {desc}\n" | |
235 | 5dcf948d1e26: add d10/f |
|
243 | 5dcf948d1e26: add d10/f | |
236 | *: add d9/f (glob) |
|
244 | *: add d9/f (glob) | |
237 | ed07d334af10: add d8/f |
|
245 | ed07d334af10: add d8/f | |
238 | 472749d2eed8: add d7/f |
|
246 | 472749d2eed8: add d7/f | |
239 | *: add d6/f (glob) |
|
247 | *: add d6/f (glob) | |
240 | 47c482f555ec: add d5/f |
|
248 | 47c482f555ec: add d5/f | |
241 | 3c6772db7d10: add d4/f |
|
249 | 3c6772db7d10: add d4/f | |
242 | *: add d3/f (glob) |
|
250 | *: add d3/f (glob) | |
243 | a68ce05aaaed: add d2/f |
|
251 | a68ce05aaaed: add d2/f | |
244 | 5934322a52dd: add d1/f |
|
252 | 5934322a52dd: add d1/f | |
245 | *: add d0/f (glob) |
|
253 | *: add d0/f (glob) | |
246 | $ hg tracked --addinclude d1 |
|
254 | $ hg tracked --addinclude d1 | |
247 | comparing with ssh://user@dummy/upstream |
|
255 | comparing with ssh://user@dummy/upstream | |
248 | searching for changes |
|
256 | searching for changes | |
249 | no changes found |
|
257 | no changes found | |
|
258 | adding changesets | |||
|
259 | adding manifests | |||
|
260 | adding file changes | |||
|
261 | added 0 changesets with 1 changes to 5 files | |||
250 | 11 local changesets published |
|
262 | 11 local changesets published | |
251 | abort: path ends in directory separator: d1/ |
|
263 | abort: path ends in directory separator: d1/ | |
252 | [255] |
|
264 | [255] | |
253 | $ hg tracked |
|
265 | $ hg tracked | |
254 | I path:d0 |
|
266 | I path:d0 | |
255 | I path:d3 |
|
267 | I path:d3 | |
256 | I path:d6 |
|
268 | I path:d6 | |
257 | I path:d9 |
|
269 | I path:d9 | |
258 | $ hg log -T "{if(ellipsis, '...')}{node|short}: {desc}\n" |
|
270 | $ hg log -T "{if(ellipsis, '...')}{node|short}: {desc}\n" | |
259 | 5dcf948d1e26: add d10/f |
|
271 | 5dcf948d1e26: add d10/f | |
260 | *: add d9/f (glob) |
|
272 | *: add d9/f (glob) | |
261 | ed07d334af10: add d8/f |
|
273 | ed07d334af10: add d8/f | |
262 | 472749d2eed8: add d7/f |
|
274 | 472749d2eed8: add d7/f | |
263 | *: add d6/f (glob) |
|
275 | *: add d6/f (glob) | |
264 | 47c482f555ec: add d5/f |
|
276 | 47c482f555ec: add d5/f | |
265 | 3c6772db7d10: add d4/f |
|
277 | 3c6772db7d10: add d4/f | |
266 | *: add d3/f (glob) |
|
278 | *: add d3/f (glob) | |
267 | a68ce05aaaed: add d2/f |
|
279 | a68ce05aaaed: add d2/f | |
268 | *: add d1/f (glob) |
|
280 | *: add d1/f (glob) | |
269 | *: add d0/f (glob) |
|
281 | *: add d0/f (glob) | |
270 |
|
282 | |||
271 | Verify shouldn't claim the repo is corrupt after a widen. |
|
283 | Verify shouldn't claim the repo is corrupt after a widen. | |
272 |
|
284 | |||
273 | $ hg verify |
|
285 | $ hg verify | |
274 | checking changesets |
|
286 | checking changesets | |
275 | checking manifests |
|
287 | checking manifests | |
276 | checking directory manifests |
|
288 | checking directory manifests | |
|
289 | warning: orphan data file 'meta/d1/00manifest.i' | |||
277 | crosschecking files in changesets and manifests |
|
290 | crosschecking files in changesets and manifests | |
278 | checking files |
|
291 | checking files | |
|
292 | warning: orphan data file 'data/d1/f.i' | |||
279 | 4 files, 11 changesets, 4 total revisions |
|
293 | 4 files, 11 changesets, 4 total revisions | |
|
294 | 2 warnings encountered! | |||
280 |
|
295 | |||
281 | Widening preserves parent of local commit |
|
296 | Widening preserves parent of local commit | |
282 |
|
297 | |||
283 | $ cd .. |
|
298 | $ cd .. | |
284 | $ hg clone -q --narrow ssh://user@dummy/upstream narrow3 --include d2 -r 2 |
|
299 | $ hg clone -q --narrow ssh://user@dummy/upstream narrow3 --include d2 -r 2 | |
285 | $ cd narrow3 |
|
300 | $ cd narrow3 | |
286 | $ hg log -T "{if(ellipsis, '...')}{node|short}: {desc}\n" |
|
301 | $ hg log -T "{if(ellipsis, '...')}{node|short}: {desc}\n" | |
287 | *: add d2/f (glob) |
|
302 | *: add d2/f (glob) | |
288 | 5934322a52dd: add d1/f |
|
303 | 5934322a52dd: add d1/f | |
289 | 44d97ac7c511: add d0/f |
|
304 | 44d97ac7c511: add d0/f | |
290 | $ hg pull -q -r 3 |
|
305 | $ hg pull -q -r 3 | |
291 | $ hg co -q tip |
|
306 | $ hg co -q tip | |
292 | $ hg pull -q -r 4 |
|
307 | $ hg pull -q -r 4 | |
293 | $ echo local > d2/f |
|
308 | $ echo local > d2/f | |
294 | $ hg ci -m local |
|
309 | $ hg ci -m local | |
295 | created new head |
|
310 | created new head | |
296 | $ hg tracked -q --addinclude d0 --addinclude d9 |
|
311 | $ hg tracked -q --addinclude d0 --addinclude d9 | |
297 | abort: path ends in directory separator: d0/ |
|
312 | abort: path ends in directory separator: d0/ | |
298 | [255] |
|
313 | [255] | |
299 |
|
314 | |||
300 | Widening preserves bookmarks |
|
315 | Widening preserves bookmarks | |
301 |
|
316 | |||
302 | $ cd .. |
|
317 | $ cd .. | |
303 | $ hg clone -q --narrow ssh://user@dummy/upstream narrow-bookmarks --include d4 |
|
318 | $ hg clone -q --narrow ssh://user@dummy/upstream narrow-bookmarks --include d4 | |
304 | $ cd narrow-bookmarks |
|
319 | $ cd narrow-bookmarks | |
305 | $ echo local > d4/f |
|
320 | $ echo local > d4/f | |
306 | $ hg ci -m local |
|
321 | $ hg ci -m local | |
307 | $ hg bookmarks bookmark |
|
322 | $ hg bookmarks bookmark | |
308 | $ hg bookmarks |
|
323 | $ hg bookmarks | |
309 | * bookmark 11:42aed9c63197 |
|
324 | * bookmark 11:42aed9c63197 | |
310 | $ hg -q tracked --addinclude d2 |
|
325 | $ hg -q tracked --addinclude d2 | |
311 | abort: path ends in directory separator: d2/ |
|
326 | abort: path ends in directory separator: d2/ | |
312 | [255] |
|
327 | [255] | |
313 | $ hg bookmarks |
|
328 | $ hg bookmarks | |
314 | * bookmark 11:42aed9c63197 |
|
329 | * bookmark 11:42aed9c63197 | |
315 | $ hg log -r bookmark -T '{desc}\n' |
|
330 | $ hg log -r bookmark -T '{desc}\n' | |
316 | local |
|
331 | local | |
317 |
|
332 | |||
318 | Widening that fails can be recovered from |
|
333 | Widening that fails can be recovered from | |
319 |
|
334 | |||
320 | $ cd .. |
|
335 | $ cd .. | |
321 | $ hg clone -q --narrow ssh://user@dummy/upstream interrupted --include d0 |
|
336 | $ hg clone -q --narrow ssh://user@dummy/upstream interrupted --include d0 | |
322 | $ cd interrupted |
|
337 | $ cd interrupted | |
323 | $ echo local > d0/f |
|
338 | $ echo local > d0/f | |
324 | $ hg ci -m local |
|
339 | $ hg ci -m local | |
325 | $ hg log -T "{if(ellipsis, '...')}{rev}: {desc}\n" |
|
340 | $ hg log -T "{if(ellipsis, '...')}{rev}: {desc}\n" | |
326 | 11: local |
|
341 | 11: local | |
327 | 10: add d10/f |
|
342 | 10: add d10/f | |
328 | 9: add d9/f |
|
343 | 9: add d9/f | |
329 | 8: add d8/f |
|
344 | 8: add d8/f | |
330 | 7: add d7/f |
|
345 | 7: add d7/f | |
331 | 6: add d6/f |
|
346 | 6: add d6/f | |
332 | 5: add d5/f |
|
347 | 5: add d5/f | |
333 | 4: add d4/f |
|
348 | 4: add d4/f | |
334 | 3: add d3/f |
|
349 | 3: add d3/f | |
335 | 2: add d2/f |
|
350 | 2: add d2/f | |
336 | 1: add d1/f |
|
351 | 1: add d1/f | |
337 | 0: add d0/f |
|
352 | 0: add d0/f | |
338 | $ hg bookmarks bookmark |
|
353 | $ hg bookmarks bookmark | |
339 | $ hg --config hooks.pretxnchangegroup.bad=false tracked --addinclude d1 |
|
354 | $ hg --config hooks.pretxnchangegroup.bad=false tracked --addinclude d1 | |
340 | comparing with ssh://user@dummy/upstream |
|
355 | comparing with ssh://user@dummy/upstream | |
341 | searching for changes |
|
356 | searching for changes | |
342 | no changes found |
|
357 | no changes found | |
|
358 | adding changesets | |||
|
359 | adding manifests | |||
|
360 | adding file changes | |||
|
361 | added 0 changesets with 1 changes to 2 files | |||
343 | 11 local changesets published |
|
362 | 11 local changesets published | |
344 | abort: path ends in directory separator: d1/ |
|
363 | abort: path ends in directory separator: d1/ | |
345 | [255] |
|
364 | [255] | |
346 | $ hg log -T "{if(ellipsis, '...')}{rev}: {desc}\n" |
|
365 | $ hg log -T "{if(ellipsis, '...')}{rev}: {desc}\n" | |
347 | 11: local |
|
366 | 11: local | |
348 | 10: add d10/f |
|
367 | 10: add d10/f | |
349 | 9: add d9/f |
|
368 | 9: add d9/f | |
350 | 8: add d8/f |
|
369 | 8: add d8/f | |
351 | 7: add d7/f |
|
370 | 7: add d7/f | |
352 | 6: add d6/f |
|
371 | 6: add d6/f | |
353 | 5: add d5/f |
|
372 | 5: add d5/f | |
354 | 4: add d4/f |
|
373 | 4: add d4/f | |
355 | 3: add d3/f |
|
374 | 3: add d3/f | |
356 | 2: add d2/f |
|
375 | 2: add d2/f | |
357 | 1: add d1/f |
|
376 | 1: add d1/f | |
358 | 0: add d0/f |
|
377 | 0: add d0/f | |
359 | $ hg bookmarks |
|
378 | $ hg bookmarks | |
360 | * bookmark 11:b7ce3df41eca |
|
379 | * bookmark 11:b7ce3df41eca | |
361 | $ hg unbundle .hg/strip-backup/*-widen.hg |
|
380 | $ hg unbundle .hg/strip-backup/*-widen.hg | |
362 | abort: $ENOENT$: .hg/strip-backup/*-widen.hg |
|
381 | abort: $ENOENT$: .hg/strip-backup/*-widen.hg | |
363 | [255] |
|
382 | [255] | |
364 | $ hg log -T "{if(ellipsis, '...')}{rev}: {desc}\n" |
|
383 | $ hg log -T "{if(ellipsis, '...')}{rev}: {desc}\n" | |
365 | 11: local |
|
384 | 11: local | |
366 | 10: add d10/f |
|
385 | 10: add d10/f | |
367 | 9: add d9/f |
|
386 | 9: add d9/f | |
368 | 8: add d8/f |
|
387 | 8: add d8/f | |
369 | 7: add d7/f |
|
388 | 7: add d7/f | |
370 | 6: add d6/f |
|
389 | 6: add d6/f | |
371 | 5: add d5/f |
|
390 | 5: add d5/f | |
372 | 4: add d4/f |
|
391 | 4: add d4/f | |
373 | 3: add d3/f |
|
392 | 3: add d3/f | |
374 | 2: add d2/f |
|
393 | 2: add d2/f | |
375 | 1: add d1/f |
|
394 | 1: add d1/f | |
376 | 0: add d0/f |
|
395 | 0: add d0/f | |
377 | $ hg bookmarks |
|
396 | $ hg bookmarks | |
378 | * bookmark 11:b7ce3df41eca |
|
397 | * bookmark 11:b7ce3df41eca |
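
The tests above repeatedly read the spec back with 'hg tracked', whose output uses the 'I'/'X' prefix format described in the command's docstring, one pattern per line. As a reader-side convenience only, here is a small sketch of turning that output back into include and exclude sets; parse_tracked_output() is a hypothetical helper for illustration and is not shipped with the extension.

# Hypothetical parser for the 'I <pattern>' / 'X <pattern>' lines printed by
# 'hg tracked'; purely illustrative, not part of the narrow extension.
def parse_tracked_output(text):
    includes, excludes = set(), set()
    for line in text.splitlines():
        status, _sep, pat = line.partition(' ')
        if status == 'I':
            includes.add(pat)
        elif status == 'X':
            excludes.add(pat)
    return includes, excludes

print(parse_tracked_output('I path:d0\nI path:d3\nX path:d3/tests'))
# ({'path:d0', 'path:d3'}, {'path:d3/tests'})  (set ordering may differ)
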