##// END OF EJS Templates
cmdutil: remove remainder of old walkchangerevs() implementation
Yuya Nishihara -
r46228:c7413ffe default
parent child Browse files
Show More
@@ -1,1291 +1,1255
1 # __init__.py - remotefilelog extension
1 # __init__.py - remotefilelog extension
2 #
2 #
3 # Copyright 2013 Facebook, Inc.
3 # Copyright 2013 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """remotefilelog causes Mercurial to lazily fetch file contents (EXPERIMENTAL)
7 """remotefilelog causes Mercurial to lazily fetch file contents (EXPERIMENTAL)
8
8
9 This extension is HIGHLY EXPERIMENTAL. There are NO BACKWARDS COMPATIBILITY
9 This extension is HIGHLY EXPERIMENTAL. There are NO BACKWARDS COMPATIBILITY
10 GUARANTEES. This means that repositories created with this extension may
10 GUARANTEES. This means that repositories created with this extension may
11 only be usable with the exact version of this extension/Mercurial that was
11 only be usable with the exact version of this extension/Mercurial that was
12 used. The extension attempts to enforce this in order to prevent repository
12 used. The extension attempts to enforce this in order to prevent repository
13 corruption.
13 corruption.
14
14
15 remotefilelog works by fetching file contents lazily and storing them
15 remotefilelog works by fetching file contents lazily and storing them
16 in a cache on the client rather than in revlogs. This allows enormous
16 in a cache on the client rather than in revlogs. This allows enormous
17 histories to be transferred only partially, making them easier to
17 histories to be transferred only partially, making them easier to
18 operate on.
18 operate on.
19
19
20 Configs:
20 Configs:
21
21
22 ``packs.maxchainlen`` specifies the maximum delta chain length in pack files
22 ``packs.maxchainlen`` specifies the maximum delta chain length in pack files
23
23
24 ``packs.maxpacksize`` specifies the maximum pack file size
24 ``packs.maxpacksize`` specifies the maximum pack file size
25
25
26 ``packs.maxpackfilecount`` specifies the maximum number of packs in the
26 ``packs.maxpackfilecount`` specifies the maximum number of packs in the
27 shared cache (trees only for now)
27 shared cache (trees only for now)
28
28
29 ``remotefilelog.backgroundprefetch`` runs prefetch in background when True
29 ``remotefilelog.backgroundprefetch`` runs prefetch in background when True
30
30
31 ``remotefilelog.bgprefetchrevs`` specifies revisions to fetch on commit and
31 ``remotefilelog.bgprefetchrevs`` specifies revisions to fetch on commit and
32 update, and on other commands that use them. Different from pullprefetch.
32 update, and on other commands that use them. Different from pullprefetch.
33
33
34 ``remotefilelog.gcrepack`` does garbage collection during repack when True
34 ``remotefilelog.gcrepack`` does garbage collection during repack when True
35
35
36 ``remotefilelog.nodettl`` specifies maximum TTL of a node in seconds before
36 ``remotefilelog.nodettl`` specifies maximum TTL of a node in seconds before
37 it is garbage collected
37 it is garbage collected
38
38
39 ``remotefilelog.repackonhggc`` runs repack on hg gc when True
39 ``remotefilelog.repackonhggc`` runs repack on hg gc when True
40
40
41 ``remotefilelog.prefetchdays`` specifies the maximum age of a commit in
41 ``remotefilelog.prefetchdays`` specifies the maximum age of a commit in
42 days after which it is no longer prefetched.
42 days after which it is no longer prefetched.
43
43
44 ``remotefilelog.prefetchdelay`` specifies delay between background
44 ``remotefilelog.prefetchdelay`` specifies delay between background
45 prefetches in seconds after operations that change the working copy parent
45 prefetches in seconds after operations that change the working copy parent
46
46
47 ``remotefilelog.data.gencountlimit`` constraints the minimum number of data
47 ``remotefilelog.data.gencountlimit`` constraints the minimum number of data
48 pack files required to be considered part of a generation. In particular,
48 pack files required to be considered part of a generation. In particular,
49 minimum number of packs files > gencountlimit.
49 minimum number of packs files > gencountlimit.
50
50
51 ``remotefilelog.data.generations`` list for specifying the lower bound of
51 ``remotefilelog.data.generations`` list for specifying the lower bound of
52 each generation of the data pack files. For example, list ['100MB','1MB']
52 each generation of the data pack files. For example, list ['100MB','1MB']
53 or ['1MB', '100MB'] will lead to three generations: [0, 1MB), [
53 or ['1MB', '100MB'] will lead to three generations: [0, 1MB), [
54 1MB, 100MB) and [100MB, infinity).
54 1MB, 100MB) and [100MB, infinity).
55
55
56 ``remotefilelog.data.maxrepackpacks`` the maximum number of pack files to
56 ``remotefilelog.data.maxrepackpacks`` the maximum number of pack files to
57 include in an incremental data repack.
57 include in an incremental data repack.
58
58
59 ``remotefilelog.data.repackmaxpacksize`` the maximum size of a pack file for
59 ``remotefilelog.data.repackmaxpacksize`` the maximum size of a pack file for
60 it to be considered for an incremental data repack.
60 it to be considered for an incremental data repack.
61
61
62 ``remotefilelog.data.repacksizelimit`` the maximum total size of pack files
62 ``remotefilelog.data.repacksizelimit`` the maximum total size of pack files
63 to include in an incremental data repack.
63 to include in an incremental data repack.
64
64
65 ``remotefilelog.history.gencountlimit`` constraints the minimum number of
65 ``remotefilelog.history.gencountlimit`` constraints the minimum number of
66 history pack files required to be considered part of a generation. In
66 history pack files required to be considered part of a generation. In
67 particular, minimum number of packs files > gencountlimit.
67 particular, minimum number of packs files > gencountlimit.
68
68
69 ``remotefilelog.history.generations`` list for specifying the lower bound of
69 ``remotefilelog.history.generations`` list for specifying the lower bound of
70 each generation of the history pack files. For example, list [
70 each generation of the history pack files. For example, list [
71 '100MB', '1MB'] or ['1MB', '100MB'] will lead to three generations: [
71 '100MB', '1MB'] or ['1MB', '100MB'] will lead to three generations: [
72 0, 1MB), [1MB, 100MB) and [100MB, infinity).
72 0, 1MB), [1MB, 100MB) and [100MB, infinity).
73
73
74 ``remotefilelog.history.maxrepackpacks`` the maximum number of pack files to
74 ``remotefilelog.history.maxrepackpacks`` the maximum number of pack files to
75 include in an incremental history repack.
75 include in an incremental history repack.
76
76
77 ``remotefilelog.history.repackmaxpacksize`` the maximum size of a pack file
77 ``remotefilelog.history.repackmaxpacksize`` the maximum size of a pack file
78 for it to be considered for an incremental history repack.
78 for it to be considered for an incremental history repack.
79
79
80 ``remotefilelog.history.repacksizelimit`` the maximum total size of pack
80 ``remotefilelog.history.repacksizelimit`` the maximum total size of pack
81 files to include in an incremental history repack.
81 files to include in an incremental history repack.
82
82
83 ``remotefilelog.backgroundrepack`` automatically consolidate packs in the
83 ``remotefilelog.backgroundrepack`` automatically consolidate packs in the
84 background
84 background
85
85
86 ``remotefilelog.cachepath`` path to cache
86 ``remotefilelog.cachepath`` path to cache
87
87
88 ``remotefilelog.cachegroup`` if set, make cache directory sgid to this
88 ``remotefilelog.cachegroup`` if set, make cache directory sgid to this
89 group
89 group
90
90
91 ``remotefilelog.cacheprocess`` binary to invoke for fetching file data
91 ``remotefilelog.cacheprocess`` binary to invoke for fetching file data
92
92
93 ``remotefilelog.debug`` turn on remotefilelog-specific debug output
93 ``remotefilelog.debug`` turn on remotefilelog-specific debug output
94
94
95 ``remotefilelog.excludepattern`` pattern of files to exclude from pulls
95 ``remotefilelog.excludepattern`` pattern of files to exclude from pulls
96
96
97 ``remotefilelog.includepattern`` pattern of files to include in pulls
97 ``remotefilelog.includepattern`` pattern of files to include in pulls
98
98
99 ``remotefilelog.fetchwarning``: message to print when too many
99 ``remotefilelog.fetchwarning``: message to print when too many
100 single-file fetches occur
100 single-file fetches occur
101
101
102 ``remotefilelog.getfilesstep`` number of files to request in a single RPC
102 ``remotefilelog.getfilesstep`` number of files to request in a single RPC
103
103
104 ``remotefilelog.getfilestype`` if set to 'threaded' use threads to fetch
104 ``remotefilelog.getfilestype`` if set to 'threaded' use threads to fetch
105 files, otherwise use optimistic fetching
105 files, otherwise use optimistic fetching
106
106
107 ``remotefilelog.pullprefetch`` revset for selecting files that should be
107 ``remotefilelog.pullprefetch`` revset for selecting files that should be
108 eagerly downloaded rather than lazily
108 eagerly downloaded rather than lazily
109
109
110 ``remotefilelog.reponame`` name of the repo. If set, used to partition
110 ``remotefilelog.reponame`` name of the repo. If set, used to partition
111 data from other repos in a shared store.
111 data from other repos in a shared store.
112
112
113 ``remotefilelog.server`` if true, enable server-side functionality
113 ``remotefilelog.server`` if true, enable server-side functionality
114
114
115 ``remotefilelog.servercachepath`` path for caching blobs on the server
115 ``remotefilelog.servercachepath`` path for caching blobs on the server
116
116
117 ``remotefilelog.serverexpiration`` number of days to keep cached server
117 ``remotefilelog.serverexpiration`` number of days to keep cached server
118 blobs
118 blobs
119
119
120 ``remotefilelog.validatecache`` if set, check cache entries for corruption
120 ``remotefilelog.validatecache`` if set, check cache entries for corruption
121 before returning blobs
121 before returning blobs
122
122
123 ``remotefilelog.validatecachelog`` if set, check cache entries for
123 ``remotefilelog.validatecachelog`` if set, check cache entries for
124 corruption before returning metadata
124 corruption before returning metadata
125
125
126 """
126 """
127 from __future__ import absolute_import
127 from __future__ import absolute_import
128
128
129 import os
129 import os
130 import time
130 import time
131 import traceback
131 import traceback
132
132
133 from mercurial.node import hex
133 from mercurial.node import hex
134 from mercurial.i18n import _
134 from mercurial.i18n import _
135 from mercurial.pycompat import open
135 from mercurial.pycompat import open
136 from mercurial import (
136 from mercurial import (
137 changegroup,
137 changegroup,
138 changelog,
138 changelog,
139 cmdutil,
140 commands,
139 commands,
141 configitems,
140 configitems,
142 context,
141 context,
143 copies,
142 copies,
144 debugcommands as hgdebugcommands,
143 debugcommands as hgdebugcommands,
145 dispatch,
144 dispatch,
146 error,
145 error,
147 exchange,
146 exchange,
148 extensions,
147 extensions,
149 hg,
148 hg,
150 localrepo,
149 localrepo,
151 match as matchmod,
150 match as matchmod,
152 merge,
151 merge,
153 mergestate as mergestatemod,
152 mergestate as mergestatemod,
154 node as nodemod,
153 node as nodemod,
155 patch,
154 patch,
156 pycompat,
155 pycompat,
157 registrar,
156 registrar,
158 repair,
157 repair,
159 repoview,
158 repoview,
160 revset,
159 revset,
161 scmutil,
160 scmutil,
162 smartset,
161 smartset,
163 streamclone,
162 streamclone,
164 util,
163 util,
165 )
164 )
166 from . import (
165 from . import (
167 constants,
166 constants,
168 debugcommands,
167 debugcommands,
169 fileserverclient,
168 fileserverclient,
170 remotefilectx,
169 remotefilectx,
171 remotefilelog,
170 remotefilelog,
172 remotefilelogserver,
171 remotefilelogserver,
173 repack as repackmod,
172 repack as repackmod,
174 shallowbundle,
173 shallowbundle,
175 shallowrepo,
174 shallowrepo,
176 shallowstore,
175 shallowstore,
177 shallowutil,
176 shallowutil,
178 shallowverifier,
177 shallowverifier,
179 )
178 )
180
179
181 # ensures debug commands are registered
180 # ensures debug commands are registered
182 hgdebugcommands.command
181 hgdebugcommands.command
183
182
184 cmdtable = {}
183 cmdtable = {}
185 command = registrar.command(cmdtable)
184 command = registrar.command(cmdtable)
186
185
187 configtable = {}
186 configtable = {}
188 configitem = registrar.configitem(configtable)
187 configitem = registrar.configitem(configtable)
189
188
190 configitem(b'remotefilelog', b'debug', default=False)
189 configitem(b'remotefilelog', b'debug', default=False)
191
190
192 configitem(b'remotefilelog', b'reponame', default=b'')
191 configitem(b'remotefilelog', b'reponame', default=b'')
193 configitem(b'remotefilelog', b'cachepath', default=None)
192 configitem(b'remotefilelog', b'cachepath', default=None)
194 configitem(b'remotefilelog', b'cachegroup', default=None)
193 configitem(b'remotefilelog', b'cachegroup', default=None)
195 configitem(b'remotefilelog', b'cacheprocess', default=None)
194 configitem(b'remotefilelog', b'cacheprocess', default=None)
196 configitem(b'remotefilelog', b'cacheprocess.includepath', default=None)
195 configitem(b'remotefilelog', b'cacheprocess.includepath', default=None)
197 configitem(b"remotefilelog", b"cachelimit", default=b"1000 GB")
196 configitem(b"remotefilelog", b"cachelimit", default=b"1000 GB")
198
197
199 configitem(
198 configitem(
200 b'remotefilelog',
199 b'remotefilelog',
201 b'fallbackpath',
200 b'fallbackpath',
202 default=configitems.dynamicdefault,
201 default=configitems.dynamicdefault,
203 alias=[(b'remotefilelog', b'fallbackrepo')],
202 alias=[(b'remotefilelog', b'fallbackrepo')],
204 )
203 )
205
204
206 configitem(b'remotefilelog', b'validatecachelog', default=None)
205 configitem(b'remotefilelog', b'validatecachelog', default=None)
207 configitem(b'remotefilelog', b'validatecache', default=b'on')
206 configitem(b'remotefilelog', b'validatecache', default=b'on')
208 configitem(b'remotefilelog', b'server', default=None)
207 configitem(b'remotefilelog', b'server', default=None)
209 configitem(b'remotefilelog', b'servercachepath', default=None)
208 configitem(b'remotefilelog', b'servercachepath', default=None)
210 configitem(b"remotefilelog", b"serverexpiration", default=30)
209 configitem(b"remotefilelog", b"serverexpiration", default=30)
211 configitem(b'remotefilelog', b'backgroundrepack', default=False)
210 configitem(b'remotefilelog', b'backgroundrepack', default=False)
212 configitem(b'remotefilelog', b'bgprefetchrevs', default=None)
211 configitem(b'remotefilelog', b'bgprefetchrevs', default=None)
213 configitem(b'remotefilelog', b'pullprefetch', default=None)
212 configitem(b'remotefilelog', b'pullprefetch', default=None)
214 configitem(b'remotefilelog', b'backgroundprefetch', default=False)
213 configitem(b'remotefilelog', b'backgroundprefetch', default=False)
215 configitem(b'remotefilelog', b'prefetchdelay', default=120)
214 configitem(b'remotefilelog', b'prefetchdelay', default=120)
216 configitem(b'remotefilelog', b'prefetchdays', default=14)
215 configitem(b'remotefilelog', b'prefetchdays', default=14)
217
216
218 configitem(b'remotefilelog', b'getfilesstep', default=10000)
217 configitem(b'remotefilelog', b'getfilesstep', default=10000)
219 configitem(b'remotefilelog', b'getfilestype', default=b'optimistic')
218 configitem(b'remotefilelog', b'getfilestype', default=b'optimistic')
220 configitem(b'remotefilelog', b'batchsize', configitems.dynamicdefault)
219 configitem(b'remotefilelog', b'batchsize', configitems.dynamicdefault)
221 configitem(b'remotefilelog', b'fetchwarning', default=b'')
220 configitem(b'remotefilelog', b'fetchwarning', default=b'')
222
221
223 configitem(b'remotefilelog', b'includepattern', default=None)
222 configitem(b'remotefilelog', b'includepattern', default=None)
224 configitem(b'remotefilelog', b'excludepattern', default=None)
223 configitem(b'remotefilelog', b'excludepattern', default=None)
225
224
226 configitem(b'remotefilelog', b'gcrepack', default=False)
225 configitem(b'remotefilelog', b'gcrepack', default=False)
227 configitem(b'remotefilelog', b'repackonhggc', default=False)
226 configitem(b'remotefilelog', b'repackonhggc', default=False)
228 configitem(b'repack', b'chainorphansbysize', default=True, experimental=True)
227 configitem(b'repack', b'chainorphansbysize', default=True, experimental=True)
229
228
230 configitem(b'packs', b'maxpacksize', default=0)
229 configitem(b'packs', b'maxpacksize', default=0)
231 configitem(b'packs', b'maxchainlen', default=1000)
230 configitem(b'packs', b'maxchainlen', default=1000)
232
231
233 configitem(b'devel', b'remotefilelog.bg-wait', default=False)
232 configitem(b'devel', b'remotefilelog.bg-wait', default=False)
234
233
235 # default TTL limit is 30 days
234 # default TTL limit is 30 days
236 _defaultlimit = 60 * 60 * 24 * 30
235 _defaultlimit = 60 * 60 * 24 * 30
237 configitem(b'remotefilelog', b'nodettl', default=_defaultlimit)
236 configitem(b'remotefilelog', b'nodettl', default=_defaultlimit)
238
237
# Tunables for incremental repack of *data* pack files.
# Fix: the original statement ended with a stray trailing comma, turning the
# expression into a useless one-element tuple `(None,)`; the comma is removed.
configitem(b'remotefilelog', b'data.gencountlimit', default=2)
configitem(
    b'remotefilelog', b'data.generations', default=[b'1GB', b'100MB', b'1MB']
)
configitem(b'remotefilelog', b'data.maxrepackpacks', default=50)
configitem(b'remotefilelog', b'data.repackmaxpacksize', default=b'4GB')
configitem(b'remotefilelog', b'data.repacksizelimit', default=b'100MB')
246
245
# Tunables for incremental repack of *history* pack files.
# Fix: same stray trailing comma as the data.gencountlimit line — the
# expression evaluated to a pointless one-element tuple; comma removed.
configitem(b'remotefilelog', b'history.gencountlimit', default=2)
configitem(b'remotefilelog', b'history.generations', default=[b'100MB'])
configitem(b'remotefilelog', b'history.maxrepackpacks', default=50)
configitem(b'remotefilelog', b'history.repackmaxpacksize', default=b'400MB')
configitem(b'remotefilelog', b'history.repacksizelimit', default=b'100MB')
252
251
253 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
252 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
254 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
253 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
255 # be specifying the version(s) of Mercurial they are tested with, or
254 # be specifying the version(s) of Mercurial they are tested with, or
256 # leave the attribute unspecified.
255 # leave the attribute unspecified.
257 testedwith = b'ships-with-hg-core'
256 testedwith = b'ships-with-hg-core'
258
257
259 repoclass = localrepo.localrepository
258 repoclass = localrepo.localrepository
260 repoclass._basesupported.add(constants.SHALLOWREPO_REQUIREMENT)
259 repoclass._basesupported.add(constants.SHALLOWREPO_REQUIREMENT)
261
260
262 isenabled = shallowutil.isenabled
261 isenabled = shallowutil.isenabled
263
262
264
263
def uisetup(ui):
    """Wraps user facing Mercurial commands to swap them out with shallow
    versions.

    Runs once per process, before any repository is loaded.  Everything here
    is command/function wrapping; no repository state is touched.
    """
    hg.wirepeersetupfuncs.append(fileserverclient.peersetup)

    # `hg clone` grows a --shallow flag and is routed through cloneshallow.
    entry = extensions.wrapcommand(commands.table, b'clone', cloneshallow)
    entry[1].append(
        (
            b'',
            b'shallow',
            None,
            _(b"create a shallow clone which uses remote file history"),
        )
    )

    extensions.wrapcommand(
        commands.table, b'debugindex', debugcommands.debugindex
    )
    extensions.wrapcommand(
        commands.table, b'debugindexdot', debugcommands.debugindexdot
    )
    extensions.wrapcommand(commands.table, b'log', log)
    extensions.wrapcommand(commands.table, b'pull', pull)

    # Prevent 'hg manifest --all': it would require every file node, which a
    # shallow repo by definition does not have locally.
    def _manifest(orig, ui, repo, *args, **opts):
        if isenabled(repo) and opts.get('all'):
            raise error.Abort(_(b"--all is not supported in a shallow repo"))

        return orig(ui, repo, *args, **opts)

    extensions.wrapcommand(commands.table, b"manifest", _manifest)

    # Wrap remotefilelog with lfs code, but only once lfs has finished loading
    # (it may not be enabled at all).
    def _lfsloaded(loaded=False):
        lfsmod = None
        try:
            lfsmod = extensions.find(b'lfs')
        except KeyError:
            pass
        if lfsmod:
            lfsmod.wrapfilelog(remotefilelog.remotefilelog)
            fileserverclient._lfsmod = lfsmod

    extensions.afterloaded(b'lfs', _lfsloaded)

    # debugdata needs remotefilelog.len to work
    extensions.wrapcommand(commands.table, b'debugdata', debugdatashallow)

    changegroup.cgpacker = shallowbundle.shallowcg1packer

    extensions.wrapfunction(
        changegroup, b'_addchangegroupfiles', shallowbundle.addchangegroupfiles
    )
    extensions.wrapfunction(
        changegroup, b'makechangegroup', shallowbundle.makechangegroup
    )
    extensions.wrapfunction(localrepo, b'makestore', storewrapper)
    extensions.wrapfunction(exchange, b'pull', exchangepull)
    extensions.wrapfunction(merge, b'applyupdates', applyupdates)
    extensions.wrapfunction(merge, b'_checkunknownfiles', checkunknownfiles)
    extensions.wrapfunction(context.workingctx, b'_checklookup', checklookup)
    extensions.wrapfunction(scmutil, b'_findrenames', findrenames)
    extensions.wrapfunction(
        copies, b'_computeforwardmissing', computeforwardmissing
    )
    extensions.wrapfunction(dispatch, b'runcommand', runcommand)
    extensions.wrapfunction(repair, b'_collectbrokencsets', _collectbrokencsets)
    extensions.wrapfunction(context.changectx, b'filectx', filectx)
    extensions.wrapfunction(context.workingctx, b'filectx', workingfilectx)
    extensions.wrapfunction(patch, b'trydiff', trydiff)
    extensions.wrapfunction(hg, b'verify', _verify)
    scmutil.fileprefetchhooks.add(b'remotefilelog', _fileprefetchhook)

    # disappointing hacks below
    extensions.wrapfunction(scmutil, b'getrenamedfn', getrenamedfn)
    extensions.wrapfunction(revset, b'filelog', filelogrevset)
    revset.symbols[b'filelog'] = revset.filelog
343
346
344
def cloneshallow(orig, ui, repo, *args, **opts):
    """Wrapper for `hg clone` that, when --shallow is given, sets up the
    remotefilelog client machinery on the fly and records the shallow
    requirement before the actual clone runs.
    """
    if opts.get('shallow'):
        repos = []

        def pull_shallow(orig, self, *args, **kwargs):
            if not isenabled(self):
                repos.append(self.unfiltered())
                # set up the client hooks so the post-clone update works
                setupclient(self.ui, self.unfiltered())

                # setupclient fixed the class on the repo itself
                # but we also need to fix it on the repoview
                if isinstance(self, repoview.repoview):
                    self.__class__.__bases__ = (
                        self.__class__.__bases__[0],
                        self.unfiltered().__class__,
                    )
                self.requirements.add(constants.SHALLOWREPO_REQUIREMENT)
                with self.lock():
                    # acquire store lock before writing requirements as some
                    # requirements might be written to .hg/store/requires
                    scmutil.writereporequirements(self)

                # Since setupclient hadn't been called, exchange.pull was not
                # wrapped. So we need to manually invoke our version of it.
                return exchangepull(orig, self, *args, **kwargs)
            else:
                return orig(self, *args, **kwargs)

        extensions.wrapfunction(exchange, b'pull', pull_shallow)

        # Wrap the stream logic to add requirements and to pass include/exclude
        # patterns around.
        def setup_streamout(repo, remote):
            # Replace remote.stream_out with a version that sends file
            # patterns.
            def stream_out_shallow(orig):
                caps = remote.capabilities()
                if constants.NETWORK_CAP_LEGACY_SSH_GETFILES in caps:
                    opts = {}
                    if repo.includepattern:
                        opts['includepattern'] = b'\0'.join(repo.includepattern)
                    if repo.excludepattern:
                        opts['excludepattern'] = b'\0'.join(repo.excludepattern)
                    return remote._callstream(b'stream_out_shallow', **opts)
                else:
                    return orig()

            extensions.wrapfunction(remote, b'stream_out', stream_out_shallow)

        def stream_wrap(orig, op):
            setup_streamout(op.repo, op.remote)
            return orig(op)

        extensions.wrapfunction(
            streamclone, b'maybeperformlegacystreamclone', stream_wrap
        )

        def canperformstreamclone(orig, pullop, bundle2=False):
            # remotefilelog is currently incompatible with the
            # bundle2 flavor of streamclones, so force us to use
            # v1 instead.
            if b'v2' in pullop.remotebundle2caps.get(b'stream', []):
                pullop.remotebundle2caps[b'stream'] = [
                    c for c in pullop.remotebundle2caps[b'stream'] if c != b'v2'
                ]
            if bundle2:
                return False, None
            supported, requirements = orig(pullop, bundle2=bundle2)
            if requirements is not None:
                requirements.add(constants.SHALLOWREPO_REQUIREMENT)
            return supported, requirements

        extensions.wrapfunction(
            streamclone, b'canperformstreamclone', canperformstreamclone
        )

    try:
        orig(ui, repo, *args, **opts)
    finally:
        # Close any fileservice connections opened during the shallow clone,
        # even if the clone itself failed.
        if opts.get('shallow'):
            for r in repos:
                if util.safehasattr(r, b'fileservice'):
                    r.fileservice.close()
431
429
432
430
def debugdatashallow(orig, *args, **kwds):
    """Run `hg debugdata` with remotefilelog revlogs pretending they have
    exactly one revision (debugdata relies on len(filelog) to work).

    The monkey-patched __len__ is always restored, even if orig raises.
    """
    oldlen = remotefilelog.remotefilelog.__len__
    try:
        remotefilelog.remotefilelog.__len__ = lambda x: 1
        return orig(*args, **kwds)
    finally:
        remotefilelog.remotefilelog.__len__ = oldlen
440
438
441
439
def reposetup(ui, repo):
    """Per-repository setup: register prefetch hooks and configure the repo
    as a shallow client, a server, or neither (never both)."""
    if not repo.local():
        return

    # put here intentionally bc doesnt work in uisetup
    for hookname in (b'update.prefetch', b'commit.prefetch'):
        ui.setconfig(b'hooks', hookname, wcpprefetch)

    serverenabled = ui.configbool(b'remotefilelog', b'server')
    shallowclient = isenabled(repo)

    if serverenabled and shallowclient:
        raise RuntimeError(b"Cannot be both a server and shallow client.")

    if shallowclient:
        setupclient(ui, repo)
    if serverenabled:
        remotefilelogserver.setupserver(ui, repo)
461
459
462
460
def setupclient(ui, repo):
    """Wrap *repo* (a local repository) with shallow-client behavior."""
    # Non-local repos (peers) cannot be wrapped; silently skip them.
    if not isinstance(repo, localrepo.localrepository):
        return

    # Even clients get the server setup since they need to have the
    # wireprotocol endpoints registered.
    remotefilelogserver.onetimesetup(ui)
    onetimeclientsetup(ui)

    # Wrap the repo first, then its store, so shallow file access goes
    # through the shallow store wrapper.
    shallowrepo.wraprepo(repo)
    repo.store = shallowstore.wrapstore(repo.store)
474
472
475
473
def storewrapper(orig, requirements, path, vfstype):
    """Store-factory wrapper: shallow repos get a shallowstore-wrapped
    store; everything else gets the store unchanged."""
    store = orig(requirements, path, vfstype)
    if constants.SHALLOWREPO_REQUIREMENT in requirements:
        return shallowstore.wrapstore(store)
    return store
482
480
483
481
484 # prefetch files before update
482 # prefetch files before update
def applyupdates(
    orig, repo, mresult, wctx, mctx, overwrite, wantfiledata, **opts
):
    """Prefetch every file an update is about to fetch, then delegate."""
    if isenabled(repo):
        manifest = mctx.manifest()
        # Collect (path, hex node) pairs for every ACTION_GET entry so they
        # can be fetched in one batched request instead of one per file.
        toprefetch = [
            (f, hex(manifest[f]))
            for f, args, msg in mresult.getactions([mergestatemod.ACTION_GET])
        ]
        repo.fileservice.prefetch(toprefetch)
    return orig(repo, mresult, wctx, mctx, overwrite, wantfiledata, **opts)
496
494
497
495
498 # Prefetch merge checkunknownfiles
496 # Prefetch merge checkunknownfiles
def checkunknownfiles(orig, repo, wctx, mctx, force, mresult, *args, **kwargs):
    """Prefetch the files merge's checkunknownfiles will want to inspect."""
    if isenabled(repo):
        toprefetch = []
        sparsematch = repo.maybesparsematch(mctx.rev())
        for fname, (action, actionargs, msg) in mresult.filemap():
            # Skip files outside the sparse profile, if one is active.
            if sparsematch and not sparsematch(fname):
                continue
            if action in (
                mergestatemod.ACTION_CREATED,
                mergestatemod.ACTION_DELETED_CHANGED,
                mergestatemod.ACTION_CREATED_MERGE,
            ):
                toprefetch.append((fname, hex(mctx.filenode(fname))))
            elif action == mergestatemod.ACTION_LOCAL_DIR_RENAME_GET:
                # For directory-rename gets the source file is the first
                # action argument, not the map key.
                renamed = actionargs[0]
                toprefetch.append((renamed, hex(mctx.filenode(renamed))))
        # batch fetch the needed files from the server
        repo.fileservice.prefetch(toprefetch)
    return orig(repo, wctx, mctx, force, mresult, *args, **kwargs)
518
516
519
517
520 # Prefetch files before status attempts to look at their size and contents
518 # Prefetch files before status attempts to look at their size and contents
def checklookup(orig, self, files):
    """Prefetch file contents before status compares sizes and contents."""
    repo = self._repo
    if isenabled(repo):
        # Gather (path, hex node) for every file present in either parent,
        # then issue a single batched fetch.
        toprefetch = [
            (f, hex(parent.filenode(f)))
            for parent in self._parents
            for f in files
            if f in parent
        ]
        repo.fileservice.prefetch(toprefetch)
    return orig(self, files)
532
530
533
531
534 # Prefetch the logic that compares added and removed files for renames
532 # Prefetch the logic that compares added and removed files for renames
def findrenames(orig, repo, matcher, added, removed, *args, **kwargs):
    """Prefetch removed files before rename detection compares contents."""
    if isenabled(repo):
        pmf = repo[b'.'].manifest()
        # Only removed files that exist in the parent manifest can serve
        # as rename sources; batch-fetch exactly those.
        toprefetch = [(f, hex(pmf[f])) for f in removed if f in pmf]
        repo.fileservice.prefetch(toprefetch)
    return orig(repo, matcher, added, removed, *args, **kwargs)
545
543
546
544
547 # prefetch files before pathcopies check
545 # prefetch files before pathcopies check
def computeforwardmissing(orig, a, b, match=None):
    """Prefetch files reported missing by pathcopies' forward walk.

    When a sparse profile is active, the missing set is also narrowed to
    files inside the profile.  NOTE(review): without a sparse match no
    files are prefetched here, mirroring the original behavior.
    """
    missing = orig(a, b, match=match)
    repo = a._repo
    if isenabled(repo):
        mb = b.manifest()
        toprefetch = []
        sparsematch = repo.maybesparsematch(b.rev())
        if sparsematch:
            insparse = set()
            for fname in missing:
                if sparsematch(fname):
                    toprefetch.append((fname, hex(mb[fname])))
                    insparse.add(fname)
            missing = insparse
        # batch fetch the needed files from the server
        repo.fileservice.prefetch(toprefetch)
    return missing
567
565
568
566
569 # close cache miss server connection after the command has finished
567 # close cache miss server connection after the command has finished
def runcommand(orig, lui, repo, *args, **kwargs):
    """Run the command, then close the cache-miss server connection."""
    fileservice = None
    # repo can be None when running in chg:
    # - at startup, reposetup was called because serve is not norepo
    # - a norepo command like "help" is called
    if repo and isenabled(repo):
        fileservice = repo.fileservice
    try:
        return orig(lui, repo, *args, **kwargs)
    finally:
        # Close the connection even if the command raised.
        if fileservice:
            fileservice.close()
582
580
583
581
584 # prevent strip from stripping remotefilelogs
582 # prevent strip from stripping remotefilelogs
def _collectbrokencsets(orig, repo, files, striprev):
    """Prevent strip from stripping remotefilelogs.

    In a shallow repo, drop files backed by remotefilelog (those matching
    repo.shallowmatch) so strip only considers real local filelogs.
    """
    if isenabled(repo):
        # The comprehension already builds a list; the extra list() wrapper
        # in the original was redundant.
        files = [f for f in files if not repo.shallowmatch(f)]
    return orig(repo, files, striprev)
589
587
590
588
591 # changectx wrappers
589 # changectx wrappers
def filectx(orig, self, path, fileid=None, filelog=None):
    """changectx.filectx wrapper: return a remotefilectx for files covered
    by the shallow match; fall through to the original otherwise."""
    if fileid is None:
        fileid = self.filenode(path)
    repo = self._repo
    if isenabled(repo) and repo.shallowmatch(path):
        return remotefilectx.remotefilectx(
            repo, path, fileid=fileid, changectx=self, filelog=filelog
        )
    return orig(self, path, fileid=fileid, filelog=filelog)
600
598
601
599
def workingfilectx(orig, self, path, filelog=None):
    """workingctx.filectx wrapper: return a remoteworkingfilectx for files
    covered by the shallow match; fall through otherwise."""
    repo = self._repo
    if isenabled(repo) and repo.shallowmatch(path):
        return remotefilectx.remoteworkingfilectx(
            repo, path, workingctx=self, filelog=filelog
        )
    return orig(self, path, filelog=filelog)
608
606
609
607
610 # prefetch required revisions before a diff
608 # prefetch required revisions before a diff
def trydiff(
    orig,
    repo,
    revs,
    ctx1,
    ctx2,
    modified,
    added,
    removed,
    copy,
    getfilectx,
    *args,
    **kwargs
):
    """Batch-prefetch every file revision a diff will read, then delegate."""
    if isenabled(repo):
        toprefetch = []
        mf1 = ctx1.manifest()
        for fname in modified + added + removed:
            # Old side: only files that exist in ctx1's manifest.
            if fname in mf1:
                fnode = getfilectx(fname, ctx1).filenode()
                # fnode can be None if it's a edited working ctx file
                if fnode:
                    toprefetch.append((fname, hex(fnode)))
            # New side: anything not removed may be read from ctx2.
            if fname not in removed:
                fnode = getfilectx(fname, ctx2).filenode()
                if fnode:
                    toprefetch.append((fname, hex(fnode)))

        repo.fileservice.prefetch(toprefetch)

    return orig(
        repo,
        revs,
        ctx1,
        ctx2,
        modified,
        added,
        removed,
        copy,
        getfilectx,
        *args,
        **kwargs
    )
654
652
655
653
656 # Prevent verify from processing files
654 # Prevent verify from processing files
657 # a stub for mercurial.hg.verify()
655 # a stub for mercurial.hg.verify()
def _verify(orig, repo, level=None):
    """Stub for mercurial.hg.verify(): run the shallow verifier instead of
    the full one, which would try to process remotefilelog files.

    The *orig* and *level* arguments are kept for wrapper-signature
    compatibility but are intentionally unused.
    """
    # Mercurial lock objects support the context-manager protocol; this
    # replaces the manual lock()/try/release() dance and guarantees release.
    with repo.lock():
        return shallowverifier.shallowverifier(repo).verify()
664
662
665
663
# Process-wide guard so onetimeclientsetup() applies its wrappers only once.
clientonetime = False
667
665
668
666
def onetimeclientsetup(ui):
    """One-time (per process) client setup: wrap revlog/changelog writes so
    file revisions are deferred until the changelog node is known."""
    global clientonetime
    if clientonetime:
        return
    clientonetime = True

    # Don't commit filelogs until we know the commit hash, since the hash
    # is present in the filelog blob.
    # This violates Mercurial's filelog->manifest->changelog write order,
    # but is generally fine for client repos.
    pendingfilecommits = []

    def addrawrevision(
        orig,
        self,
        rawtext,
        transaction,
        link,
        p1,
        p2,
        node,
        flags,
        cachedelta=None,
        _metatuple=None,
    ):
        # An integer link means the changelog node isn't known yet; queue
        # the write until changelogadd() below can resolve it.
        if isinstance(link, int):
            pendingfilecommits.append(
                (
                    self,
                    rawtext,
                    transaction,
                    link,
                    p1,
                    p2,
                    node,
                    flags,
                    cachedelta,
                    _metatuple,
                )
            )
            return node
        else:
            return orig(
                self,
                rawtext,
                transaction,
                link,
                p1,
                p2,
                node,
                flags,
                cachedelta,
                _metatuple=_metatuple,
            )

    extensions.wrapfunction(
        remotefilelog.remotefilelog, b'addrawrevision', addrawrevision
    )

    def changelogadd(orig, self, *args, **kwargs):
        # Flush any queued file revisions once the changelog entry exists
        # and the integer link can be resolved to a real node.
        oldlen = len(self)
        node = orig(self, *args, **kwargs)
        newlen = len(self)
        if oldlen != newlen:
            for oldargs in pendingfilecommits:
                log, rt, tr, link, p1, p2, n, fl, c, m = oldargs
                linknode = self.node(link)
                if linknode == node:
                    log.addrawrevision(rt, tr, linknode, p1, p2, n, fl, c, m)
                else:
                    raise error.ProgrammingError(
                        b'pending multiple integer revisions are not supported'
                    )
        else:
            # "link" is actually wrong here (it is set to len(changelog))
            # if changelog remains unchanged, skip writing file revisions
            # but still do a sanity check about pending multiple revisions
            if len({x[3] for x in pendingfilecommits}) > 1:
                raise error.ProgrammingError(
                    b'pending multiple integer revisions are not supported'
                )
        # Clear the queue in place (the closure keeps referencing the list).
        del pendingfilecommits[:]
        return node

    extensions.wrapfunction(changelog.changelog, b'add', changelogadd)
754
752
755
753
def getrenamedfn(orig, repo, endrev=None):
    """Return a rename-lookup function suited to shallow repos.

    Falls back to the original implementation when remotefilelog is not
    enabled or when copy tracing is changeset-centric.
    """
    if not isenabled(repo) or copies.usechangesetcentricalgo(repo):
        return orig(repo, endrev)

    # Per-file cache: rcache[fn][rev] -> copy source (or falsy).
    rcache = {}

    def getrenamed(fn, rev):
        '''looks up all renames for a file (up to endrev) the first
        time the file is given. It indexes on the changerev and only
        parses the manifest if linkrev != changerev.
        Returns rename info for fn at changerev rev.'''
        if rev in rcache.setdefault(fn, {}):
            return rcache[fn][rev]

        try:
            fctx = repo[rev].filectx(fn)
            # Walk ancestors once and populate the cache for same-named
            # ancestors so later lookups hit the cache.
            for ancestor in fctx.ancestors():
                if ancestor.path() == fn:
                    renamed = ancestor.renamed()
                    rcache[fn][ancestor.rev()] = renamed and renamed[0]

            renamed = fctx.renamed()
            return renamed and renamed[0]
        except error.LookupError:
            # File not present at rev: no rename info.
            return None

    return getrenamed
780 return getrenamed
783
781
784
782
def walkfilerevs(orig, repo, match, follow, revs, fncache):
    """cmdutil.walkfilerevs wrapper for shallow repos.

    Returns the set of linkrevs within [min(revs), max(revs)] reachable by
    following the matched files back from the working-copy parent, filling
    *fncache* (linkrev -> list of file names) along the way.
    """
    if not isenabled(repo):
        return orig(repo, match, follow, revs, fncache)

    # remotefilelog's can't be walked in rev order, so throw.
    # The caller will see the exception and walk the commit tree instead.
    if not follow:
        raise cmdutil.FileWalkError(b"Cannot walk via filelog")

    wanted = set()
    minrev, maxrev = min(revs), max(revs)

    pctx = repo[b'.']
    for filename in match.files():
        if filename not in pctx:
            raise error.Abort(
                _(b'cannot follow file not in parent revision: "%s"') % filename
            )
        fctx = pctx[filename]

        linkrev = fctx.linkrev()
        if linkrev >= minrev and linkrev <= maxrev:
            fncache.setdefault(linkrev, []).append(filename)
            wanted.add(linkrev)

        # Record ancestors under the path they had at each revision, so
        # renames are followed.
        for ancestor in fctx.ancestors():
            linkrev = ancestor.linkrev()
            if linkrev >= minrev and linkrev <= maxrev:
                fncache.setdefault(linkrev, []).append(ancestor.path())
                wanted.add(linkrev)

    return wanted
817
818
def filelogrevset(orig, repo, subset, x):
    """``filelog(pattern)``
    Changesets connected to the specified filelog.

    For performance reasons, ``filelog()`` does not show every changeset
    that affects the requested file(s). See :hg:`help log` for details. For
    a slower, more accurate result, use ``file()``.
    """

    if not isenabled(repo):
        return orig(repo, subset, x)

    # i18n: "filelog" is a keyword
    pat = revset.getstring(x, _(b"filelog requires a pattern"))
    m = matchmod.match(
        repo.root, repo.getcwd(), [pat], default=b'relpath', ctx=repo[None]
    )
    s = set()

    if not matchmod.patkind(pat):
        # slow
        # Literal path(s): scan each candidate changeset's file list.
        for r in subset:
            ctx = repo[r]
            cfiles = ctx.files()
            for f in m.files():
                if f in cfiles:
                    s.add(ctx.rev())
                    break
    else:
        # partial
        # Pattern: collect linkrevs of each matching working-copy file and
        # of its ancestors.  This may miss changesets (hence "partial").
        files = (f for f in repo[None] if m(f))
        for f in files:
            fctx = repo[None].filectx(f)
            s.add(fctx.linkrev())
            for actx in fctx.ancestors():
                s.add(actx.linkrev())

    return smartset.baseset([r for r in subset if r in s])
857
821
858
822
@command(b'gc', [], _(b'hg gc [REPO...]'), norepo=True)
def gc(ui, *args, **opts):
    '''garbage collect the client and server filelog caches
    '''
    # Cache directories to garbage collect; a set avoids duplicates when
    # several repos share one cache.
    cachepaths = set()

    # get the system client cache
    systemcache = shallowutil.getcachepath(ui, allowempty=True)
    if systemcache:
        cachepaths.add(systemcache)

    # get repo client and server cache
    repopaths = []
    pwd = ui.environ.get(b'PWD')
    if pwd:
        repopaths.append(pwd)

    repopaths.extend(args)
    repos = []
    for repopath in repopaths:
        try:
            repo = hg.peer(ui, {}, repopath)
            repos.append(repo)

            repocache = shallowutil.getcachepath(repo.ui, allowempty=True)
            if repocache:
                cachepaths.add(repocache)
        except error.RepoError:
            # Not a repository: skip it silently (best effort).
            pass

    # gc client cache
    for cachepath in cachepaths:
        gcclient(ui, cachepath)

    # gc server cache
    for repo in repos:
        remotefilelogserver.gcserver(ui, repo._repo)
896
860
897
861
def gcclient(ui, cachepath):
    """Garbage collect one shared client cache directory.

    Reads the cache's 'repos' file, validates each listed repo, computes a
    keepset of still-useful cache keys, rewrites the repos file with only
    valid entries, and prunes the shared cache against the keepset.
    """
    # get list of repos that use this cache
    repospath = os.path.join(cachepath, b'repos')
    if not os.path.exists(repospath):
        ui.warn(_(b"no known cache at %s\n") % cachepath)
        return

    reposfile = open(repospath, b'rb')
    # Strip the trailing newline from each recorded path.
    repos = {r[:-1] for r in reposfile.readlines()}
    reposfile.close()

    # build list of useful files
    validrepos = []
    keepkeys = set()

    sharedcache = None
    filesrepacked = False

    count = 0
    progress = ui.makeprogress(
        _(b"analyzing repositories"), unit=b"repos", total=len(repos)
    )
    for path in repos:
        progress.update(count)
        count += 1
        try:
            path = ui.expandpath(os.path.normpath(path))
        except TypeError as e:
            ui.warn(_(b"warning: malformed path: %r:%s\n") % (path, e))
            traceback.print_exc()
            continue
        try:
            peer = hg.peer(ui, {}, path)
            repo = peer._repo
        except error.RepoError:
            # Repo vanished or is unreadable; drop it from the repos file.
            continue

        validrepos.append(path)

        # Protect against any repo or config changes that have happened since
        # this repo was added to the repos file. We'd rather this loop succeed
        # and too much be deleted, than the loop fail and nothing gets deleted.
        if not isenabled(repo):
            continue

        if not util.safehasattr(repo, b'name'):
            ui.warn(
                _(b"repo %s is a misconfigured remotefilelog repo\n") % path
            )
            continue

        # If garbage collection on repack and repack on hg gc are enabled
        # then loose files are repacked and garbage collected.
        # Otherwise regular garbage collection is performed.
        repackonhggc = repo.ui.configbool(b'remotefilelog', b'repackonhggc')
        gcrepack = repo.ui.configbool(b'remotefilelog', b'gcrepack')
        if repackonhggc and gcrepack:
            try:
                repackmod.incrementalrepack(repo)
                filesrepacked = True
                continue
            except (IOError, repackmod.RepackAlreadyRunning):
                # If repack cannot be performed due to not enough disk space
                # continue doing garbage collection of loose files w/o repack
                pass

        reponame = repo.name
        if not sharedcache:
            sharedcache = repo.sharedstore

        # Compute a keepset which is not garbage collected
        def keyfn(fname, fnode):
            return fileserverclient.getcachekey(reponame, fname, hex(fnode))

        keepkeys = repackmod.keepset(repo, keyfn=keyfn, lastkeepkeys=keepkeys)

    progress.complete()

    # write list of valid repos back
    # Relax umask so other users sharing the cache can rewrite the file.
    oldumask = os.umask(0o002)
    try:
        reposfile = open(repospath, b'wb')
        reposfile.writelines([(b"%s\n" % r) for r in validrepos])
        reposfile.close()
    finally:
        os.umask(oldumask)

    # prune cache
    if sharedcache is not None:
        sharedcache.gc(keepkeys)
    elif not filesrepacked:
        ui.warn(_(b"warning: no valid repos in repofile\n"))
990
954
991
955
def log(orig, ui, repo, *pats, **opts):
    """hg log wrapper for shallow repos: force the slow path where the
    filelog walk would be wrong, and warn when a plain file log will be
    slow without -f."""
    if not isenabled(repo):
        return orig(ui, repo, *pats, **opts)

    follow = opts.get('follow')
    revs = opts.get('rev')
    if pats:
        # Force slowpath for non-follow patterns and follows that start from
        # non-working-copy-parent revs.
        if not follow or revs:
            # This forces the slowpath
            opts['removed'] = True

        # If this is a non-follow log without any revs specified, recommend that
        # the user add -f to speed it up.
        if not follow and not revs:
            match = scmutil.match(repo[b'.'], pats, pycompat.byteskwargs(opts))
            isfile = not match.anypats()
            if isfile:
                # Only warn when every pattern is an existing plain file.
                for file in match.files():
                    if not os.path.isfile(repo.wjoin(file)):
                        isfile = False
                        break

            if isfile:
                ui.warn(
                    _(
                        b"warning: file log can be slow on large repos - "
                        + b"use -f to speed it up\n"
                    )
                )

    return orig(ui, repo, *pats, **opts)
1025
989
1026
990
1027 def revdatelimit(ui, revset):
991 def revdatelimit(ui, revset):
1028 """Update revset so that only changesets no older than 'prefetchdays' days
992 """Update revset so that only changesets no older than 'prefetchdays' days
1029 are included. The default value is set to 14 days. If 'prefetchdays' is set
993 are included. The default value is set to 14 days. If 'prefetchdays' is set
1030 to zero or negative value then date restriction is not applied.
994 to zero or negative value then date restriction is not applied.
1031 """
995 """
1032 days = ui.configint(b'remotefilelog', b'prefetchdays')
996 days = ui.configint(b'remotefilelog', b'prefetchdays')
1033 if days > 0:
997 if days > 0:
1034 revset = b'(%s) & date(-%s)' % (revset, days)
998 revset = b'(%s) & date(-%s)' % (revset, days)
1035 return revset
999 return revset
1036
1000
1037
1001
def readytofetch(repo):
    """Return True when the background-prefetch cooldown has expired.

    Applies only to prefetches triggered after operations that change the
    working copy parent; the delay between runs is
    ``remotefilelog.prefetchdelay`` seconds (default two minutes).
    """
    timeout = repo.ui.configint(b'remotefilelog', b'prefetchdelay')
    marker = repo.vfs.join(b'lastprefetch')

    # Opening in append mode both creates the marker file on first use and
    # avoids races with concurrent invocations touching the same file.
    with open(marker, b'a'):
        elapsed = time.time() - os.path.getmtime(marker)
        if elapsed <= timeout:
            return False
        # Stamp the marker so the next caller starts a fresh cooldown.
        os.utime(marker, None)
        return True
1055
1019
1056
1020
def wcpprefetch(ui, repo, **kwargs):
    """Kick off a background prefetch of the ``bgprefetchrevs`` revset.

    Also triggers a background repack when ``backgroundrepack`` is set.
    """
    # All three gates are evaluated unconditionally: readytofetch() has the
    # side effect of touching the cooldown marker file.
    shallow = isenabled(repo)
    bgprefetchrevs = ui.config(b'remotefilelog', b'bgprefetchrevs')
    isready = readytofetch(repo)

    if not (shallow and bgprefetchrevs and isready):
        return

    bgrepack = repo.ui.configbool(b'remotefilelog', b'backgroundrepack')
    # Apply the prefetchdays date horizon to the configured revset.
    bgprefetchrevs = revdatelimit(ui, bgprefetchrevs)

    def runprefetch(unused_success):
        # Guard against scheduling the prefetch twice on the same repo.
        if util.safehasattr(repo, b'ranprefetch') and repo.ranprefetch:
            return
        repo.ranprefetch = True
        repo.backgroundprefetch(bgprefetchrevs, repack=bgrepack)

    repo._afterlock(runprefetch)
1079
1043
1080
1044
def pull(orig, ui, repo, *pats, **opts):
    """Wrap ``hg pull`` to prefetch file contents afterwards if configured."""
    result = orig(ui, repo, *pats, **opts)

    if not isenabled(repo):
        return result

    # Prefetch and/or repack according to the remotefilelog configuration.
    prefetchrevset = ui.config(b'remotefilelog', b'pullprefetch')
    bgrepack = repo.ui.configbool(b'remotefilelog', b'backgroundrepack')
    bgprefetch = repo.ui.configbool(b'remotefilelog', b'backgroundprefetch')

    if prefetchrevset:
        ui.status(_(b"prefetching file contents\n"))
        revs = scmutil.revrange(repo, [prefetchrevset])
        base = repo[b'.'].rev()
        if bgprefetch:
            # Background prefetch takes care of the repack itself.
            repo.backgroundprefetch(prefetchrevset, repack=bgrepack)
        else:
            repo.prefetch(revs, base=base)
            if bgrepack:
                repackmod.backgroundrepack(repo, incremental=True)
    elif bgrepack:
        repackmod.backgroundrepack(repo, incremental=True)

    return result
1104
1068
1105
1069
def exchangepull(orig, repo, remote, *args, **kwargs):
    """Wrap exchange pull to advertise remotefilelog bundle capabilities."""

    def localgetbundle(
        orig, source, heads=None, common=None, bundlecaps=None, **kwargs
    ):
        # Inject the shallow-repo bundle2 capability into every getbundle.
        caps = bundlecaps or set()
        caps.add(constants.BUNDLE2_CAPABLITY)
        return orig(
            source, heads=heads, common=common, bundlecaps=caps, **kwargs
        )

    if util.safehasattr(remote, b'_callstream'):
        # Wire-protocol peer: hand it the local repo so capabilities can be
        # inserted in its callstream path.
        remote._localrepo = repo
    elif util.safehasattr(remote, b'getbundle'):
        extensions.wrapfunction(remote, b'getbundle', localgetbundle)

    return orig(repo, remote, *args, **kwargs)
1125
1089
1126
1090
def _fileprefetchhook(repo, revmatches):
    """Prefetch hook: batch-download the file revisions *revmatches* needs.

    *revmatches* is an iterable of ``(rev, match)`` pairs; working-directory
    revisions are skipped since their data is already local.
    """
    if not isenabled(repo):
        return
    wanted = []
    for rev, match in revmatches:
        if rev == nodemod.wdirrev or rev is None:
            continue
        ctx = repo[rev]
        mf = ctx.manifest()
        sparsematch = repo.maybesparsematch(ctx.rev())
        for path in ctx.walk(match):
            # Honour the sparse profile (when any) and skip paths absent
            # from this revision's manifest.
            if (not sparsematch or sparsematch(path)) and path in mf:
                wanted.append((path, hex(mf[path])))
    repo.fileservice.prefetch(wanted)
1140
1104
1141
1105
@command(
    b'debugremotefilelog',
    [(b'd', b'decompress', None, _(b'decompress the filelog first'))],
    _(b'hg debugremotefilelog <path>'),
    norepo=True,
)
def debugremotefilelog(ui, path, **opts):
    # Thin wrapper: the implementation lives in debugcommands.
    return debugcommands.debugremotefilelog(ui, path, **opts)
1150
1114
1151
1115
@command(
    b'verifyremotefilelog',
    [(b'd', b'decompress', None, _(b'decompress the filelogs first'))],
    _(b'hg verifyremotefilelogs <directory>'),
    norepo=True,
)
def verifyremotefilelog(ui, path, **opts):
    # Thin wrapper: the implementation lives in debugcommands.
    return debugcommands.verifyremotefilelog(ui, path, **opts)
1160
1124
1161
1125
@command(
    b'debugdatapack',
    [
        (b'', b'long', None, _(b'print the long hashes')),
        (b'', b'node', b'', _(b'dump the contents of node'), b'NODE'),
    ],
    _(b'hg debugdatapack <paths>'),
    norepo=True,
)
def debugdatapack(ui, *paths, **opts):
    # Thin wrapper: the implementation lives in debugcommands.
    return debugcommands.debugdatapack(ui, *paths, **opts)
1173
1137
1174
1138
@command(b'debughistorypack', [], _(b'hg debughistorypack <path>'), norepo=True)
def debughistorypack(ui, path, **opts):
    # Thin wrapper; **opts is accepted but not forwarded.
    return debugcommands.debughistorypack(ui, path)
1178
1142
1179
1143
@command(b'debugkeepset', [], _(b'hg debugkeepset'))
def debugkeepset(ui, repo, **opts):
    # Exercise the keepset computation so its runtime can be measured.
    def keyfn(fname, fnode):
        return fileserverclient.getcachekey(repo.name, fname, hex(fnode))

    repackmod.keepset(repo, keyfn)
1188
1152
1189
1153
@command(b'debugwaitonrepack', [], _(b'hg debugwaitonrepack'))
def debugwaitonrepack(ui, repo, **opts):
    # Thin wrapper around the debugcommands implementation.
    return debugcommands.debugwaitonrepack(repo)
1193
1157
1194
1158
@command(b'debugwaitonprefetch', [], _(b'hg debugwaitonprefetch'))
def debugwaitonprefetch(ui, repo, **opts):
    # Thin wrapper around the debugcommands implementation.
    return debugcommands.debugwaitonprefetch(repo)
1198
1162
1199
1163
def resolveprefetchopts(ui, opts):
    """Fill in default ``rev``/``base`` values for a prefetch operation.

    When no revision was requested, prefetch the working-copy parent, all
    draft changesets, and any revsets configured via
    ``remotefilelog.pullprefetch`` / ``remotefilelog.bgprefetchrevs``,
    limited by the prefetchdays date horizon.
    """
    if not opts.get(b'rev'):
        pieces = [b'.', b'draft()']

        pullprefetch = ui.config(b'remotefilelog', b'pullprefetch', None)
        if pullprefetch:
            pieces.append(b'(%s)' % pullprefetch)
        bgprefetchrevs = ui.config(b'remotefilelog', b'bgprefetchrevs', None)
        if bgprefetchrevs:
            pieces.append(b'(%s)' % bgprefetchrevs)

        # Combine the pieces and apply the date limit.
        opts[b'rev'] = [revdatelimit(ui, b'+'.join(pieces))]

    if not opts.get(b'base'):
        opts[b'base'] = None

    return opts
1221
1185
1222
1186
@command(
    b'prefetch',
    [
        (b'r', b'rev', [], _(b'prefetch the specified revisions'), _(b'REV')),
        (b'', b'repack', False, _(b'run repack after prefetch')),
        (b'b', b'base', b'', _(b"rev that is assumed to already be local")),
    ]
    + commands.walkopts,
    _(b'hg prefetch [OPTIONS] [FILE...]'),
    helpcategory=command.CATEGORY_MAINTENANCE,
)
def prefetch(ui, repo, *pats, **opts):
    """prefetch file revisions from the server

    Prefetches file revisions for the specified revs and stores them in the
    local remotefilelog cache. If no rev is specified, the default rev is
    used which is the union of dot, draft, pullprefetch and bgprefetchrev.
    File names or patterns can be used to limit which files are downloaded.

    Return 0 on success.
    """
    opts = pycompat.byteskwargs(opts)
    if not isenabled(repo):
        raise error.Abort(_(b"repo is not shallow"))

    # Fill in default rev/base values, then fetch the file data.
    opts = resolveprefetchopts(ui, opts)
    revs = scmutil.revrange(repo, opts.get(b'rev'))
    repo.prefetch(revs, opts.get(b'base'), pats, opts)

    # Run repack in background
    if opts.get(b'repack'):
        repackmod.backgroundrepack(repo, incremental=True)
1255
1219
1256
1220
@command(
    b'repack',
    [
        (b'', b'background', None, _(b'run in a background process'), None),
        (b'', b'incremental', None, _(b'do an incremental repack'), None),
        (
            b'',
            b'packsonly',
            None,
            _(b'only repack packs (skip loose objects)'),
            None,
        ),
    ],
    _(b'hg repack [OPTIONS]'),
)
def repack_(ui, repo, *pats, **opts):
    # Background mode forks a worker process and returns immediately.
    if opts.get('background'):
        repackmod.backgroundrepack(
            repo,
            incremental=opts.get('incremental'),
            packsonly=opts.get('packsonly', False),
        )
        return

    options = {b'packsonly': opts.get('packsonly')}

    try:
        if opts.get('incremental'):
            repackmod.incrementalrepack(repo, options=options)
        else:
            repackmod.fullrepack(repo, options=options)
    except repackmod.RepackAlreadyRunning as ex:
        # A concurrent repack is not an error; report it and exit 0.
        repo.ui.warn(b'%s\n' % ex)
@@ -1,4102 +1,3921
1 # cmdutil.py - help for command processing in mercurial
1 # cmdutil.py - help for command processing in mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import copy as copymod
10 import copy as copymod
11 import errno
11 import errno
12 import os
12 import os
13 import re
13 import re
14
14
15 from .i18n import _
15 from .i18n import _
16 from .node import (
16 from .node import (
17 hex,
17 hex,
18 nullid,
18 nullid,
19 nullrev,
20 short,
19 short,
21 )
20 )
22 from .pycompat import (
21 from .pycompat import (
23 getattr,
22 getattr,
24 open,
23 open,
25 setattr,
24 setattr,
26 )
25 )
27 from .thirdparty import attr
26 from .thirdparty import attr
28
27
29 from . import (
28 from . import (
30 bookmarks,
29 bookmarks,
31 changelog,
30 changelog,
32 copies,
31 copies,
33 crecord as crecordmod,
32 crecord as crecordmod,
34 dirstateguard,
33 dirstateguard,
35 encoding,
34 encoding,
36 error,
35 error,
37 formatter,
36 formatter,
38 logcmdutil,
37 logcmdutil,
39 match as matchmod,
38 match as matchmod,
40 merge as mergemod,
39 merge as mergemod,
41 mergestate as mergestatemod,
40 mergestate as mergestatemod,
42 mergeutil,
41 mergeutil,
43 obsolete,
42 obsolete,
44 patch,
43 patch,
45 pathutil,
44 pathutil,
46 phases,
45 phases,
47 pycompat,
46 pycompat,
48 repair,
47 repair,
49 revlog,
48 revlog,
50 rewriteutil,
49 rewriteutil,
51 scmutil,
50 scmutil,
52 smartset,
53 state as statemod,
51 state as statemod,
54 subrepoutil,
52 subrepoutil,
55 templatekw,
53 templatekw,
56 templater,
54 templater,
57 util,
55 util,
58 vfs as vfsmod,
56 vfs as vfsmod,
59 )
57 )
60
58
61 from .utils import (
59 from .utils import (
62 dateutil,
60 dateutil,
63 stringutil,
61 stringutil,
64 )
62 )
65
63
66 if pycompat.TYPE_CHECKING:
64 if pycompat.TYPE_CHECKING:
67 from typing import (
65 from typing import (
68 Any,
66 Any,
69 Dict,
67 Dict,
70 )
68 )
71
69
72 for t in (Any, Dict):
70 for t in (Any, Dict):
73 assert t
71 assert t
74
72
75 stringio = util.stringio
73 stringio = util.stringio
76
74
# templates of common command options

dryrunopts = [
    (b'n', b'dry-run', None, _(b'do not perform actions, just print output')),
]

confirmopts = [
    (b'', b'confirm', None, _(b'ask before applying actions')),
]

remoteopts = [
    (b'e', b'ssh', b'', _(b'specify ssh command to use'), _(b'CMD')),
    (
        b'',
        b'remotecmd',
        b'',
        _(b'specify hg command to run on the remote side'),
        _(b'CMD'),
    ),
    (
        b'',
        b'insecure',
        None,
        _(b'do not verify server certificate (ignoring web.cacerts config)'),
    ),
]

walkopts = [
    (
        b'I',
        b'include',
        [],
        _(b'include names matching the given patterns'),
        _(b'PATTERN'),
    ),
    (
        b'X',
        b'exclude',
        [],
        _(b'exclude names matching the given patterns'),
        _(b'PATTERN'),
    ),
]

commitopts = [
    (b'm', b'message', b'', _(b'use text as commit message'), _(b'TEXT')),
    (b'l', b'logfile', b'', _(b'read commit message from file'), _(b'FILE')),
]

commitopts2 = [
    (
        b'd',
        b'date',
        b'',
        _(b'record the specified date as commit date'),
        _(b'DATE'),
    ),
    (
        b'u',
        b'user',
        b'',
        _(b'record the specified user as committer'),
        _(b'USER'),
    ),
]

commitopts3 = [
    (b'D', b'currentdate', None, _(b'record the current date as commit date')),
    (b'U', b'currentuser', None, _(b'record the current user as committer')),
]

formatteropts = [
    (b'T', b'template', b'', _(b'display with template'), _(b'TEMPLATE')),
]

templateopts = [
    (
        b'',
        b'style',
        b'',
        _(b'display using template map file (DEPRECATED)'),
        _(b'STYLE'),
    ),
    (b'T', b'template', b'', _(b'display with template'), _(b'TEMPLATE')),
]

logopts = [
    (b'p', b'patch', None, _(b'show patch')),
    (b'g', b'git', None, _(b'use git extended diff format')),
    (b'l', b'limit', b'', _(b'limit number of changes displayed'), _(b'NUM')),
    (b'M', b'no-merges', None, _(b'do not show merges')),
    (b'', b'stat', None, _(b'output diffstat-style summary of changes')),
    (b'G', b'graph', None, _(b"show the revision DAG")),
] + templateopts

diffopts = [
    (b'a', b'text', None, _(b'treat all files as text')),
    (
        b'g',
        b'git',
        None,
        _(b'use git extended diff format (DEFAULT: diff.git)'),
    ),
    (b'', b'binary', None, _(b'generate binary diffs in git mode (default)')),
    (b'', b'nodates', None, _(b'omit dates from diff headers')),
]

diffwsopts = [
    (
        b'w',
        b'ignore-all-space',
        None,
        _(b'ignore white space when comparing lines'),
    ),
    (
        b'b',
        b'ignore-space-change',
        None,
        _(b'ignore changes in the amount of white space'),
    ),
    (
        b'B',
        b'ignore-blank-lines',
        None,
        _(b'ignore changes whose lines are all blank'),
    ),
    (
        b'Z',
        b'ignore-space-at-eol',
        None,
        _(b'ignore changes in whitespace at EOL'),
    ),
]

diffopts2 = (
    [
        (b'', b'noprefix', None, _(b'omit a/ and b/ prefixes from filenames')),
        (
            b'p',
            b'show-function',
            None,
            _(
                b'show which function each change is in (DEFAULT: diff.showfunc)'
            ),
        ),
        (b'', b'reverse', None, _(b'produce a diff that undoes the changes')),
    ]
    + diffwsopts
    + [
        (
            b'U',
            b'unified',
            b'',
            _(b'number of lines of context to show'),
            _(b'NUM'),
        ),
        (b'', b'stat', None, _(b'output diffstat-style summary of changes')),
        (
            b'',
            b'root',
            b'',
            _(b'produce diffs relative to subdirectory'),
            _(b'DIR'),
        ),
    ]
)

mergetoolopts = [
    (b't', b'tool', b'', _(b'specify merge tool'), _(b'TOOL')),
]

similarityopts = [
    (
        b's',
        b'similarity',
        b'',
        _(b'guess renamed files by similarity (0<=s<=100)'),
        _(b'SIMILARITY'),
    )
]

subrepoopts = [(b'S', b'subrepos', None, _(b'recurse into subrepositories'))]

debugrevlogopts = [
    (b'c', b'changelog', False, _(b'open changelog')),
    (b'm', b'manifest', False, _(b'open manifest')),
    (b'', b'dir', b'', _(b'open directory manifest')),
]

# special string such that everything below this line will be ignored in the
# editor text
_linebelow = b"^HG: ------------------------ >8 ------------------------$"
269
267
270
268
def check_at_most_one_arg(opts, *args):
    """Abort if more than one of *args* is present (truthy) in *opts*.

    Returns the unique argument that was given, or None when none of them
    were specified.
    """

    def to_display(name):
        return pycompat.sysbytes(name).replace(b'_', b'-')

    found = None
    for name in args:
        if not opts.get(name):
            continue
        if found:
            raise error.Abort(
                _(b'cannot specify both --%s and --%s')
                % (to_display(found), to_display(name))
            )
        found = name
    return found
290
288
291
289
def check_incompatible_arguments(opts, first, others):
    """Abort if *first* is set in *opts* together with any of *others*.

    Unlike check_at_most_one_arg(), the *others* may be freely combined
    with one another; each is only checked pairwise against *first*.
    """
    for other in others:
        check_at_most_one_arg(opts, first, other)
300
298
301
299
def resolvecommitoptions(ui, opts):
    """Normalize date/user commit options in-place.

    Returns True when the date was injected solely because of the
    ``rewrite.update-timestamp`` config, meaning callers may ignore a
    date-only change.
    """
    check_at_most_one_arg(opts, b'date', b'currentdate')
    check_at_most_one_arg(opts, b'user', b'currentuser')

    # Should a differing date be considered insignificant?
    datemaydiffer = False

    if opts.get(b'currentdate'):
        opts[b'date'] = b'%d %d' % dateutil.makedate()
    elif (
        not opts.get(b'date')
        and ui.configbool(b'rewrite', b'update-timestamp')
        and opts.get(b'currentdate') is None
    ):
        # Date comes from config rather than an explicit user request.
        opts[b'date'] = b'%d %d' % dateutil.makedate()
        datemaydiffer = True

    if opts.get(b'currentuser'):
        opts[b'user'] = ui.username()

    return datemaydiffer
327
325
328
326
def checknotesize(ui, opts):
    """Validate the ``note`` option: at most 255 bytes and a single line."""
    note = opts.get(b'note')
    if not note:
        return
    if len(note) > 255:
        raise error.Abort(_(b"cannot store a note of more than 255 bytes"))
    if b'\n' in note:
        raise error.Abort(_(b"note cannot contain a newline"))
338
341
339
def ishunk(x):
    """Return True when ``x`` is a record hunk (curses or plain variant)."""
    return isinstance(x, (crecordmod.uihunk, patch.recordhunk))
345
343
346
344
def newandmodified(chunks, originalchunks):
    """Collect files both newly added and further modified by ``chunks``.

    Returns ``(newfiles, alsorestore)``: ``newfiles`` is the set of
    filenames introduced by new-file hunks not present in
    ``originalchunks``; ``alsorestore`` is the set of companion files
    (e.g. rename sources) listed in those hunks' headers.
    """
    newfiles = set()
    alsorestore = set()
    for c in chunks:
        # only hunks that create a file and were not in the original set
        if not (ishunk(c) and c.header.isnewfile()):
            continue
        if c in originalchunks:
            continue
        target = c.header.filename()
        newfiles.add(target)
        alsorestore.update(set(c.header.files()) - {target})
    return newfiles, alsorestore
361
359
362
360
def parsealiases(cmd):
    """Split a ``b'name|alias1|alias2'`` command spec into its names."""
    return list(cmd.split(b"|"))
365
363
366
364
def setupwrapcolorwrite(ui):
    """Monkey-patch ``ui.write`` so diff output gets labeled/colorized.

    Returns the original ``ui.write`` so the caller can restore it later.
    """
    oldwrite = ui.write

    def labeledwrite(*args, **kw):
        # route every chunk through difflabel so diff parts pick up labels
        label = kw.pop('label', b'')
        for chunk, chunklabel in patch.difflabel(lambda: args):
            oldwrite(chunk, label=label + chunklabel)

    setattr(ui, 'write', labeledwrite)
    return oldwrite
381
379
382
380
def filterchunks(ui, originalhunks, usecurses, testfile, match, operation=None):
    """Let the user select hunks, preferring the curses UI when enabled.

    Falls back to the text-mode prompter when curses is disabled or the
    curses selector raises ``fallbackerror``.
    """
    try:
        if usecurses:
            if testfile:
                # test mode: drive the chunk selector from a script file
                selector = crecordmod.testdecorator(
                    testfile, crecordmod.testchunkselector
                )
            else:
                selector = crecordmod.chunkselector
            return crecordmod.filterpatch(
                ui, originalhunks, selector, operation
            )
    except crecordmod.fallbackerror as e:
        ui.warn(b'%s\n' % e)
        ui.warn(_(b'falling back to text mode\n'))

    return patch.filterpatch(ui, originalhunks, match, operation)
401
399
402
400
def recordfilter(ui, originalhunks, match, operation=None):
    """Prompt the user to filter ``originalhunks``; return selected hunks.

    *operation* names what the user is doing (reverting, committing,
    shelving, ...) and is used to phrase the ui messages (see
    patch.filterpatch).  Returns the selected chunks plus any extra options
    chosen in the UI.
    """
    usecurses = crecordmod.checkcurses(ui)
    testfile = ui.config(b'experimental', b'crecordtest')
    # temporarily wrap ui.write so the diff shown to the user is colorized
    oldwrite = setupwrapcolorwrite(ui)
    try:
        return filterchunks(
            ui, originalhunks, usecurses, testfile, match, operation
        )
    finally:
        ui.write = oldwrite
420
418
421
419
def dorecord(
    ui, repo, commitfunc, cmdsuggest, backupall, filterfn, *pats, **opts
):
    """Interactively select changes and hand them to ``commitfunc``.

    ``cmdsuggest`` is the command name suggested to the user when the ui is
    not interactive.  ``backupall`` makes every changed file get a backup,
    not only the ones touched by the selected hunks.  ``filterfn`` is the
    hunk-selection UI (presumably ``recordfilter`` or compatible — it is
    called as ``filterfn(ui, originalchunks, match)`` and must return
    ``(chunks, newopts)``).
    """
    opts = pycompat.byteskwargs(opts)
    if not ui.interactive():
        if cmdsuggest:
            msg = _(b'running non-interactively, use %s instead') % cmdsuggest
        else:
            msg = _(b'running non-interactively')
        raise error.Abort(msg)

    # make sure username is set before going interactive
    if not opts.get(b'user'):
        ui.username()  # raise exception, username not provided

    def recordfunc(ui, repo, message, match, opts):
        """This is generic record driver.

        Its job is to interactively filter local changes, and
        accordingly prepare working directory into a state in which the
        job can be delegated to a non-interactive commit command such as
        'commit' or 'qrefresh'.

        After the actual job is done by non-interactive command, the
        working directory is restored to its original state.

        In the end we'll record interesting changes, and everything else
        will be left in place, so the user can continue working.
        """
        if not opts.get(b'interactive-unshelve'):
            checkunfinished(repo, commit=True)
        wctx = repo[None]
        # partial commit of a merge is not supported; bail out early
        merge = len(wctx.parents()) > 1
        if merge:
            raise error.Abort(
                _(
                    b'cannot partially commit a merge '
                    b'(use "hg commit" instead)'
                )
            )

        def fail(f, msg):
            raise error.Abort(b'%s: %s' % (f, msg))

        force = opts.get(b'force')
        if not force:
            # without --force, a bad file pattern is a hard error
            match = matchmod.badmatch(match, fail)

        status = repo.status(match=match)

        overrides = {(b'ui', b'commitsubrepos'): True}

        with repo.ui.configoverride(overrides, b'record'):
            # subrepoutil.precommit() modifies the status, so work on copies
            tmpstatus = scmutil.status(
                copymod.copy(status.modified),
                copymod.copy(status.added),
                copymod.copy(status.removed),
                copymod.copy(status.deleted),
                copymod.copy(status.unknown),
                copymod.copy(status.ignored),
                copymod.copy(status.clean),  # pytype: disable=wrong-arg-count
            )

            # Force allows -X subrepo to skip the subrepo.
            subs, commitsubs, newstate = subrepoutil.precommit(
                repo.ui, wctx, tmpstatus, match, force=True
            )
            # dirty subrepos cannot take part in an interactive commit
            for s in subs:
                if s in commitsubs:
                    dirtyreason = wctx.sub(s).dirtyreason(True)
                    raise error.Abort(dirtyreason)

        if not force:
            repo.checkcommitpatterns(wctx, match, status, fail)
        diffopts = patch.difffeatureopts(
            ui,
            opts=opts,
            whitespace=True,
            section=b'commands',
            configprefix=b'commit.interactive.',
        )
        # git-style diff with function context; dates would only add noise
        diffopts.nodates = True
        diffopts.git = True
        diffopts.showfunc = True
        originaldiff = patch.diff(repo, changes=status, opts=diffopts)
        originalchunks = patch.parsepatch(originaldiff)
        match = scmutil.match(repo[None], pats)

        # 1. filter patch, since we are intending to apply subset of it
        try:
            chunks, newopts = filterfn(ui, originalchunks, match)
        except error.PatchError as err:
            raise error.Abort(_(b'error parsing patch: %s') % err)
        opts.update(newopts)

        # We need to keep a backup of files that have been newly added and
        # modified during the recording process because there is a previous
        # version without the edit in the workdir. We also will need to restore
        # files that were the sources of renames so that the patch application
        # works.
        newlyaddedandmodifiedfiles, alsorestore = newandmodified(
            chunks, originalchunks
        )
        # every file mentioned by any selected hunk
        contenders = set()
        for h in chunks:
            try:
                contenders.update(set(h.files()))
            except AttributeError:
                # non-hunk chunks (e.g. headers) have no files()
                pass

        changed = status.modified + status.added + status.removed
        newfiles = [f for f in changed if f in contenders]
        if not newfiles:
            ui.status(_(b'no changes to record\n'))
            return 0

        modified = set(status.modified)

        # 2. backup changed files, so we can restore them in the end

        if backupall:
            tobackup = changed
        else:
            tobackup = [
                f
                for f in newfiles
                if f in modified or f in newlyaddedandmodifiedfiles
            ]
        backups = {}
        if tobackup:
            backupdir = repo.vfs.join(b'record-backups')
            try:
                os.mkdir(backupdir)
            except OSError as err:
                # the backup dir may already exist from a previous run
                if err.errno != errno.EEXIST:
                    raise
        try:
            # backup continues
            for f in tobackup:
                fd, tmpname = pycompat.mkstemp(
                    prefix=f.replace(b'/', b'_') + b'.', dir=backupdir
                )
                os.close(fd)
                ui.debug(b'backup %r as %r\n' % (f, tmpname))
                util.copyfile(repo.wjoin(f), tmpname, copystat=True)
                backups[f] = tmpname

            # assemble the selected hunks into one patch buffer
            fp = stringio()
            for c in chunks:
                fname = c.filename()
                if fname in backups:
                    c.write(fp)
            dopatch = fp.tell()
            fp.seek(0)

            # 2.5 optionally review / modify patch in text editor
            if opts.get(b'review', False):
                patchtext = (
                    crecordmod.diffhelptext
                    + crecordmod.patchhelptext
                    + fp.read()
                )
                reviewedpatch = ui.edit(
                    patchtext, b"", action=b"diff", repopath=repo.path
                )
                fp.truncate(0)
                fp.write(reviewedpatch)
                fp.seek(0)

            [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
            # 3a. apply filtered patch to clean repo (clean)
            if backups:
                m = scmutil.matchfiles(repo, set(backups.keys()) | alsorestore)
                mergemod.revert_to(repo[b'.'], matcher=m)

            # 3b. (apply)
            if dopatch:
                try:
                    ui.debug(b'applying patch\n')
                    ui.debug(fp.getvalue())
                    patch.internalpatch(ui, repo, fp, 1, eolmode=None)
                except error.PatchError as err:
                    raise error.Abort(pycompat.bytestr(err))
            del fp

            # 4. We prepared working directory according to filtered
            # patch. Now is the time to delegate the job to
            # commit/qrefresh or the like!

            # Make all of the pathnames absolute.
            newfiles = [repo.wjoin(nf) for nf in newfiles]
            return commitfunc(ui, repo, *newfiles, **pycompat.strkwargs(opts))
        finally:
            # 5. finally restore backed-up files
            try:
                dirstate = repo.dirstate
                for realname, tmpname in pycompat.iteritems(backups):
                    ui.debug(b'restoring %r to %r\n' % (tmpname, realname))

                    if dirstate[realname] == b'n':
                        # without normallookup, restoring timestamp
                        # may cause partially committed files
                        # to be treated as unmodified
                        dirstate.normallookup(realname)

                    # copystat=True here and above are a hack to trick any
                    # editors that have f open that we haven't modified them.
                    #
                    # Also note that this racy as an editor could notice the
                    # file's mtime before we've finished writing it.
                    util.copyfile(tmpname, repo.wjoin(realname), copystat=True)
                    os.unlink(tmpname)
                if tobackup:
                    os.rmdir(backupdir)
            except OSError:
                # best-effort cleanup; never mask the real outcome
                pass

    def recordinwlock(ui, repo, message, match, opts):
        # take the wlock for the whole record operation
        with repo.wlock():
            return recordfunc(ui, repo, message, match, opts)

    return commit(ui, repo, recordinwlock, pats, opts)
645
643
646
644
class dirnode(object):
    """
    A directory in the user's working copy, tracked for status tersing.

    path is the path to the directory, without a trailing '/'

    statuses is the set of status codes of every file anywhere under this
    directory (subdirectories included)

    files holds (name, status) pairs for files directly in this directory

    subdirs maps a child directory name to its own dirnode object
    """

    def __init__(self, dirpath):
        self.path = dirpath
        self.statuses = set()
        self.files = []
        self.subdirs = {}

    def _addfileindir(self, filename, status):
        """Record ``filename`` as a direct child of this directory."""
        self.files.append((filename, status))

    def addfile(self, filename, status):
        """
        Record ``filename`` somewhere under this directory tree.

        A name containing '/' is routed down to the dirnode of the
        sub-directory that directly contains it, creating intermediate
        dirnode objects on the way as needed.
        """
        if b'/' not in filename:
            # direct child of this directory
            self._addfileindir(filename, status)
        else:
            child, rest = filename.split(b'/', 1)
            node = self.subdirs.get(child)
            if node is None:
                node = dirnode(pathutil.join(self.path, child))
                self.subdirs[child] = node
            node.addfile(rest, status)

        self.statuses.add(status)

    def iterfilepaths(self):
        """Yield (status, path) for files directly under this directory."""
        for name, st in self.files:
            yield st, pathutil.join(self.path, name)

    def tersewalk(self, terseargs):
        """
        Yield (status, path) pairs for this subtree, tersed when possible.

        terseargs is the string of status letters the user passed to
        `--terse`.

        If every file in this subtree shares a single status and that
        status is in terseargs, the whole subtree collapses to one entry,
        (status, dirpath + '/').  Otherwise yield this directory's own
        files, then recurse into each subdirectory.
        """
        if len(self.statuses) == 1:
            onlyst = self.statuses.pop()

            # terse only when that status letter was actually requested
            if onlyst in terseargs:
                yield onlyst, self.path + b'/'
                return

        # this directory could not be tersed: emit its own files ...
        for entry in self.iterfilepaths():
            yield entry

        # ... and whatever the subdirectories produce
        for child in self.subdirs.values():
            for entry in child.tersewalk(terseargs):
                yield entry
746
744
747
745
def tersedir(statuslist, terseargs):
    """
    Collapse per-file status entries into per-directory ones when every
    file in a directory shares the same status.

    statuslist is a scmutil.status() object which contains a list of files
    for each status.
    terseargs is the string the user passed as the argument to the
    `--terse` flag.

    Builds a tree of dirnode objects recording, per directory, the set of
    statuses seen below it, then walks that tree to produce the tersed
    scmutil.status result.
    """
    # the order matters here as that is used to produce final list
    allst = (b'm', b'a', b'r', b'd', b'u', b'i', b'c')

    # checking the argument validity
    for ch in pycompat.bytestr(terseargs):
        if ch not in allst:
            raise error.Abort(_(b"'%s' not recognized") % ch)

    # dirnode for the root of the repo
    rootobj = dirnode(b'')
    pstatus = (
        b'modified',
        b'added',
        b'deleted',
        b'clean',
        b'unknown',
        b'ignored',
        b'removed',
    )

    tersedict = {}
    for attrname in pstatus:
        statuschar = attrname[0:1]
        tersedict[statuschar] = []
        for f in getattr(statuslist, attrname):
            rootobj.addfile(f, statuschar)

    # the root directory itself is never tersed, so add its files directly
    for st, fpath in rootobj.iterfilepaths():
        tersedict[st].append(fpath)

    # walk each sub-directory, collecting (possibly tersed) entries
    for subdir in rootobj.subdirs.values():
        for st, f in subdir.tersewalk(terseargs):
            tersedict[st].append(f)

    return scmutil.status(*[sorted(tersedict[st]) for st in allst])
803
801
804
802
805 def _commentlines(raw):
803 def _commentlines(raw):
806 '''Surround lineswith a comment char and a new line'''
804 '''Surround lineswith a comment char and a new line'''
807 lines = raw.splitlines()
805 lines = raw.splitlines()
808 commentedlines = [b'# %s' % line for line in lines]
806 commentedlines = [b'# %s' % line for line in lines]
809 return b'\n'.join(commentedlines) + b'\n'
807 return b'\n'.join(commentedlines) + b'\n'
810
808
811
809
@attr.s(frozen=True)
class morestatus(object):
    """Extra information shown by `hg status` when the repo is mid-operation.

    Carries the unfinished-operation state and the unresolved merge paths
    so the status formatter can tag files and append explanatory footers.
    """

    reporoot = attr.ib()  # repo root, used to print paths relative to cwd
    unfinishedop = attr.ib()  # name of the unfinished operation, or None
    unfinishedmsg = attr.ib()  # hint message for that operation, or None
    activemerge = attr.ib()  # True when a merge is in progress
    unresolvedpaths = attr.ib()  # unresolved paths when merging, else None
    # Paths already emitted through formatfile(); consulted by
    # _formatconflicts() to avoid duplicate output.  attr.Factory gives
    # every instance its own set — a plain ``default=set()`` is evaluated
    # once and would be shared (and mutated) across all instances.
    _formattedpaths = attr.ib(init=False, default=attr.Factory(set))
    _label = b'status.morestatus'

    def formatfile(self, path, fm):
        """Record that ``path`` was output; tag it unresolved if merging."""
        self._formattedpaths.add(path)
        if self.activemerge and path in self.unresolvedpaths:
            fm.data(unresolved=True)

    def formatfooter(self, fm):
        """Emit the morestatus footer: unfinished-state note and conflicts."""
        if self.unfinishedop or self.unfinishedmsg:
            fm.startitem()
            fm.data(itemtype=b'morestatus')

            if self.unfinishedop:
                fm.data(unfinished=self.unfinishedop)
                statemsg = (
                    _(b'The repository is in an unfinished *%s* state.')
                    % self.unfinishedop
                )
                fm.plain(b'%s\n' % _commentlines(statemsg), label=self._label)
            if self.unfinishedmsg:
                fm.data(unfinishedmsg=self.unfinishedmsg)

            # May also start new data items.
            self._formatconflicts(fm)

            if self.unfinishedmsg:
                fm.plain(
                    b'%s\n' % _commentlines(self.unfinishedmsg), label=self._label
                )

    def _formatconflicts(self, fm):
        """Emit the unresolved-merge-conflicts section of the footer."""
        if not self.activemerge:
            return

        if self.unresolvedpaths:
            mergeliststr = b'\n'.join(
                [
                    b' %s'
                    % util.pathto(self.reporoot, encoding.getcwd(), path)
                    for path in self.unresolvedpaths
                ]
            )
            # NOTE(review): this literal is unicode while the surrounding
            # code uses bytes — confirm whether a b''' prefix is intended.
            msg = (
                _(
                    '''Unresolved merge conflicts:

%s

To mark files as resolved: hg resolve --mark FILE'''
                )
                % mergeliststr
            )

            # If any paths with unresolved conflicts were not previously
            # formatted, output them now.
            for f in self.unresolvedpaths:
                if f in self._formattedpaths:
                    # Already output.
                    continue
                fm.startitem()
                # We can't claim to know the status of the file - it may just
                # have been in one of the states that were not requested for
                # display, so it could be anything.
                fm.data(itemtype=b'file', path=f, unresolved=True)

        else:
            msg = _(b'No unresolved merge conflicts.')

        fm.plain(b'%s\n' % _commentlines(msg), label=self._label)
889
887
890
888
def readmorestatus(repo):
    """Returns a morestatus object if the repo has unfinished state."""
    # Unfinished multi-step command state (rebase, graft, ...), if any.
    statetuple = statemod.getrepostate(repo)
    # Merge state is tracked separately from command state.
    mergestate = mergestatemod.mergestate.read(repo)
    activemerge = mergestate.active()
    if not statetuple and not activemerge:
        # Nothing in progress: caller gets no morestatus at all.
        return None

    unfinishedop = unfinishedmsg = unresolved = None
    if statetuple:
        unfinishedop, unfinishedmsg = statetuple
    if activemerge:
        unresolved = sorted(mergestate.unresolved())
    return morestatus(
        repo.root, unfinishedop, unfinishedmsg, activemerge, unresolved
    )
907
905
908
906
def findpossible(cmd, table, strict=False):
    """
    Return cmd -> (aliases, command table entry)
    for each matching command.
    Return debug commands (or their aliases) only if no normal command matches.
    """
    normal = {}
    debugonly = {}

    # Exact table hits short-circuit the scan, so the "log" alias beats
    # "log|history".
    entries = [cmd] if cmd in table else table.keys()

    allcmds = []
    for entry in entries:
        aliases = parsealiases(entry)
        allcmds.extend(aliases)
        match = None
        if cmd in aliases:
            match = cmd
        elif not strict:
            # Non-strict mode also accepts any unambiguous alias prefix.
            match = next((a for a in aliases if a.startswith(cmd)), None)
        if match is None:
            continue
        # Debug commands are quarantined so they only surface when no
        # normal command matched.
        isdebug = aliases[0].startswith(b"debug") or match.startswith(b"debug")
        bucket = debugonly if isdebug else normal
        bucket[match] = (aliases, table[entry])

    if not normal and debugonly:
        normal = debugonly

    return normal, allcmds
946
944
947
945
def findcmd(cmd, table, strict=True):
    """Return (aliases, command table entry) for command string."""
    choice, allcmds = findpossible(cmd, table, strict)

    # Exact (or exact-alias) hit wins outright.
    if cmd in choice:
        return choice[cmd]

    # Several distinct commands matched the prefix: that is ambiguous.
    if len(choice) > 1:
        raise error.AmbiguousCommand(cmd, sorted(choice))

    # Exactly one candidate left: use it, whatever its key was.
    if choice:
        return list(choice.values())[0]

    raise error.UnknownCommand(cmd, allcmds)
963
961
964
962
def changebranch(ui, repo, revs, label, opts):
    """ Change the branch name of given revs to label

    Rewrites each changeset in 'revs' with the new branch name, creates
    obsmarkers mapping old to new nodes, and moves bookmarks and (when
    safe) the working copy onto the rewritten changesets.
    """

    with repo.wlock(), repo.lock(), repo.transaction(b'branches'):
        # abort in case of uncommitted merge or dirty wdir
        bailifchanged(repo)
        revs = scmutil.revrange(repo, revs)
        if not revs:
            raise error.Abort(b"empty revision set")
        # Multiple roots would mean rewriting disjoint stacks: refuse.
        roots = repo.revs(b'roots(%ld)', revs)
        if len(roots) > 1:
            raise error.Abort(
                _(b"cannot change branch of non-linear revisions")
            )
        rewriteutil.precheck(repo, revs, b'change branch of')

        root = repo[roots.first()]
        # Branches of the root's parents; moving onto one of those is
        # always allowed even if the branch name already exists.
        rpb = {parent.branch() for parent in root.parents()}
        if (
            not opts.get(b'force')
            and label not in rpb
            and label in repo.branchmap()
        ):
            raise error.Abort(_(b"a branch of the same name already exists"))

        if repo.revs(b'obsolete() and %ld', revs):
            raise error.Abort(
                _(b"cannot change branch of a obsolete changeset")
            )

        # make sure only topological heads
        if repo.revs(b'heads(%ld) - head()', revs):
            raise error.Abort(_(b"cannot change branch in middle of a stack"))

        replacements = {}
        # avoid import cycle mercurial.cmdutil -> mercurial.context ->
        # mercurial.subrepo -> mercurial.cmdutil
        from . import context

        for rev in revs:
            ctx = repo[rev]
            oldbranch = ctx.branch()
            # check if ctx has same branch
            if oldbranch == label:
                continue

            def filectxfn(repo, newctx, path):
                # Reuse the old file content; a missing path means the
                # file was removed in this changeset.
                try:
                    return ctx[path]
                except error.ManifestLookupError:
                    return None

            ui.debug(
                b"changing branch of '%s' from '%s' to '%s'\n"
                % (hex(ctx.node()), oldbranch, label)
            )
            extra = ctx.extra()
            # Record provenance of the rewrite in the changeset extras.
            extra[b'branch_change'] = hex(ctx.node())
            # While changing branch of set of linear commits, make sure that
            # we base our commits on new parent rather than old parent which
            # was obsoleted while changing the branch
            p1 = ctx.p1().node()
            p2 = ctx.p2().node()
            if p1 in replacements:
                p1 = replacements[p1][0]
            if p2 in replacements:
                p2 = replacements[p2][0]

            mc = context.memctx(
                repo,
                (p1, p2),
                ctx.description(),
                ctx.files(),
                filectxfn,
                user=ctx.user(),
                date=ctx.date(),
                extra=extra,
                branch=label,
            )

            newnode = repo.commitctx(mc)
            replacements[ctx.node()] = (newnode,)
            ui.debug(b'new node id is %s\n' % hex(newnode))

        # create obsmarkers and move bookmarks
        scmutil.cleanupnodes(
            repo, replacements, b'branch-change', fixphase=True
        )

        # move the working copy too
        wctx = repo[None]
        # in-progress merge is a bit too complex for now.
        if len(wctx.parents()) == 1:
            newid = replacements.get(wctx.p1().node())
            if newid is not None:
                # avoid import cycle mercurial.cmdutil -> mercurial.hg ->
                # mercurial.cmdutil
                from . import hg

                hg.update(repo, newid[0], quietempty=True)

        ui.status(_(b"changed branch on %d changesets\n") % len(replacements))
1067
1065
1068
1066
def findrepo(p):
    """Walk upwards from path 'p' looking for a repository root.

    Returns the first ancestor directory (including 'p' itself) that
    contains a '.hg' directory, or None if the filesystem root is
    reached without finding one.
    """
    current = p
    while True:
        if os.path.isdir(os.path.join(current, b".hg")):
            return current
        parent = os.path.dirname(current)
        if parent == current:
            # dirname() is a fixed point only at the root: no repo found.
            return None
        current = parent
1076
1074
1077
1075
def bailifchanged(repo, merge=True, hint=None):
    """ enforce the precondition that working directory must be clean.

    'merge' can be set to false if a pending uncommitted merge should be
    ignored (such as when 'update --check' runs).

    'hint' is the usual hint given to Abort exception.

    Raises error.Abort when the working directory (or any subrepo) has
    uncommitted changes.
    """

    # A non-null second dirstate parent means a merge is uncommitted.
    if merge and repo.dirstate.p2() != nullid:
        raise error.Abort(_(b'outstanding uncommitted merge'), hint=hint)
    st = repo.status()
    if st.modified or st.added or st.removed or st.deleted:
        raise error.Abort(_(b'uncommitted changes'), hint=hint)
    # Recurse into subrepositories so a dirty subrepo also aborts.
    ctx = repo[None]
    for s in sorted(ctx.substate):
        ctx.sub(s).bailifchanged(hint=hint)
1095
1093
1096
1094
def logmessage(ui, opts):
    """ get the log message according to -m and -l option

    Returns the message from --message, or the contents of the --logfile
    file ('-' meaning stdin), or None when neither was given.
    """

    # --message and --logfile are mutually exclusive.
    check_at_most_one_arg(opts, b'message', b'logfile')

    message = opts.get(b'message')
    logfile = opts.get(b'logfile')

    if not message and logfile:
        try:
            if isstdiofilename(logfile):
                # '-' (or empty) logfile means: read the message from stdin.
                message = ui.fin.read()
            else:
                # Normalize line endings while reading the file.
                message = b'\n'.join(util.readfile(logfile).splitlines())
        except IOError as inst:
            raise error.Abort(
                _(b"can't read commit message '%s': %s")
                % (logfile, encoding.strtolocal(inst.strerror))
            )
    return message
1117
1115
1118
1116
def mergeeditform(ctxorbool, baseformname):
    """return appropriate editform name (referencing a committemplate)

    'ctxorbool' is either a ctx to be committed, or a bool indicating whether
    merging is committed.

    This returns baseformname with '.merge' appended if it is a merge,
    otherwise '.normal' is appended.
    """
    if isinstance(ctxorbool, bool):
        ismerge = ctxorbool
    else:
        # A changectx with two parents is a merge.
        ismerge = len(ctxorbool.parents()) > 1
    return baseformname + (b".merge" if ismerge else b".normal")
1135
1133
1136
1134
def getcommiteditor(
    edit=False, finishdesc=None, extramsg=None, editform=b'', **opts
):
    """get appropriate commit message editor according to '--edit' option

    'finishdesc' is a function to be called with edited commit message
    (= 'description' of the new changeset) just after editing, but
    before checking empty-ness. It should return actual text to be
    stored into history. This allows to change description before
    storing.

    'extramsg' is a extra message to be shown in the editor instead of
    'Leave message empty to abort commit' line. 'HG: ' prefix and EOL
    is automatically added.

    'editform' is a dot-separated list of names, to distinguish
    the purpose of commit text editing.

    'getcommiteditor' returns 'commitforceeditor' regardless of
    'edit', if one of 'finishdesc' or 'extramsg' is specified, because
    they are specific for usage in MQ.
    """
    if edit or finishdesc or extramsg:
        # Force the interactive editor; the lambda closes over the
        # finishdesc/extramsg/editform values captured here.
        return lambda r, c, s: commitforceeditor(
            r, c, s, finishdesc=finishdesc, extramsg=extramsg, editform=editform
        )
    elif editform:
        # Non-forced editor, but with a specific editform bound in.
        return lambda r, c, s: commiteditor(r, c, s, editform=editform)
    else:
        return commiteditor
1167
1165
1168
1166
def _escapecommandtemplate(tmpl):
    """Escape the literal (non-template) segments of 'tmpl'.

    This keeps '\\' in outermost strings from acting as an escape
    character, which matters for Windows path separators; template
    syntax segments are passed through untouched.
    """
    out = []
    for kind, begin, finish in templater.scantemplate(tmpl, raw=True):
        piece = tmpl[begin:finish]
        if kind == b'string':
            piece = stringutil.escapestr(piece)
        out.append(piece)
    return b''.join(out)
1177
1175
1178
1176
def rendercommandtemplate(ui, tmpl, props):
    r"""Expand a literal template 'tmpl' in a way suitable for command line

    '\' in outermost string is not taken as an escape character because it
    is a directory separator on Windows.

    >>> from . import ui as uimod
    >>> ui = uimod.ui()
    >>> rendercommandtemplate(ui, b'c:\\{path}', {b'path': b'foo'})
    'c:\\foo'
    >>> rendercommandtemplate(ui, b'{"c:\\{path}"}', {'path': b'foo'})
    'c:{path}'
    """
    # Empty template: nothing to expand (preserves b'' vs None).
    if not tmpl:
        return tmpl
    # Escape literal backslashes before handing off to the templater.
    t = formatter.maketemplater(ui, _escapecommandtemplate(tmpl))
    return t.renderdefault(props)
1196
1194
1197
1195
def rendertemplate(ctx, tmpl, props=None):
    """Expand a literal template 'tmpl' byte-string against one changeset

    Each props item must be a stringify-able value or a callable returning
    such value, i.e. no bare list nor dict should be passed.
    """
    repo = ctx.repo()
    # Standard keyword defaults and repo-backed template resources.
    tres = formatter.templateresources(repo.ui, repo)
    t = formatter.maketemplater(
        repo.ui, tmpl, defaults=templatekw.keywords, resources=tres
    )
    mapping = {b'ctx': ctx}
    if props:
        # Caller-supplied props override/extend the base mapping.
        mapping.update(props)
    return t.renderdefault(mapping)
1213
1211
1214
1212
def _buildfntemplate(pat, total=None, seqno=None, revwidth=None, pathname=None):
    r"""Convert old-style filename format string to template string

    >>> _buildfntemplate(b'foo-%b-%n.patch', seqno=0)
    'foo-{reporoot|basename}-{seqno}.patch'
    >>> _buildfntemplate(b'%R{tags % "{tag}"}%H')
    '{rev}{tags % "{tag}"}{node}'

    '\' in outermost strings has to be escaped because it is a directory
    separator on Windows:

    >>> _buildfntemplate(b'c:\\tmp\\%R\\%n.patch', seqno=0)
    'c:\\\\tmp\\\\{rev}\\\\{seqno}.patch'
    >>> _buildfntemplate(b'\\\\foo\\bar.patch')
    '\\\\\\\\foo\\\\bar.patch'
    >>> _buildfntemplate(b'\\{tags % "{tag}"}')
    '\\\\{tags % "{tag}"}'

    but inner strings follow the template rules (i.e. '\' is taken as an
    escape character):

    >>> _buildfntemplate(br'{"c:\tmp"}', seqno=0)
    '{"c:\\tmp"}'
    """
    # Map of %-spec characters to their template equivalents.
    expander = {
        b'H': b'{node}',
        b'R': b'{rev}',
        b'h': b'{node|short}',
        b'm': br'{sub(r"[^\w]", "_", desc|firstline)}',
        b'r': b'{if(revwidth, pad(rev, revwidth, "0", left=True), rev)}',
        b'%': b'%',
        b'b': b'{reporoot|basename}',
    }
    # Specs that are only valid when the caller supplied the matching value.
    if total is not None:
        expander[b'N'] = b'{total}'
    if seqno is not None:
        expander[b'n'] = b'{seqno}'
    if total is not None and seqno is not None:
        # With both known, zero-pad seqno to the width of total.
        expander[b'n'] = b'{pad(seqno, total|stringify|count, "0", left=True)}'
    if pathname is not None:
        expander[b's'] = b'{pathname|basename}'
        expander[b'd'] = b'{if(pathname|dirname, pathname|dirname, ".")}'
        expander[b'p'] = b'{pathname}'

    newname = []
    for typ, start, end in templater.scantemplate(pat, raw=True):
        if typ != b'string':
            # Template syntax passes through untouched.
            newname.append(pat[start:end])
            continue
        # Literal segment: expand %-specs and escape everything else.
        i = start
        while i < end:
            n = pat.find(b'%', i, end)
            if n < 0:
                newname.append(stringutil.escapestr(pat[i:end]))
                break
            newname.append(stringutil.escapestr(pat[i:n]))
            if n + 2 > end:
                # Trailing lone '%' with no spec character after it.
                raise error.Abort(
                    _(b"incomplete format spec in output filename")
                )
            c = pat[n + 1 : n + 2]
            i = n + 2
            try:
                newname.append(expander[c])
            except KeyError:
                raise error.Abort(
                    _(b"invalid format spec '%%%s' in output filename") % c
                )
    return b''.join(newname)
1284
1282
1285
1283
def makefilename(ctx, pat, **props):
    """Expand an old-style %-format filename pattern against 'ctx'.

    Returns 'pat' unchanged when it is empty; otherwise converts it to a
    template and renders it with the given props.
    """
    if not pat:
        return pat
    tmpl = _buildfntemplate(pat, **props)
    # BUG: alias expansion shouldn't be made against template fragments
    # rewritten from %-format strings, but we have no easy way to partially
    # disable the expansion.
    return rendertemplate(ctx, tmpl, pycompat.byteskwargs(props))
1294
1292
1295
1293
def isstdiofilename(pat):
    """True if the given pat looks like a filename denoting stdin/stdout"""
    # Both the empty/None pattern and the conventional '-' mean stdio.
    if not pat:
        return True
    return pat == b'-'
1299
1297
1300
1298
1301 class _unclosablefile(object):
1299 class _unclosablefile(object):
1302 def __init__(self, fp):
1300 def __init__(self, fp):
1303 self._fp = fp
1301 self._fp = fp
1304
1302
1305 def close(self):
1303 def close(self):
1306 pass
1304 pass
1307
1305
1308 def __iter__(self):
1306 def __iter__(self):
1309 return iter(self._fp)
1307 return iter(self._fp)
1310
1308
1311 def __getattr__(self, attr):
1309 def __getattr__(self, attr):
1312 return getattr(self._fp, attr)
1310 return getattr(self._fp, attr)
1313
1311
1314 def __enter__(self):
1312 def __enter__(self):
1315 return self
1313 return self
1316
1314
1317 def __exit__(self, exc_type, exc_value, exc_tb):
1315 def __exit__(self, exc_type, exc_value, exc_tb):
1318 pass
1316 pass
1319
1317
1320
1318
def makefileobj(ctx, pat, mode=b'wb', **props):
    """Open the file named by expanding pattern 'pat' against 'ctx'.

    A stdio pattern ('-' or empty) yields an unclosable wrapper around
    ui.fout (for writable modes) or ui.fin; otherwise the expanded
    filename is opened with the given mode.
    """
    writable = mode not in (b'r', b'rb')

    if isstdiofilename(pat):
        repo = ctx.repo()
        if writable:
            fp = repo.ui.fout
        else:
            fp = repo.ui.fin
        # Wrap so callers closing the result don't close the ui stream.
        return _unclosablefile(fp)
    fn = makefilename(ctx, pat, **props)
    return open(fn, mode)
1333
1331
1334
1332
def openstorage(repo, cmd, file_, opts, returnrevlog=False):
    """opens the changelog, manifest, a filelog or a given revlog

    Storage selection is driven by the mutually-exclusive --changelog,
    --manifest and --dir options in 'opts', falling back to the filelog
    for 'file_'.  With 'returnrevlog', the underlying revlog is returned
    instead of the higher-level storage object.
    """
    cl = opts[b'changelog']
    mf = opts[b'manifest']
    dir = opts[b'dir']
    # Validate option combinations before touching any storage.
    msg = None
    if cl and mf:
        msg = _(b'cannot specify --changelog and --manifest at the same time')
    elif cl and dir:
        msg = _(b'cannot specify --changelog and --dir at the same time')
    elif cl or mf or dir:
        if file_:
            msg = _(b'cannot specify filename with --changelog or --manifest')
        elif not repo:
            msg = _(
                b'cannot specify --changelog or --manifest or --dir '
                b'without a repository'
            )
    if msg:
        raise error.Abort(msg)

    r = None
    if repo:
        if cl:
            r = repo.unfiltered().changelog
        elif dir:
            # --dir addresses a sub-tree manifest, which only exists on
            # treemanifest repos.
            if not scmutil.istreemanifest(repo):
                raise error.Abort(
                    _(
                        b"--dir can only be used on repos with "
                        b"treemanifest enabled"
                    )
                )
            if not dir.endswith(b'/'):
                dir = dir + b'/'
            dirlog = repo.manifestlog.getstorage(dir)
            if len(dirlog):
                r = dirlog
        elif mf:
            r = repo.manifestlog.getstorage(b'')
        elif file_:
            filelog = repo.file(file_)
            if len(filelog):
                r = filelog

    # Not all storage may be revlogs. If requested, try to return an actual
    # revlog instance.
    if returnrevlog:
        if isinstance(r, revlog.revlog):
            pass
        elif util.safehasattr(r, b'_revlog'):
            r = r._revlog  # pytype: disable=attribute-error
        elif r is not None:
            raise error.Abort(_(b'%r does not appear to be a revlog') % r)

    if not r:
        if not returnrevlog:
            raise error.Abort(_(b'cannot give path to non-revlog'))

        # Last resort: open 'file_' directly as a bare revlog on disk.
        if not file_:
            raise error.CommandError(cmd, _(b'invalid arguments'))
        if not os.path.isfile(file_):
            raise error.Abort(_(b"revlog '%s' not found") % file_)
        r = revlog.revlog(
            vfsmod.vfs(encoding.getcwd(), audit=False), file_[:-2] + b".i"
        )
    return r
1402
1400
1403
1401
def openrevlog(repo, cmd, file_, opts):
    """Obtain a revlog backing storage of an item.

    This is similar to ``openstorage()`` except it always returns a revlog.

    In most cases, a caller cares about the main storage object - not the
    revlog backing it. Therefore, this function should only be used by code
    that needs to examine low-level revlog implementation details. e.g. debug
    commands.
    """
    # Thin wrapper: delegate entirely to openstorage with returnrevlog set.
    return openstorage(repo, cmd, file_, opts, returnrevlog=True)
1415
1413
1416
1414
def copy(ui, repo, pats, opts, rename=False):
    """Copy or rename files, recording the copy/rename in the dirstate or
    in a rewritten commit.

    Implements both ``hg copy`` and ``hg rename`` (``rename=True``).
    Supports ``--forget`` (unmark copies), ``--at-rev`` (rewrite an existing
    commit's copy metadata; requires ``--after``), ``--after`` (record a
    copy/move that already happened on disk) and ``--dry-run``.

    Returns True if any per-file operation failed, False otherwise.
    NOTE(review): caller is expected to hold the repo lock — per the comment
    below; confirm against call sites.
    """
    check_incompatible_arguments(opts, b'forget', [b'dry_run'])

    # called with the repo lock held
    #
    # hgsep => pathname that uses "/" to separate directories
    # ossep => pathname that uses os.sep to separate directories
    cwd = repo.getcwd()
    targets = {}  # abstarget -> abssrc, used to detect destination collisions
    forget = opts.get(b"forget")
    after = opts.get(b"after")
    dryrun = opts.get(b"dry_run")
    rev = opts.get(b'at_rev')
    if rev:
        if not forget and not after:
            # TODO: Remove this restriction and make it also create the copy
            #       targets (and remove the rename source if rename==True).
            raise error.Abort(_(b'--at-rev requires --after'))
        ctx = scmutil.revsingle(repo, rev)
        if len(ctx.parents()) > 1:
            raise error.Abort(_(b'cannot mark/unmark copy in merge commit'))
    else:
        # default: operate on the working directory context
        ctx = repo[None]

    pctx = ctx.p1()

    uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)

    # --forget: unmark files as copied, either in the working copy or by
    # rewriting the target commit via an overlay context.
    if forget:
        if ctx.rev() is None:
            new_ctx = ctx
        else:
            if len(ctx.parents()) > 1:
                raise error.Abort(_(b'cannot unmark copy in merge commit'))
            # avoid cycle context -> subrepo -> cmdutil
            from . import context

            rewriteutil.precheck(repo, [ctx.rev()], b'uncopy')
            new_ctx = context.overlayworkingctx(repo)
            new_ctx.setbase(ctx.p1())
            mergemod.graft(repo, ctx, wctx=new_ctx)

        match = scmutil.match(ctx, pats, opts)

        current_copies = ctx.p1copies()
        current_copies.update(ctx.p2copies())

        uipathfn = scmutil.getuipathfn(repo)
        for f in ctx.walk(match):
            if f in current_copies:
                new_ctx[f].markcopied(None)
            elif match.exact(f):
                # only warn for explicitly named files that were not copies
                ui.warn(
                    _(
                        b'%s: not unmarking as copy - file is not marked as copied\n'
                    )
                    % uipathfn(f)
                )

        if ctx.rev() is not None:
            # rewriting an existing commit: commit the overlay and replace
            with repo.lock():
                mem_ctx = new_ctx.tomemctx_for_amend(ctx)
                new_node = mem_ctx.commit()

                if repo.dirstate.p1() == ctx.node():
                    with repo.dirstate.parentchange():
                        scmutil.movedirstate(repo, repo[new_node])
                replacements = {ctx.node(): [new_node]}
                scmutil.cleanupnodes(
                    repo, replacements, b'uncopy', fixphase=True
                )

        return

    pats = scmutil.expandpats(pats)
    if not pats:
        raise error.Abort(_(b'no source or destination specified'))
    if len(pats) == 1:
        raise error.Abort(_(b'no destination specified'))
    # last pattern is the destination; the rest are sources
    dest = pats.pop()

    def walkpat(pat):
        # Expand one source pattern into a list of (abs, rel, exact) tuples,
        # warning about unmanaged or removed files named explicitly.
        srcs = []
        # TODO: Inline and simplify the non-working-copy version of this code
        # since it shares very little with the working-copy version of it.
        ctx_to_walk = ctx if ctx.rev() is None else pctx
        m = scmutil.match(ctx_to_walk, [pat], opts, globbed=True)
        for abs in ctx_to_walk.walk(m):
            rel = uipathfn(abs)
            exact = m.exact(abs)
            if abs not in ctx:
                if abs in pctx:
                    if not after:
                        if exact:
                            ui.warn(
                                _(
                                    b'%s: not copying - file has been marked '
                                    b'for remove\n'
                                )
                                % rel
                            )
                        continue
                else:
                    if exact:
                        ui.warn(
                            _(b'%s: not copying - file is not managed\n') % rel
                        )
                    continue

            # abs: hgsep
            # rel: ossep
            srcs.append((abs, rel, exact))
        return srcs

    # --at-rev (without --forget): mark a single copy in an existing commit
    # by grafting it onto an overlay context and amending.
    if ctx.rev() is not None:
        rewriteutil.precheck(repo, [ctx.rev()], b'uncopy')
        absdest = pathutil.canonpath(repo.root, cwd, dest)
        if ctx.hasdir(absdest):
            raise error.Abort(
                _(b'%s: --at-rev does not support a directory as destination')
                % uipathfn(absdest)
            )
        if absdest not in ctx:
            raise error.Abort(
                _(b'%s: copy destination does not exist in %s')
                % (uipathfn(absdest), ctx)
            )

        # avoid cycle context -> subrepo -> cmdutil
        from . import context

        copylist = []
        for pat in pats:
            srcs = walkpat(pat)
            if not srcs:
                continue
            for abs, rel, exact in srcs:
                copylist.append(abs)

        if not copylist:
            raise error.Abort(_(b'no files to copy'))
        # TODO: Add support for `hg cp --at-rev . foo bar dir` and
        # `hg cp --at-rev . dir1 dir2`, preferably unifying the code with the
        # existing functions below.
        if len(copylist) != 1:
            raise error.Abort(_(b'--at-rev requires a single source'))

        new_ctx = context.overlayworkingctx(repo)
        new_ctx.setbase(ctx.p1())
        mergemod.graft(repo, ctx, wctx=new_ctx)

        new_ctx.markcopied(absdest, copylist[0])

        with repo.lock():
            mem_ctx = new_ctx.tomemctx_for_amend(ctx)
            new_node = mem_ctx.commit()

            if repo.dirstate.p1() == ctx.node():
                with repo.dirstate.parentchange():
                    scmutil.movedirstate(repo, repo[new_node])
            replacements = {ctx.node(): [new_node]}
            scmutil.cleanupnodes(repo, replacements, b'copy', fixphase=True)

        return

    # abssrc: hgsep
    # relsrc: ossep
    # otarget: ossep
    def copyfile(abssrc, relsrc, otarget, exact):
        # Perform (or record) one copy/rename; returns True on failure.
        abstarget = pathutil.canonpath(repo.root, cwd, otarget)
        if b'/' in abstarget:
            # We cannot normalize abstarget itself, this would prevent
            # case only renames, like a => A.
            abspath, absname = abstarget.rsplit(b'/', 1)
            abstarget = repo.dirstate.normalize(abspath) + b'/' + absname
        reltarget = repo.pathto(abstarget, cwd)
        target = repo.wjoin(abstarget)
        src = repo.wjoin(abssrc)
        # dirstate state char of the target ('m'/'n' mean tracked)
        state = repo.dirstate[abstarget]

        scmutil.checkportable(ui, abstarget)

        # check for collisions
        prevsrc = targets.get(abstarget)
        if prevsrc is not None:
            ui.warn(
                _(b'%s: not overwriting - %s collides with %s\n')
                % (
                    reltarget,
                    repo.pathto(abssrc, cwd),
                    repo.pathto(prevsrc, cwd),
                )
            )
            return True  # report a failure

        # check for overwrites
        exists = os.path.lexists(target)
        samefile = False
        if exists and abssrc != abstarget:
            if repo.dirstate.normalize(abssrc) == repo.dirstate.normalize(
                abstarget
            ):
                # case-only rename on a case-insensitive filesystem
                if not rename:
                    ui.warn(_(b"%s: can't copy - same file\n") % reltarget)
                    return True  # report a failure
                exists = False
                samefile = True

        if not after and exists or after and state in b'mn':
            if not opts[b'force']:
                if state in b'mn':
                    msg = _(b'%s: not overwriting - file already committed\n')
                    if after:
                        flags = b'--after --force'
                    else:
                        flags = b'--force'
                    if rename:
                        hint = (
                            _(
                                b"('hg rename %s' to replace the file by "
                                b'recording a rename)\n'
                            )
                            % flags
                        )
                    else:
                        hint = (
                            _(
                                b"('hg copy %s' to replace the file by "
                                b'recording a copy)\n'
                            )
                            % flags
                        )
                else:
                    msg = _(b'%s: not overwriting - file exists\n')
                    if rename:
                        hint = _(
                            b"('hg rename --after' to record the rename)\n"
                        )
                    else:
                        hint = _(b"('hg copy --after' to record the copy)\n")
                ui.warn(msg % reltarget)
                ui.warn(hint)
                return True  # report a failure

        if after:
            # --after only records the copy; the target must already exist
            if not exists:
                if rename:
                    ui.warn(
                        _(b'%s: not recording move - %s does not exist\n')
                        % (relsrc, reltarget)
                    )
                else:
                    ui.warn(
                        _(b'%s: not recording copy - %s does not exist\n')
                        % (relsrc, reltarget)
                    )
                return True  # report a failure
        elif not dryrun:
            try:
                if exists:
                    os.unlink(target)
                targetdir = os.path.dirname(target) or b'.'
                if not os.path.isdir(targetdir):
                    os.makedirs(targetdir)
                if samefile:
                    # two-step rename through a temp name for case-only renames
                    tmp = target + b"~hgrename"
                    os.rename(src, tmp)
                    os.rename(tmp, target)
                else:
                    # Preserve stat info on renames, not on copies; this matches
                    # Linux CLI behavior.
                    util.copyfile(src, target, copystat=rename)
                srcexists = True
            except IOError as inst:
                if inst.errno == errno.ENOENT:
                    ui.warn(_(b'%s: deleted in working directory\n') % relsrc)
                    srcexists = False
                else:
                    ui.warn(
                        _(b'%s: cannot copy - %s\n')
                        % (relsrc, encoding.strtolocal(inst.strerror))
                    )
                    return True  # report a failure

        if ui.verbose or not exact:
            if rename:
                ui.status(_(b'moving %s to %s\n') % (relsrc, reltarget))
            else:
                ui.status(_(b'copying %s to %s\n') % (relsrc, reltarget))

        targets[abstarget] = abssrc

        # fix up dirstate
        scmutil.dirstatecopy(
            ui, repo, ctx, abssrc, abstarget, dryrun=dryrun, cwd=cwd
        )
        if rename and not dryrun:
            if not after and srcexists and not samefile:
                rmdir = repo.ui.configbool(b'experimental', b'removeemptydirs')
                repo.wvfs.unlinkpath(abssrc, rmdir=rmdir)
            ctx.forget([abssrc])

    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathfn(pat, dest, srcs):
        if os.path.isdir(pat):
            abspfx = pathutil.canonpath(repo.root, cwd, pat)
            abspfx = util.localpath(abspfx)
            if destdirexists:
                striplen = len(os.path.split(abspfx)[0])
            else:
                striplen = len(abspfx)
            if striplen:
                striplen += len(pycompat.ossep)
            res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
        elif destdirexists:
            res = lambda p: os.path.join(
                dest, os.path.basename(util.localpath(p))
            )
        else:
            res = lambda p: dest
        return res

    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathafterfn(pat, dest, srcs):
        if matchmod.patkind(pat):
            # a mercurial pattern
            res = lambda p: os.path.join(
                dest, os.path.basename(util.localpath(p))
            )
        else:
            abspfx = pathutil.canonpath(repo.root, cwd, pat)
            if len(abspfx) < len(srcs[0][0]):
                # A directory. Either the target path contains the last
                # component of the source path or it does not.
                def evalpath(striplen):
                    # count how many sources already exist at the candidate
                    # destination layout — higher score wins
                    score = 0
                    for s in srcs:
                        t = os.path.join(dest, util.localpath(s[0])[striplen:])
                        if os.path.lexists(t):
                            score += 1
                    return score

                abspfx = util.localpath(abspfx)
                striplen = len(abspfx)
                if striplen:
                    striplen += len(pycompat.ossep)
                if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
                    score = evalpath(striplen)
                    striplen1 = len(os.path.split(abspfx)[0])
                    if striplen1:
                        striplen1 += len(pycompat.ossep)
                    if evalpath(striplen1) > score:
                        striplen = striplen1
                res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
            else:
                # a file
                if destdirexists:
                    res = lambda p: os.path.join(
                        dest, os.path.basename(util.localpath(p))
                    )
                else:
                    res = lambda p: dest
        return res

    destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
    if not destdirexists:
        if len(pats) > 1 or matchmod.patkind(pats[0]):
            raise error.Abort(
                _(
                    b'with multiple sources, destination must be an '
                    b'existing directory'
                )
            )
        if util.endswithsep(dest):
            raise error.Abort(_(b'destination %s is not a directory') % dest)

    tfn = targetpathfn
    if after:
        tfn = targetpathafterfn
    copylist = []
    for pat in pats:
        srcs = walkpat(pat)
        if not srcs:
            continue
        copylist.append((tfn(pat, dest, srcs), srcs))
    if not copylist:
        raise error.Abort(_(b'no files to copy'))

    errors = 0
    for targetpath, srcs in copylist:
        for abssrc, relsrc, exact in srcs:
            if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
                errors += 1

    return errors != 0
1818
1816
1819
1817
## facility to let extension process additional data into an import patch
# list of identifier to be executed in order
extrapreimport = []  # run before commit
extrapostimport = []  # run after commit
# mapping from identifier to actual import function
#
# 'preimport' are run before the commit is made and are provided the following
# arguments:
# - repo: the localrepository instance,
# - patchdata: data extracted from patch header (cf m.patch.patchheadermap),
# - extra: the future extra dictionary of the changeset, please mutate it,
# - opts: the import options.
# XXX ideally, we would just pass an ctx ready to be computed, that would allow
# mutation of in memory commit and more. Feel free to rework the code to get
# there.
extrapreimportmap = {}
# 'postimport' are run after the commit is made and are provided the following
# argument:
# - ctx: the changectx created by import.
extrapostimportmap = {}
1840
1838
1841
1839
1842 def tryimportone(ui, repo, patchdata, parents, opts, msgs, updatefunc):
1840 def tryimportone(ui, repo, patchdata, parents, opts, msgs, updatefunc):
1843 """Utility function used by commands.import to import a single patch
1841 """Utility function used by commands.import to import a single patch
1844
1842
1845 This function is explicitly defined here to help the evolve extension to
1843 This function is explicitly defined here to help the evolve extension to
1846 wrap this part of the import logic.
1844 wrap this part of the import logic.
1847
1845
1848 The API is currently a bit ugly because it a simple code translation from
1846 The API is currently a bit ugly because it a simple code translation from
1849 the import command. Feel free to make it better.
1847 the import command. Feel free to make it better.
1850
1848
1851 :patchdata: a dictionary containing parsed patch data (such as from
1849 :patchdata: a dictionary containing parsed patch data (such as from
1852 ``patch.extract()``)
1850 ``patch.extract()``)
1853 :parents: nodes that will be parent of the created commit
1851 :parents: nodes that will be parent of the created commit
1854 :opts: the full dict of option passed to the import command
1852 :opts: the full dict of option passed to the import command
1855 :msgs: list to save commit message to.
1853 :msgs: list to save commit message to.
1856 (used in case we need to save it when failing)
1854 (used in case we need to save it when failing)
1857 :updatefunc: a function that update a repo to a given node
1855 :updatefunc: a function that update a repo to a given node
1858 updatefunc(<repo>, <node>)
1856 updatefunc(<repo>, <node>)
1859 """
1857 """
1860 # avoid cycle context -> subrepo -> cmdutil
1858 # avoid cycle context -> subrepo -> cmdutil
1861 from . import context
1859 from . import context
1862
1860
1863 tmpname = patchdata.get(b'filename')
1861 tmpname = patchdata.get(b'filename')
1864 message = patchdata.get(b'message')
1862 message = patchdata.get(b'message')
1865 user = opts.get(b'user') or patchdata.get(b'user')
1863 user = opts.get(b'user') or patchdata.get(b'user')
1866 date = opts.get(b'date') or patchdata.get(b'date')
1864 date = opts.get(b'date') or patchdata.get(b'date')
1867 branch = patchdata.get(b'branch')
1865 branch = patchdata.get(b'branch')
1868 nodeid = patchdata.get(b'nodeid')
1866 nodeid = patchdata.get(b'nodeid')
1869 p1 = patchdata.get(b'p1')
1867 p1 = patchdata.get(b'p1')
1870 p2 = patchdata.get(b'p2')
1868 p2 = patchdata.get(b'p2')
1871
1869
1872 nocommit = opts.get(b'no_commit')
1870 nocommit = opts.get(b'no_commit')
1873 importbranch = opts.get(b'import_branch')
1871 importbranch = opts.get(b'import_branch')
1874 update = not opts.get(b'bypass')
1872 update = not opts.get(b'bypass')
1875 strip = opts[b"strip"]
1873 strip = opts[b"strip"]
1876 prefix = opts[b"prefix"]
1874 prefix = opts[b"prefix"]
1877 sim = float(opts.get(b'similarity') or 0)
1875 sim = float(opts.get(b'similarity') or 0)
1878
1876
1879 if not tmpname:
1877 if not tmpname:
1880 return None, None, False
1878 return None, None, False
1881
1879
1882 rejects = False
1880 rejects = False
1883
1881
1884 cmdline_message = logmessage(ui, opts)
1882 cmdline_message = logmessage(ui, opts)
1885 if cmdline_message:
1883 if cmdline_message:
1886 # pickup the cmdline msg
1884 # pickup the cmdline msg
1887 message = cmdline_message
1885 message = cmdline_message
1888 elif message:
1886 elif message:
1889 # pickup the patch msg
1887 # pickup the patch msg
1890 message = message.strip()
1888 message = message.strip()
1891 else:
1889 else:
1892 # launch the editor
1890 # launch the editor
1893 message = None
1891 message = None
1894 ui.debug(b'message:\n%s\n' % (message or b''))
1892 ui.debug(b'message:\n%s\n' % (message or b''))
1895
1893
1896 if len(parents) == 1:
1894 if len(parents) == 1:
1897 parents.append(repo[nullid])
1895 parents.append(repo[nullid])
1898 if opts.get(b'exact'):
1896 if opts.get(b'exact'):
1899 if not nodeid or not p1:
1897 if not nodeid or not p1:
1900 raise error.Abort(_(b'not a Mercurial patch'))
1898 raise error.Abort(_(b'not a Mercurial patch'))
1901 p1 = repo[p1]
1899 p1 = repo[p1]
1902 p2 = repo[p2 or nullid]
1900 p2 = repo[p2 or nullid]
1903 elif p2:
1901 elif p2:
1904 try:
1902 try:
1905 p1 = repo[p1]
1903 p1 = repo[p1]
1906 p2 = repo[p2]
1904 p2 = repo[p2]
1907 # Without any options, consider p2 only if the
1905 # Without any options, consider p2 only if the
1908 # patch is being applied on top of the recorded
1906 # patch is being applied on top of the recorded
1909 # first parent.
1907 # first parent.
1910 if p1 != parents[0]:
1908 if p1 != parents[0]:
1911 p1 = parents[0]
1909 p1 = parents[0]
1912 p2 = repo[nullid]
1910 p2 = repo[nullid]
1913 except error.RepoError:
1911 except error.RepoError:
1914 p1, p2 = parents
1912 p1, p2 = parents
1915 if p2.node() == nullid:
1913 if p2.node() == nullid:
1916 ui.warn(
1914 ui.warn(
1917 _(
1915 _(
1918 b"warning: import the patch as a normal revision\n"
1916 b"warning: import the patch as a normal revision\n"
1919 b"(use --exact to import the patch as a merge)\n"
1917 b"(use --exact to import the patch as a merge)\n"
1920 )
1918 )
1921 )
1919 )
1922 else:
1920 else:
1923 p1, p2 = parents
1921 p1, p2 = parents
1924
1922
1925 n = None
1923 n = None
1926 if update:
1924 if update:
1927 if p1 != parents[0]:
1925 if p1 != parents[0]:
1928 updatefunc(repo, p1.node())
1926 updatefunc(repo, p1.node())
1929 if p2 != parents[1]:
1927 if p2 != parents[1]:
1930 repo.setparents(p1.node(), p2.node())
1928 repo.setparents(p1.node(), p2.node())
1931
1929
1932 if opts.get(b'exact') or importbranch:
1930 if opts.get(b'exact') or importbranch:
1933 repo.dirstate.setbranch(branch or b'default')
1931 repo.dirstate.setbranch(branch or b'default')
1934
1932
1935 partial = opts.get(b'partial', False)
1933 partial = opts.get(b'partial', False)
1936 files = set()
1934 files = set()
1937 try:
1935 try:
1938 patch.patch(
1936 patch.patch(
1939 ui,
1937 ui,
1940 repo,
1938 repo,
1941 tmpname,
1939 tmpname,
1942 strip=strip,
1940 strip=strip,
1943 prefix=prefix,
1941 prefix=prefix,
1944 files=files,
1942 files=files,
1945 eolmode=None,
1943 eolmode=None,
1946 similarity=sim / 100.0,
1944 similarity=sim / 100.0,
1947 )
1945 )
1948 except error.PatchError as e:
1946 except error.PatchError as e:
1949 if not partial:
1947 if not partial:
1950 raise error.Abort(pycompat.bytestr(e))
1948 raise error.Abort(pycompat.bytestr(e))
1951 if partial:
1949 if partial:
1952 rejects = True
1950 rejects = True
1953
1951
1954 files = list(files)
1952 files = list(files)
1955 if nocommit:
1953 if nocommit:
1956 if message:
1954 if message:
1957 msgs.append(message)
1955 msgs.append(message)
1958 else:
1956 else:
1959 if opts.get(b'exact') or p2:
1957 if opts.get(b'exact') or p2:
1960 # If you got here, you either use --force and know what
1958 # If you got here, you either use --force and know what
1961 # you are doing or used --exact or a merge patch while
1959 # you are doing or used --exact or a merge patch while
1962 # being updated to its first parent.
1960 # being updated to its first parent.
1963 m = None
1961 m = None
1964 else:
1962 else:
1965 m = scmutil.matchfiles(repo, files or [])
1963 m = scmutil.matchfiles(repo, files or [])
1966 editform = mergeeditform(repo[None], b'import.normal')
1964 editform = mergeeditform(repo[None], b'import.normal')
1967 if opts.get(b'exact'):
1965 if opts.get(b'exact'):
1968 editor = None
1966 editor = None
1969 else:
1967 else:
1970 editor = getcommiteditor(
1968 editor = getcommiteditor(
1971 editform=editform, **pycompat.strkwargs(opts)
1969 editform=editform, **pycompat.strkwargs(opts)
1972 )
1970 )
1973 extra = {}
1971 extra = {}
1974 for idfunc in extrapreimport:
1972 for idfunc in extrapreimport:
1975 extrapreimportmap[idfunc](repo, patchdata, extra, opts)
1973 extrapreimportmap[idfunc](repo, patchdata, extra, opts)
1976 overrides = {}
1974 overrides = {}
1977 if partial:
1975 if partial:
1978 overrides[(b'ui', b'allowemptycommit')] = True
1976 overrides[(b'ui', b'allowemptycommit')] = True
1979 if opts.get(b'secret'):
1977 if opts.get(b'secret'):
1980 overrides[(b'phases', b'new-commit')] = b'secret'
1978 overrides[(b'phases', b'new-commit')] = b'secret'
1981 with repo.ui.configoverride(overrides, b'import'):
1979 with repo.ui.configoverride(overrides, b'import'):
1982 n = repo.commit(
1980 n = repo.commit(
1983 message, user, date, match=m, editor=editor, extra=extra
1981 message, user, date, match=m, editor=editor, extra=extra
1984 )
1982 )
1985 for idfunc in extrapostimport:
1983 for idfunc in extrapostimport:
1986 extrapostimportmap[idfunc](repo[n])
1984 extrapostimportmap[idfunc](repo[n])
1987 else:
1985 else:
1988 if opts.get(b'exact') or importbranch:
1986 if opts.get(b'exact') or importbranch:
1989 branch = branch or b'default'
1987 branch = branch or b'default'
1990 else:
1988 else:
1991 branch = p1.branch()
1989 branch = p1.branch()
1992 store = patch.filestore()
1990 store = patch.filestore()
1993 try:
1991 try:
1994 files = set()
1992 files = set()
1995 try:
1993 try:
1996 patch.patchrepo(
1994 patch.patchrepo(
1997 ui,
1995 ui,
1998 repo,
1996 repo,
1999 p1,
1997 p1,
2000 store,
1998 store,
2001 tmpname,
1999 tmpname,
2002 strip,
2000 strip,
2003 prefix,
2001 prefix,
2004 files,
2002 files,
2005 eolmode=None,
2003 eolmode=None,
2006 )
2004 )
2007 except error.PatchError as e:
2005 except error.PatchError as e:
2008 raise error.Abort(stringutil.forcebytestr(e))
2006 raise error.Abort(stringutil.forcebytestr(e))
2009 if opts.get(b'exact'):
2007 if opts.get(b'exact'):
2010 editor = None
2008 editor = None
2011 else:
2009 else:
2012 editor = getcommiteditor(editform=b'import.bypass')
2010 editor = getcommiteditor(editform=b'import.bypass')
2013 memctx = context.memctx(
2011 memctx = context.memctx(
2014 repo,
2012 repo,
2015 (p1.node(), p2.node()),
2013 (p1.node(), p2.node()),
2016 message,
2014 message,
2017 files=files,
2015 files=files,
2018 filectxfn=store,
2016 filectxfn=store,
2019 user=user,
2017 user=user,
2020 date=date,
2018 date=date,
2021 branch=branch,
2019 branch=branch,
2022 editor=editor,
2020 editor=editor,
2023 )
2021 )
2024
2022
2025 overrides = {}
2023 overrides = {}
2026 if opts.get(b'secret'):
2024 if opts.get(b'secret'):
2027 overrides[(b'phases', b'new-commit')] = b'secret'
2025 overrides[(b'phases', b'new-commit')] = b'secret'
2028 with repo.ui.configoverride(overrides, b'import'):
2026 with repo.ui.configoverride(overrides, b'import'):
2029 n = memctx.commit()
2027 n = memctx.commit()
2030 finally:
2028 finally:
2031 store.close()
2029 store.close()
2032 if opts.get(b'exact') and nocommit:
2030 if opts.get(b'exact') and nocommit:
2033 # --exact with --no-commit is still useful in that it does merge
2031 # --exact with --no-commit is still useful in that it does merge
2034 # and branch bits
2032 # and branch bits
2035 ui.warn(_(b"warning: can't check exact import with --no-commit\n"))
2033 ui.warn(_(b"warning: can't check exact import with --no-commit\n"))
2036 elif opts.get(b'exact') and (not n or hex(n) != nodeid):
2034 elif opts.get(b'exact') and (not n or hex(n) != nodeid):
2037 raise error.Abort(_(b'patch is damaged or loses information'))
2035 raise error.Abort(_(b'patch is damaged or loses information'))
2038 msg = _(b'applied to working directory')
2036 msg = _(b'applied to working directory')
2039 if n:
2037 if n:
2040 # i18n: refers to a short changeset id
2038 # i18n: refers to a short changeset id
2041 msg = _(b'created %s') % short(n)
2039 msg = _(b'created %s') % short(n)
2042 return msg, n, rejects
2040 return msg, n, rejects
2043
2041
2044
2042
# facility to let extensions include additional data in an exported patch
# list of identifiers to be executed in order
extraexport = []
# mapping from identifier to the actual export function
# the function has to return a string to be added to the header, or None;
# it is given two arguments: (sequencenumber, changectx)
extraexportmap = {}
2052
2050
2053
2051
def _exportsingle(repo, ctx, fm, match, switch_parent, seqno, diffopts):
    """Write one changeset as an "HG changeset patch" to formatter *fm*.

    Emits the header lines (user, date, branch, node, parent(s), plus any
    extension-registered extra headers), the description, and finally the
    diff against the selected parent.

    seqno is the 1-based position of *ctx* in the exported series; it is
    only forwarded to the extraexportmap hooks.
    """
    node = scmutil.binnode(ctx)
    # drop null parents; a changeset has at most two real parents
    parents = [p.node() for p in ctx.parents() if p]
    branch = ctx.branch()
    if switch_parent:
        # diff against the second parent (when present) instead of the first
        parents.reverse()

    if parents:
        prev = parents[0]
    else:
        prev = nullid

    fm.context(ctx=ctx)
    fm.plain(b'# HG changeset patch\n')
    fm.write(b'user', b'# User %s\n', ctx.user())
    fm.plain(b'# Date %d %d\n' % ctx.date())
    fm.write(b'date', b'# %s\n', fm.formatdate(ctx.date()))
    # the Branch header is only written for non-default branches
    fm.condwrite(
        branch and branch != b'default', b'branch', b'# Branch %s\n', branch
    )
    fm.write(b'node', b'# Node ID %s\n', hex(node))
    fm.plain(b'# Parent %s\n' % hex(prev))
    if len(parents) > 1:
        fm.plain(b'# Parent %s\n' % hex(parents[1]))
    fm.data(parents=fm.formatlist(pycompat.maplist(hex, parents), name=b'node'))

    # TODO: redesign extraexportmap function to support formatter
    for headerid in extraexport:
        header = extraexportmap[headerid](seqno, ctx)
        if header is not None:
            fm.plain(b'# %s\n' % header)

    fm.write(b'desc', b'%s\n', ctx.description().rstrip())
    fm.plain(b'\n')

    if fm.isplain():
        # plain output: stream labeled chunks so color/labels are preserved
        chunkiter = patch.diffui(repo, prev, node, match, opts=diffopts)
        for chunk, label in chunkiter:
            fm.plain(chunk, label=label)
    else:
        # structured output: embed the whole diff as a single blob
        chunkiter = patch.diff(repo, prev, node, match, opts=diffopts)
        # TODO: make it structured?
        fm.data(diff=b''.join(chunkiter))
2097
2095
2098
2096
def _exportfile(repo, revs, fm, dest, switch_parent, diffopts, match):
    """Export changesets to stdout or a single file"""
    seqno = 0
    for rev in revs:
        seqno += 1
        ctx = repo[rev]
        # '<' marks pseudo-destinations such as b'<unnamed>'; only note
        # real file names
        if not dest.startswith(b'<'):
            repo.ui.note(b"%s\n" % dest)
        fm.startitem()
        _exportsingle(repo, ctx, fm, match, switch_parent, seqno, diffopts)
2107
2105
2108
2106
def _exportfntemplate(
    repo, revs, basefm, fntemplate, switch_parent, diffopts, match
):
    """Export changesets to possibly multiple files

    File names are produced from *fntemplate* via makefilename(); several
    revisions may map to the same file, in which case they are written to
    it in sequence.
    """
    total = len(revs)
    revwidth = max(len(str(rev)) for rev in revs)
    filemap = util.sortdict()  # filename: [(seqno, rev), ...]

    # first pass: resolve each rev's destination file, keeping insertion
    # order so output files are created in template order
    for seqno, rev in enumerate(revs, 1):
        ctx = repo[rev]
        dest = makefilename(
            ctx, fntemplate, total=total, seqno=seqno, revwidth=revwidth
        )
        filemap.setdefault(dest, []).append((seqno, rev))

    # second pass: open each destination once and write its revisions
    for dest in filemap:
        with formatter.maybereopen(basefm, dest) as fm:
            repo.ui.note(b"%s\n" % dest)
            for seqno, rev in filemap[dest]:
                fm.startitem()
                ctx = repo[rev]
                _exportsingle(
                    repo, ctx, fm, match, switch_parent, seqno, diffopts
                )
2133
2131
2134
2132
def _prefetchchangedfiles(repo, revs, match):
    """Prefetch every file touched by *revs* that passes *match*.

    A None/falsy *match* means all changed files are prefetched.
    """
    changed = set()
    for rev in revs:
        changed.update(
            fname for fname in repo[rev].files() if not match or match(fname)
        )
    filematcher = scmutil.matchfiles(repo, changed)
    scmutil.prefetchfiles(repo, [(rev, filematcher) for rev in revs])
2144
2142
2145
2143
def export(
    repo,
    revs,
    basefm,
    fntemplate=b'hg-%h.patch',
    switch_parent=False,
    opts=None,
    match=None,
):
    '''export changesets as hg patches

    Args:
      repo: The repository from which we're exporting revisions.
      revs: A list of revisions to export as revision numbers.
      basefm: A formatter to which patches should be written.
      fntemplate: An optional string to use for generating patch file names.
      switch_parent: If True, show diffs against second parent when not nullid.
        Default is false, which always shows diff against p1.
      opts: diff options to use for generating the patch.
      match: If specified, only export changes to files matching this matcher.

    Returns:
      Nothing.

    Side Effect:
      "HG Changeset Patch" data is emitted to one of the following
      destinations:
        fntemplate specified: Each rev is written to a unique file named using
                              the given template.
        Otherwise: All revs will be written to basefm.
    '''
    # warm the file cache before generating diffs
    _prefetchchangedfiles(repo, revs, match)

    # dispatch on whether per-revision file names were requested
    if not fntemplate:
        _exportfile(
            repo, revs, basefm, b'<unnamed>', switch_parent, opts, match
        )
    else:
        _exportfntemplate(
            repo, revs, basefm, fntemplate, switch_parent, opts, match
        )
2187
2185
2188
2186
def exportfile(repo, revs, fp, switch_parent=False, opts=None, match=None):
    """Export changesets to the given file stream

    Convenience wrapper around _exportfile() that builds a plain 'export'
    formatter over *fp*.
    """
    _prefetchchangedfiles(repo, revs, match)

    # streams without a .name attribute (e.g. in-memory buffers) are
    # reported as b'<unnamed>'
    dest = getattr(fp, 'name', b'<unnamed>')
    with formatter.formatter(repo.ui, fp, b'export', {}) as fm:
        _exportfile(repo, revs, fm, dest, switch_parent, opts, match)
2196
2194
2197
2195
def showmarker(fm, marker, index=None):
    """utility function to display obsolescence marker in a readable way

    To be used by debug function.

    Writes one line per marker: optional index, predecessor node,
    successor nodes (if any), flags, optional parent nodes, date and
    remaining metadata.
    """
    if index is not None:
        fm.write(b'index', b'%i ', index)
    fm.write(b'prednode', b'%s ', hex(marker.prednode()))
    succs = marker.succnodes()
    # succnodes may be empty (pruning marker); condwrite skips it then
    fm.condwrite(
        succs,
        b'succnodes',
        b'%s ',
        fm.formatlist(map(hex, succs), name=b'node'),
    )
    fm.write(b'flag', b'%X ', marker.flags())
    parents = marker.parentnodes()
    if parents is not None:
        fm.write(
            b'parentnodes',
            b'{%s} ',
            fm.formatlist(map(hex, parents), name=b'node', sep=b', '),
        )
    fm.write(b'date', b'(%s) ', fm.formatdate(marker.date()))
    # the date is shown separately above, so drop it from the metadata dict
    meta = marker.metadata().copy()
    meta.pop(b'date', None)
    smeta = pycompat.rapply(pycompat.maybebytestr, meta)
    fm.write(
        b'metadata', b'{%s}', fm.formatdict(smeta, fmt=b'%r: %r', sep=b', ')
    )
    fm.plain(b'\n')
2228
2226
2229
2227
def finddate(ui, repo, date):
    """Find the tipmost changeset that matches the given date spec

    Returns the revision number as a bytestring; aborts when no revision
    matches the date.
    """
    mrevs = repo.revs(b'date(%s)', date)
    try:
        # max() raises ValueError on an empty revset
        rev = mrevs.max()
    except ValueError:
        raise error.Abort(_(b"revision matching date not found"))

    ui.status(
        _(b"found revision %d from %s\n")
        % (rev, dateutil.datestr(repo[rev].date()))
    )
    return b'%d' % rev
2243
2241
2244
2242
def increasingwindows(windowsize=8, sizelimit=512):
    """Yield revision-window sizes, doubling each step up to *sizelimit*.

    Infinite generator: after reaching *sizelimit* it keeps yielding that
    value forever.
    """
    size = windowsize
    while True:
        yield size
        if size < sizelimit:
            size = size * 2
2250
2248
2251
2249
def _walkrevs(repo, opts):
    """Resolve the revisions a log-style walk should visit.

    Combines the --rev and --follow/--follow_first options from *opts*
    and returns a smartset in reverse (newest-first) order.
    """
    # Default --rev value depends on --follow but --follow behavior
    # depends on revisions resolved from --rev...
    follow = opts.get(b'follow') or opts.get(b'follow_first')
    revspec = opts.get(b'rev')
    if follow and revspec:
        # follow + explicit revs: walk the ancestors of the given revs
        revs = scmutil.revrange(repo, revspec)
        revs = repo.revs(b'reverse(::%ld)', revs)
    elif revspec:
        revs = scmutil.revrange(repo, revspec)
    elif follow and repo.dirstate.p1() == nullid:
        # following from an unborn working-directory parent: nothing to walk
        revs = smartset.baseset()
    elif follow:
        # follow from the working-directory parent backwards
        revs = repo.revs(b'reverse(:.)')
    else:
        revs = smartset.spanset(repo)
        revs.reverse()
    return revs
2270
2271
class FileWalkError(Exception):
    """Raised when file history cannot be walked using filelogs alone."""

    pass
2274
2275
def walkfilerevs(repo, match, follow, revs, fncache):
    '''Walks the file history for the matched files.

    Returns the changeset revs that are involved in the file history.

    Throws FileWalkError if the file history can't be walked using
    filelogs alone.

    Side effect: fncache is populated with rev -> [filenames] entries for
    every wanted rev.
    '''
    wanted = set()
    copies = []  # (filename, filenode) pairs discovered via renames
    minrev, maxrev = min(revs), max(revs)

    def filerevs(filelog, last):
        """
        Only files, no patterns. Check the history of each file.

        Examines filelog entries within minrev, maxrev linkrev range
        Returns an iterator yielding (linkrev, parentlinkrevs, copied)
        tuples in backwards order
        """
        cl_count = len(repo)
        revs = []
        for j in pycompat.xrange(0, last + 1):
            linkrev = filelog.linkrev(j)
            if linkrev < minrev:
                continue
            # only yield rev for which we have the changelog, it can
            # happen while doing "hg log" during a pull or commit
            if linkrev >= cl_count:
                break

            parentlinkrevs = []
            for p in filelog.parentrevs(j):
                if p != nullrev:
                    parentlinkrevs.append(filelog.linkrev(p))
            n = filelog.node(j)
            revs.append(
                (linkrev, parentlinkrevs, follow and filelog.renamed(n))
            )

        return reversed(revs)

    def iterfiles():
        # yields (filename, filenode-or-None); filenode is resolved from
        # the working-directory parent when following
        pctx = repo[b'.']
        for filename in match.files():
            if follow:
                if filename not in pctx:
                    raise error.Abort(
                        _(
                            b'cannot follow file not in parent '
                            b'revision: "%s"'
                        )
                        % filename
                    )
                yield filename, pctx[filename].filenode()
            else:
                yield filename, None
        # copies grows while the outer loop below runs, so rename sources
        # get picked up here as they are discovered
        for filename_node in copies:
            yield filename_node

    for file_, node in iterfiles():
        filelog = repo.file(file_)
        if not len(filelog):
            if node is None:
                # A zero count may be a directory or deleted file, so
                # try to find matching entries on the slow path.
                if follow:
                    raise error.Abort(
                        _(b'cannot follow nonexistent file: "%s"') % file_
                    )
                raise FileWalkError(b"Cannot walk via filelog")
            else:
                continue

        if node is None:
            last = len(filelog) - 1
        else:
            last = filelog.rev(node)

        # keep track of all ancestors of the file
        ancestors = {filelog.linkrev(last)}

        # iterate from latest to oldest revision
        for rev, flparentlinkrevs, copied in filerevs(filelog, last):
            if not follow:
                if rev > maxrev:
                    continue
            else:
                # Note that last might not be the first interesting
                # rev to us:
                # if the file has been changed after maxrev, we'll
                # have linkrev(last) > maxrev, and we still need
                # to explore the file graph
                if rev not in ancestors:
                    continue
            # XXX insert 1327 fix here
            if flparentlinkrevs:
                ancestors.update(flparentlinkrevs)

            fncache.setdefault(rev, []).append(file_)
            wanted.add(rev)
            if copied:
                copies.append(copied)

    return wanted
2381
2382
class _followfilter(object):
    """Stateful predicate selecting revisions related to the first rev seen.

    The first rev passed to match() becomes the start point; later revs
    match when they are descendants (forward) or ancestors (backwards) of
    it.  match() must be called in a monotonic walk order for the root-set
    bookkeeping to stay correct.
    """

    def __init__(self, repo, onlyfirst=False):
        self.repo = repo
        # nullrev means "start point not yet established"
        self.startrev = nullrev
        self.roots = set()
        self.onlyfirst = onlyfirst

    def match(self, rev):
        def realparents(rev):
            # parent revs of `rev`, restricted to the first parent when
            # onlyfirst is set, and excluding nullrev otherwise
            try:
                if self.onlyfirst:
                    return self.repo.changelog.parentrevs(rev)[0:1]
                else:
                    return filter(
                        lambda x: x != nullrev,
                        self.repo.changelog.parentrevs(rev),
                    )
            except error.WdirUnsupported:
                # the working directory has no changelog entry; ask the
                # context for its parents instead
                prevs = [p.rev() for p in self.repo[rev].parents()]
                if self.onlyfirst:
                    return prevs[:1]
                else:
                    return prevs

        if self.startrev == nullrev:
            self.startrev = rev
            return True

        if rev > self.startrev:
            # forward: all descendants
            if not self.roots:
                self.roots.add(self.startrev)
            for parent in realparents(rev):
                if parent in self.roots:
                    self.roots.add(rev)
                    return True
        else:
            # backwards: all parents
            if not self.roots:
                self.roots.update(realparents(self.startrev))
            if rev in self.roots:
                self.roots.remove(rev)
                self.roots.update(realparents(rev))
                return True

        return False
2429
2430
def walkchangerevs(repo, revs, makefilematcher, prepare):
    '''Iterate over files and the revs in a "windowed" way.

    Callers most commonly need to iterate backwards over the history
    in which they are interested. Doing so has awful (quadratic-looking)
    performance, so we use iterators in a "windowed" way.

    We walk a window of revisions in the desired order. Within the
    window, we first walk forwards to gather data, then in the desired
    order (usually backwards) to display it.

    This function returns an iterator yielding contexts. Before
    yielding each context, the iterator will first call the prepare
    function on each context in the window in forward order.'''

    if not revs:
        return []
    change = repo.__getitem__

    def iterate():
        it = iter(revs)
        stopiteration = False
        for windowsize in increasingwindows():
            # pull up to windowsize revs from the input iterator
            nrevs = []
            for i in pycompat.xrange(windowsize):
                rev = next(it, None)
                if rev is None:
                    stopiteration = True
                    break
                nrevs.append(rev)
            # prepare the window in forward (ascending) order ...
            for rev in sorted(nrevs):
                ctx = change(rev)
                prepare(ctx, makefilematcher(ctx))
            # ... then yield in the caller-requested order
            for rev in nrevs:
                yield change(rev)

            if stopiteration:
                break

    return iterate()
2471
2290
2472
2291
def add(ui, repo, match, prefix, uipathfn, explicitonly, **opts):
    """Schedule files matching *match* for addition to the dirstate.

    Recurses into subrepositories.  When *explicitonly* is true, only
    exactly-named files are added (no pattern expansion beyond that).
    Returns the list of paths that could not be added.
    """
    bad = []

    # record files the matcher flags as bad while still delegating to the
    # matcher's own bad() callback
    badfn = lambda x, y: bad.append(x) or match.bad(x, y)
    names = []
    wctx = repo[None]
    cca = None
    abort, warn = scmutil.checkportabilityalert(ui)
    if abort or warn:
        # guard against case-collision problems on case-insensitive
        # filesystems
        cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)

    match = repo.narrowmatch(match, includeexact=True)
    badmatch = matchmod.badmatch(match, badfn)
    dirstate = repo.dirstate
    # We don't want to just call wctx.walk here, since it would return a lot of
    # clean files, which we aren't interested in and takes time.
    for f in sorted(
        dirstate.walk(
            badmatch,
            subrepos=sorted(wctx.substate),
            unknown=True,
            ignored=False,
            full=False,
        )
    ):
        exact = match.exact(f)
        if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
            if cca:
                cca(f)
            names.append(f)
            if ui.verbose or not exact:
                ui.status(
                    _(b'adding %s\n') % uipathfn(f), label=b'ui.addremove.added'
                )

    # recurse into subrepositories; with --subrepos their non-exact matches
    # are added too (explicitonly=False), otherwise only exact ones
    for subpath in sorted(wctx.substate):
        sub = wctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, match)
            subprefix = repo.wvfs.reljoin(prefix, subpath)
            subuipathfn = scmutil.subdiruipathfn(subpath, uipathfn)
            if opts.get('subrepos'):
                bad.extend(
                    sub.add(ui, submatch, subprefix, subuipathfn, False, **opts)
                )
            else:
                bad.extend(
                    sub.add(ui, submatch, subprefix, subuipathfn, True, **opts)
                )
        except error.LookupError:
            ui.status(
                _(b"skipping missing subrepository: %s\n") % uipathfn(subpath)
            )

    if not opts.get('dry_run'):
        rejected = wctx.add(names, prefix)
        # only report rejects the user explicitly asked for
        bad.extend(f for f in rejected if f in match.files())
    return bad
2531
2350
2532
2351
def addwebdirpath(repo, serverpath, webconf):
    """Register *repo* and all its subrepositories under *serverpath*.

    Mutates *webconf* (a path -> repo-root mapping) in place.
    """
    webconf[serverpath] = repo.root
    repo.ui.debug(b'adding %s = %s\n' % (serverpath, repo.root))

    # every revision touching .hgsub may declare subrepos; register each
    for r in repo.revs(b'filelog("path:.hgsub")'):
        ctx = repo[r]
        for subpath in ctx.substate:
            ctx.sub(subpath).addwebdirpath(serverpath, webconf)
2541
2360
2542
2361
def forget(
    ui, repo, match, prefix, uipathfn, explicitonly, dryrun, interactive
):
    """Stop tracking the files matched by ``match`` without deleting them.

    Recurses into subrepositories. With ``interactive`` set, prompts per
    file; with ``dryrun`` set, reports what would happen without changing
    the dirstate. Returns a pair ``(bad, forgot)``: files that could not
    be forgotten, and files that were (or would be) forgotten.
    """
    if dryrun and interactive:
        raise error.Abort(_(b"cannot specify both --dry-run and --interactive"))
    bad = []
    badfn = lambda x, y: bad.append(x) or match.bad(x, y)
    wctx = repo[None]
    forgot = []

    # Candidate set: everything tracked that the matcher selects.
    s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
    forget = sorted(s.modified + s.added + s.deleted + s.clean)
    if explicitonly:
        forget = [f for f in forget if match.exact(f)]

    for subpath in sorted(wctx.substate):
        sub = wctx.sub(subpath)
        submatch = matchmod.subdirmatcher(subpath, match)
        subprefix = repo.wvfs.reljoin(prefix, subpath)
        subuipathfn = scmutil.subdiruipathfn(subpath, uipathfn)
        try:
            subbad, subforgot = sub.forget(
                submatch,
                subprefix,
                subuipathfn,
                dryrun=dryrun,
                interactive=interactive,
            )
            bad.extend([subpath + b'/' + f for f in subbad])
            forgot.extend([subpath + b'/' + f for f in subforgot])
        except error.LookupError:
            ui.status(
                _(b"skipping missing subrepository: %s\n") % uipathfn(subpath)
            )

    if not explicitonly:
        # Complain about explicitly named files that are not tracked.
        for f in match.files():
            if f not in repo.dirstate and not repo.wvfs.isdir(f):
                if f not in forgot:
                    if repo.wvfs.exists(f):
                        # Don't complain if the exact case match wasn't given.
                        # But don't do this until after checking 'forgot', so
                        # that subrepo files aren't normalized, and this op is
                        # purely from data cached by the status walk above.
                        if repo.dirstate.normalize(f) in repo.dirstate:
                            continue
                        ui.warn(
                            _(
                                b'not removing %s: '
                                b'file is already untracked\n'
                            )
                            % uipathfn(f)
                        )
                    bad.append(f)

    if interactive:
        responses = _(
            b'[Ynsa?]'
            b'$$ &Yes, forget this file'
            b'$$ &No, skip this file'
            b'$$ &Skip remaining files'
            b'$$ Include &all remaining files'
            b'$$ &? (display help)'
        )
        # Iterate over a copy: "no" removes entries from the live list.
        for filename in forget[:]:
            r = ui.promptchoice(
                _(b'forget %s %s') % (uipathfn(filename), responses)
            )
            if r == 4:  # ?
                while r == 4:
                    for c, t in ui.extractchoices(responses)[1]:
                        ui.write(b'%s - %s\n' % (c, encoding.lower(t)))
                    r = ui.promptchoice(
                        _(b'forget %s %s') % (uipathfn(filename), responses)
                    )
            if r == 0:  # yes
                continue
            elif r == 1:  # no
                forget.remove(filename)
            elif r == 2:  # Skip
                fnindex = forget.index(filename)
                del forget[fnindex:]
                break
            elif r == 3:  # All
                break

    for f in forget:
        if ui.verbose or not match.exact(f) or interactive:
            ui.status(
                _(b'removing %s\n') % uipathfn(f), label=b'ui.addremove.removed'
            )

    if not dryrun:
        rejected = wctx.forget(forget, prefix)
        bad.extend(f for f in rejected if f in match.files())
        forgot.extend(f for f in forget if f not in rejected)
    return bad, forgot
2640
2459
2641
2460
def files(ui, ctx, m, uipathfn, fm, fmt, subrepos):
    """List the files in ``ctx`` matched by ``m`` through formatter ``fm``.

    Recurses into subrepositories when ``subrepos`` is set or a subrepo is
    matched exactly. Returns 0 if at least one file was listed, 1 otherwise.
    """
    ret = 1

    # File contexts are only needed when size/flags will be displayed.
    needsfctx = ui.verbose or {b'size', b'flags'} & fm.datahint()
    if fm.isplain() and not needsfctx:
        # Fast path. The speed-up comes from skipping the formatter, and batching
        # calls to ui.write.
        buf = []
        for f in ctx.matches(m):
            buf.append(fmt % uipathfn(f))
            if len(buf) > 100:
                ui.write(b''.join(buf))
                del buf[:]
            ret = 0
        if buf:
            ui.write(b''.join(buf))
    else:
        for f in ctx.matches(m):
            fm.startitem()
            fm.context(ctx=ctx)
            if needsfctx:
                fc = ctx[f]
                fm.write(b'size flags', b'% 10d % 1s ', fc.size(), fc.flags())
            fm.data(path=f)
            fm.plain(fmt % uipathfn(f))
            ret = 0

    for subpath in sorted(ctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        subuipathfn = scmutil.subdiruipathfn(subpath, uipathfn)
        if subrepos or m.exact(subpath) or any(submatch.files()):
            sub = ctx.sub(subpath)
            try:
                recurse = m.exact(subpath) or subrepos
                if (
                    sub.printfiles(ui, submatch, subuipathfn, fm, fmt, recurse)
                    == 0
                ):
                    ret = 0
            except error.LookupError:
                ui.status(
                    _(b"skipping missing subrepository: %s\n")
                    % uipathfn(subpath)
                )

    return ret
2688
2507
2689
2508
def remove(
    ui, repo, m, prefix, uipathfn, after, force, subrepos, dryrun, warnings=None
):
    """Implement ``hg remove``: untrack and (unless ``after``) delete files.

    ``force`` removes even modified/added files; ``after`` only records
    already-deleted files. Recurses into subrepositories. When ``warnings``
    is supplied by a recursive caller, messages are accumulated there and
    printed by the outermost call. Returns 1 if any problem was reported,
    0 otherwise.
    """
    ret = 0
    s = repo.status(match=m, clean=True)
    modified, added, deleted, clean = s.modified, s.added, s.deleted, s.clean

    wctx = repo[None]

    # Only the outermost invocation (warnings is None) prints the warnings.
    if warnings is None:
        warnings = []
        warn = True
    else:
        warn = False

    subs = sorted(wctx.substate)
    progress = ui.makeprogress(
        _(b'searching'), total=len(subs), unit=_(b'subrepos')
    )
    for subpath in subs:
        submatch = matchmod.subdirmatcher(subpath, m)
        subprefix = repo.wvfs.reljoin(prefix, subpath)
        subuipathfn = scmutil.subdiruipathfn(subpath, uipathfn)
        if subrepos or m.exact(subpath) or any(submatch.files()):
            progress.increment()
            sub = wctx.sub(subpath)
            try:
                if sub.removefiles(
                    submatch,
                    subprefix,
                    subuipathfn,
                    after,
                    force,
                    subrepos,
                    dryrun,
                    warnings,
                ):
                    ret = 1
            except error.LookupError:
                warnings.append(
                    _(b"skipping missing subrepository: %s\n")
                    % uipathfn(subpath)
                )
    progress.complete()

    # warn about failure to delete explicit files/dirs
    deleteddirs = pathutil.dirs(deleted)
    files = m.files()
    progress = ui.makeprogress(
        _(b'deleting'), total=len(files), unit=_(b'files')
    )
    for f in files:

        def insubrepo():
            for subpath in wctx.substate:
                if f.startswith(subpath + b'/'):
                    return True
            return False

        progress.increment()
        isdir = f in deleteddirs or wctx.hasdir(f)
        if f in repo.dirstate or isdir or f == b'.' or insubrepo() or f in subs:
            continue

        if repo.wvfs.exists(f):
            if repo.wvfs.isdir(f):
                warnings.append(
                    _(b'not removing %s: no tracked files\n') % uipathfn(f)
                )
            else:
                warnings.append(
                    _(b'not removing %s: file is untracked\n') % uipathfn(f)
                )
        # missing files will generate a warning elsewhere
        ret = 1
    progress.complete()

    # Decide which files actually get removed from tracking.
    if force:
        removelist = modified + deleted + clean + added
    elif after:
        removelist = deleted
        remaining = modified + added + clean
        progress = ui.makeprogress(
            _(b'skipping'), total=len(remaining), unit=_(b'files')
        )
        for f in remaining:
            progress.increment()
            if ui.verbose or (f in files):
                warnings.append(
                    _(b'not removing %s: file still exists\n') % uipathfn(f)
                )
                ret = 1
        progress.complete()
    else:
        removelist = deleted + clean
        progress = ui.makeprogress(
            _(b'skipping'), total=(len(modified) + len(added)), unit=_(b'files')
        )
        for f in modified:
            progress.increment()
            warnings.append(
                _(
                    b'not removing %s: file is modified (use -f'
                    b' to force removal)\n'
                )
                % uipathfn(f)
            )
            ret = 1
        for f in added:
            progress.increment()
            warnings.append(
                _(
                    b"not removing %s: file has been marked for add"
                    b" (use 'hg forget' to undo add)\n"
                )
                % uipathfn(f)
            )
            ret = 1
        progress.complete()

    removelist = sorted(removelist)
    progress = ui.makeprogress(
        _(b'deleting'), total=len(removelist), unit=_(b'files')
    )
    for f in removelist:
        if ui.verbose or not m.exact(f):
            progress.increment()
            ui.status(
                _(b'removing %s\n') % uipathfn(f), label=b'ui.addremove.removed'
            )
    progress.complete()

    if not dryrun:
        with repo.wlock():
            if not after:
                for f in removelist:
                    if f in added:
                        continue  # we never unlink added files on remove
                    rmdir = repo.ui.configbool(
                        b'experimental', b'removeemptydirs'
                    )
                    repo.wvfs.unlinkpath(f, ignoremissing=True, rmdir=rmdir)
            repo[None].forget(removelist)

    if warn:
        for warning in warnings:
            ui.warn(warning)

    return ret
2839
2658
2840
2659
2841 def _catfmtneedsdata(fm):
2660 def _catfmtneedsdata(fm):
2842 return not fm.datahint() or b'data' in fm.datahint()
2661 return not fm.datahint() or b'data' in fm.datahint()
2843
2662
2844
2663
def _updatecatformatter(fm, ctx, matcher, path, decode):
    """Hook for adding data to the formatter used by ``hg cat``.

    Extensions (e.g., lfs) can wrap this to inject keywords/data, but must call
    this method first."""

    # data() can be expensive to fetch (e.g. lfs), so don't fetch it if it
    # wasn't requested.
    data = b''
    if _catfmtneedsdata(fm):
        data = ctx[path].data()
        if decode:
            data = ctx.repo().wwritedata(path, data)
    fm.startitem()
    fm.context(ctx=ctx)
    fm.write(b'data', b'%s', data)
    fm.data(path=path)
2862
2681
2863
2682
def cat(ui, repo, ctx, matcher, basefm, fntemplate, prefix, **opts):
    """Implement ``hg cat``: emit matched file contents through ``basefm``.

    When ``fntemplate`` is given, each file is written to its own output
    file named by the template. Recurses into subrepositories. Returns 0
    if at least one file was written, 1 otherwise.
    """
    err = 1
    opts = pycompat.byteskwargs(opts)

    def write(path):
        filename = None
        if fntemplate:
            filename = makefilename(
                ctx, fntemplate, pathname=os.path.join(prefix, path)
            )
            # attempt to create the directory if it does not already exist
            try:
                os.makedirs(os.path.dirname(filename))
            except OSError:
                pass
        with formatter.maybereopen(basefm, filename) as fm:
            _updatecatformatter(fm, ctx, matcher, path, opts.get(b'decode'))

    # Automation often uses hg cat on single files, so special case it
    # for performance to avoid the cost of parsing the manifest.
    if len(matcher.files()) == 1 and not matcher.anypats():
        filepath = matcher.files()[0]
        mfl = repo.manifestlog
        mfnode = ctx.manifestnode()
        try:
            if mfnode and mfl[mfnode].find(filepath)[0]:
                if _catfmtneedsdata(basefm):
                    scmutil.prefetchfiles(repo, [(ctx.rev(), matcher)])
                write(filepath)
                return 0
        except KeyError:
            pass

    if _catfmtneedsdata(basefm):
        scmutil.prefetchfiles(repo, [(ctx.rev(), matcher)])

    for abspath in ctx.walk(matcher):
        write(abspath)
        err = 0

    uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
    for subpath in sorted(ctx.substate):
        sub = ctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, matcher)
            subprefix = os.path.join(prefix, subpath)
            if not sub.cat(
                submatch,
                basefm,
                fntemplate,
                subprefix,
                **pycompat.strkwargs(opts)
            ):
                err = 0
        except error.RepoLookupError:
            ui.status(
                _(b"skipping missing subrepository: %s\n") % uipathfn(subpath)
            )

    return err
2924
2743
2925
2744
def commit(ui, repo, commitfunc, pats, opts):
    '''commit the specified files or all outstanding changes'''
    date = opts.get(b'date')
    if date:
        opts[b'date'] = dateutil.parsedate(date)
    message = logmessage(ui, opts)
    matcher = scmutil.match(repo[None], pats, opts)

    dsguard = None
    # extract addremove carefully -- this function can be called from a command
    # that doesn't support addremove
    if opts.get(b'addremove'):
        dsguard = dirstateguard.dirstateguard(repo, b'commit')
    # The dirstate guard (when present) rolls back addremove on failure.
    with dsguard or util.nullcontextmanager():
        if dsguard:
            relative = scmutil.anypats(pats, opts)
            uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=relative)
            if scmutil.addremove(repo, matcher, b"", uipathfn, opts) != 0:
                raise error.Abort(
                    _(b"failed to mark all new/missing files as added/removed")
                )

        return commitfunc(ui, repo, message, matcher, opts)
2949
2768
2950
2769
def samefile(f, ctx1, ctx2):
    """Return True if ``f`` is identical in both contexts.

    "Identical" means same content and same flags; a file absent from
    both manifests also counts as identical.
    """
    if f not in ctx1.manifest():
        # Absent from ctx1: equal only if also absent from ctx2.
        return f not in ctx2.manifest()
    a = ctx1.filectx(f)
    if f not in ctx2.manifest():
        return False
    b = ctx2.filectx(f)
    return not a.cmp(b) and a.flags() == b.flags()
2961
2780
2962
2781
2963 def amend(ui, repo, old, extra, pats, opts):
2782 def amend(ui, repo, old, extra, pats, opts):
2964 # avoid cycle context -> subrepo -> cmdutil
2783 # avoid cycle context -> subrepo -> cmdutil
2965 from . import context
2784 from . import context
2966
2785
2967 # amend will reuse the existing user if not specified, but the obsolete
2786 # amend will reuse the existing user if not specified, but the obsolete
2968 # marker creation requires that the current user's name is specified.
2787 # marker creation requires that the current user's name is specified.
2969 if obsolete.isenabled(repo, obsolete.createmarkersopt):
2788 if obsolete.isenabled(repo, obsolete.createmarkersopt):
2970 ui.username() # raise exception if username not set
2789 ui.username() # raise exception if username not set
2971
2790
2972 ui.note(_(b'amending changeset %s\n') % old)
2791 ui.note(_(b'amending changeset %s\n') % old)
2973 base = old.p1()
2792 base = old.p1()
2974
2793
2975 with repo.wlock(), repo.lock(), repo.transaction(b'amend'):
2794 with repo.wlock(), repo.lock(), repo.transaction(b'amend'):
2976 # Participating changesets:
2795 # Participating changesets:
2977 #
2796 #
2978 # wctx o - workingctx that contains changes from working copy
2797 # wctx o - workingctx that contains changes from working copy
2979 # | to go into amending commit
2798 # | to go into amending commit
2980 # |
2799 # |
2981 # old o - changeset to amend
2800 # old o - changeset to amend
2982 # |
2801 # |
2983 # base o - first parent of the changeset to amend
2802 # base o - first parent of the changeset to amend
2984 wctx = repo[None]
2803 wctx = repo[None]
2985
2804
2986 # Copy to avoid mutating input
2805 # Copy to avoid mutating input
2987 extra = extra.copy()
2806 extra = extra.copy()
2988 # Update extra dict from amended commit (e.g. to preserve graft
2807 # Update extra dict from amended commit (e.g. to preserve graft
2989 # source)
2808 # source)
2990 extra.update(old.extra())
2809 extra.update(old.extra())
2991
2810
2992 # Also update it from the from the wctx
2811 # Also update it from the from the wctx
2993 extra.update(wctx.extra())
2812 extra.update(wctx.extra())
2994
2813
2995 # date-only change should be ignored?
2814 # date-only change should be ignored?
2996 datemaydiffer = resolvecommitoptions(ui, opts)
2815 datemaydiffer = resolvecommitoptions(ui, opts)
2997
2816
2998 date = old.date()
2817 date = old.date()
2999 if opts.get(b'date'):
2818 if opts.get(b'date'):
3000 date = dateutil.parsedate(opts.get(b'date'))
2819 date = dateutil.parsedate(opts.get(b'date'))
3001 user = opts.get(b'user') or old.user()
2820 user = opts.get(b'user') or old.user()
3002
2821
3003 if len(old.parents()) > 1:
2822 if len(old.parents()) > 1:
3004 # ctx.files() isn't reliable for merges, so fall back to the
2823 # ctx.files() isn't reliable for merges, so fall back to the
3005 # slower repo.status() method
2824 # slower repo.status() method
3006 st = base.status(old)
2825 st = base.status(old)
3007 files = set(st.modified) | set(st.added) | set(st.removed)
2826 files = set(st.modified) | set(st.added) | set(st.removed)
3008 else:
2827 else:
3009 files = set(old.files())
2828 files = set(old.files())
3010
2829
3011 # add/remove the files to the working copy if the "addremove" option
2830 # add/remove the files to the working copy if the "addremove" option
3012 # was specified.
2831 # was specified.
3013 matcher = scmutil.match(wctx, pats, opts)
2832 matcher = scmutil.match(wctx, pats, opts)
3014 relative = scmutil.anypats(pats, opts)
2833 relative = scmutil.anypats(pats, opts)
3015 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=relative)
2834 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=relative)
3016 if opts.get(b'addremove') and scmutil.addremove(
2835 if opts.get(b'addremove') and scmutil.addremove(
3017 repo, matcher, b"", uipathfn, opts
2836 repo, matcher, b"", uipathfn, opts
3018 ):
2837 ):
3019 raise error.Abort(
2838 raise error.Abort(
3020 _(b"failed to mark all new/missing files as added/removed")
2839 _(b"failed to mark all new/missing files as added/removed")
3021 )
2840 )
3022
2841
3023 # Check subrepos. This depends on in-place wctx._status update in
2842 # Check subrepos. This depends on in-place wctx._status update in
3024 # subrepo.precommit(). To minimize the risk of this hack, we do
2843 # subrepo.precommit(). To minimize the risk of this hack, we do
3025 # nothing if .hgsub does not exist.
2844 # nothing if .hgsub does not exist.
3026 if b'.hgsub' in wctx or b'.hgsub' in old:
2845 if b'.hgsub' in wctx or b'.hgsub' in old:
3027 subs, commitsubs, newsubstate = subrepoutil.precommit(
2846 subs, commitsubs, newsubstate = subrepoutil.precommit(
3028 ui, wctx, wctx._status, matcher
2847 ui, wctx, wctx._status, matcher
3029 )
2848 )
3030 # amend should abort if commitsubrepos is enabled
2849 # amend should abort if commitsubrepos is enabled
3031 assert not commitsubs
2850 assert not commitsubs
3032 if subs:
2851 if subs:
3033 subrepoutil.writestate(repo, newsubstate)
2852 subrepoutil.writestate(repo, newsubstate)
3034
2853
3035 ms = mergestatemod.mergestate.read(repo)
2854 ms = mergestatemod.mergestate.read(repo)
3036 mergeutil.checkunresolved(ms)
2855 mergeutil.checkunresolved(ms)
3037
2856
3038 filestoamend = {f for f in wctx.files() if matcher(f)}
2857 filestoamend = {f for f in wctx.files() if matcher(f)}
3039
2858
3040 changes = len(filestoamend) > 0
2859 changes = len(filestoamend) > 0
3041 if changes:
2860 if changes:
3042 # Recompute copies (avoid recording a -> b -> a)
2861 # Recompute copies (avoid recording a -> b -> a)
3043 copied = copies.pathcopies(base, wctx, matcher)
2862 copied = copies.pathcopies(base, wctx, matcher)
3044 if old.p2:
2863 if old.p2:
3045 copied.update(copies.pathcopies(old.p2(), wctx, matcher))
2864 copied.update(copies.pathcopies(old.p2(), wctx, matcher))
3046
2865
3047 # Prune files which were reverted by the updates: if old
2866 # Prune files which were reverted by the updates: if old
3048 # introduced file X and the file was renamed in the working
2867 # introduced file X and the file was renamed in the working
3049 # copy, then those two files are the same and
2868 # copy, then those two files are the same and
3050 # we can discard X from our list of files. Likewise if X
2869 # we can discard X from our list of files. Likewise if X
3051 # was removed, it's no longer relevant. If X is missing (aka
2870 # was removed, it's no longer relevant. If X is missing (aka
3052 # deleted), old X must be preserved.
2871 # deleted), old X must be preserved.
3053 files.update(filestoamend)
2872 files.update(filestoamend)
3054 files = [
2873 files = [
3055 f
2874 f
3056 for f in files
2875 for f in files
3057 if (f not in filestoamend or not samefile(f, wctx, base))
2876 if (f not in filestoamend or not samefile(f, wctx, base))
3058 ]
2877 ]
3059
2878
3060 def filectxfn(repo, ctx_, path):
2879 def filectxfn(repo, ctx_, path):
3061 try:
2880 try:
3062 # If the file being considered is not amongst the files
2881 # If the file being considered is not amongst the files
3063 # to be amended, we should return the file context from the
2882 # to be amended, we should return the file context from the
3064 # old changeset. This avoids issues when only some files in
2883 # old changeset. This avoids issues when only some files in
3065 # the working copy are being amended but there are also
2884 # the working copy are being amended but there are also
3066 # changes to other files from the old changeset.
2885 # changes to other files from the old changeset.
3067 if path not in filestoamend:
2886 if path not in filestoamend:
3068 return old.filectx(path)
2887 return old.filectx(path)
3069
2888
3070 # Return None for removed files.
2889 # Return None for removed files.
3071 if path in wctx.removed():
2890 if path in wctx.removed():
3072 return None
2891 return None
3073
2892
3074 fctx = wctx[path]
2893 fctx = wctx[path]
3075 flags = fctx.flags()
2894 flags = fctx.flags()
3076 mctx = context.memfilectx(
2895 mctx = context.memfilectx(
3077 repo,
2896 repo,
3078 ctx_,
2897 ctx_,
3079 fctx.path(),
2898 fctx.path(),
3080 fctx.data(),
2899 fctx.data(),
3081 islink=b'l' in flags,
2900 islink=b'l' in flags,
3082 isexec=b'x' in flags,
2901 isexec=b'x' in flags,
3083 copysource=copied.get(path),
2902 copysource=copied.get(path),
3084 )
2903 )
3085 return mctx
2904 return mctx
3086 except KeyError:
2905 except KeyError:
3087 return None
2906 return None
3088
2907
3089 else:
2908 else:
3090 ui.note(_(b'copying changeset %s to %s\n') % (old, base))
2909 ui.note(_(b'copying changeset %s to %s\n') % (old, base))
3091
2910
3092 # Use version of files as in the old cset
2911 # Use version of files as in the old cset
3093 def filectxfn(repo, ctx_, path):
2912 def filectxfn(repo, ctx_, path):
3094 try:
2913 try:
3095 return old.filectx(path)
2914 return old.filectx(path)
3096 except KeyError:
2915 except KeyError:
3097 return None
2916 return None
3098
2917
3099 # See if we got a message from -m or -l, if not, open the editor with
2918 # See if we got a message from -m or -l, if not, open the editor with
3100 # the message of the changeset to amend.
2919 # the message of the changeset to amend.
3101 message = logmessage(ui, opts)
2920 message = logmessage(ui, opts)
3102
2921
3103 editform = mergeeditform(old, b'commit.amend')
2922 editform = mergeeditform(old, b'commit.amend')
3104
2923
3105 if not message:
2924 if not message:
3106 message = old.description()
2925 message = old.description()
3107 # Default if message isn't provided and --edit is not passed is to
2926 # Default if message isn't provided and --edit is not passed is to
3108 # invoke editor, but allow --no-edit. If somehow we don't have any
2927 # invoke editor, but allow --no-edit. If somehow we don't have any
3109 # description, let's always start the editor.
2928 # description, let's always start the editor.
3110 doedit = not message or opts.get(b'edit') in [True, None]
2929 doedit = not message or opts.get(b'edit') in [True, None]
3111 else:
2930 else:
3112 # Default if message is provided is to not invoke editor, but allow
2931 # Default if message is provided is to not invoke editor, but allow
3113 # --edit.
2932 # --edit.
3114 doedit = opts.get(b'edit') is True
2933 doedit = opts.get(b'edit') is True
3115 editor = getcommiteditor(edit=doedit, editform=editform)
2934 editor = getcommiteditor(edit=doedit, editform=editform)
3116
2935
3117 pureextra = extra.copy()
2936 pureextra = extra.copy()
3118 extra[b'amend_source'] = old.hex()
2937 extra[b'amend_source'] = old.hex()
3119
2938
3120 new = context.memctx(
2939 new = context.memctx(
3121 repo,
2940 repo,
3122 parents=[base.node(), old.p2().node()],
2941 parents=[base.node(), old.p2().node()],
3123 text=message,
2942 text=message,
3124 files=files,
2943 files=files,
3125 filectxfn=filectxfn,
2944 filectxfn=filectxfn,
3126 user=user,
2945 user=user,
3127 date=date,
2946 date=date,
3128 extra=extra,
2947 extra=extra,
3129 editor=editor,
2948 editor=editor,
3130 )
2949 )
3131
2950
3132 newdesc = changelog.stripdesc(new.description())
2951 newdesc = changelog.stripdesc(new.description())
3133 if (
2952 if (
3134 (not changes)
2953 (not changes)
3135 and newdesc == old.description()
2954 and newdesc == old.description()
3136 and user == old.user()
2955 and user == old.user()
3137 and (date == old.date() or datemaydiffer)
2956 and (date == old.date() or datemaydiffer)
3138 and pureextra == old.extra()
2957 and pureextra == old.extra()
3139 ):
2958 ):
3140 # nothing changed. continuing here would create a new node
2959 # nothing changed. continuing here would create a new node
3141 # anyway because of the amend_source noise.
2960 # anyway because of the amend_source noise.
3142 #
2961 #
3143 # This not what we expect from amend.
2962 # This not what we expect from amend.
3144 return old.node()
2963 return old.node()
3145
2964
3146 commitphase = None
2965 commitphase = None
3147 if opts.get(b'secret'):
2966 if opts.get(b'secret'):
3148 commitphase = phases.secret
2967 commitphase = phases.secret
3149 newid = repo.commitctx(new)
2968 newid = repo.commitctx(new)
3150 ms.reset()
2969 ms.reset()
3151
2970
3152 # Reroute the working copy parent to the new changeset
2971 # Reroute the working copy parent to the new changeset
3153 repo.setparents(newid, nullid)
2972 repo.setparents(newid, nullid)
3154 mapping = {old.node(): (newid,)}
2973 mapping = {old.node(): (newid,)}
3155 obsmetadata = None
2974 obsmetadata = None
3156 if opts.get(b'note'):
2975 if opts.get(b'note'):
3157 obsmetadata = {b'note': encoding.fromlocal(opts[b'note'])}
2976 obsmetadata = {b'note': encoding.fromlocal(opts[b'note'])}
3158 backup = ui.configbool(b'rewrite', b'backup-bundle')
2977 backup = ui.configbool(b'rewrite', b'backup-bundle')
3159 scmutil.cleanupnodes(
2978 scmutil.cleanupnodes(
3160 repo,
2979 repo,
3161 mapping,
2980 mapping,
3162 b'amend',
2981 b'amend',
3163 metadata=obsmetadata,
2982 metadata=obsmetadata,
3164 fixphase=True,
2983 fixphase=True,
3165 targetphase=commitphase,
2984 targetphase=commitphase,
3166 backup=backup,
2985 backup=backup,
3167 )
2986 )
3168
2987
3169 # Fixing the dirstate because localrepo.commitctx does not update
2988 # Fixing the dirstate because localrepo.commitctx does not update
3170 # it. This is rather convenient because we did not need to update
2989 # it. This is rather convenient because we did not need to update
3171 # the dirstate for all the files in the new commit which commitctx
2990 # the dirstate for all the files in the new commit which commitctx
3172 # could have done if it updated the dirstate. Now, we can
2991 # could have done if it updated the dirstate. Now, we can
3173 # selectively update the dirstate only for the amended files.
2992 # selectively update the dirstate only for the amended files.
3174 dirstate = repo.dirstate
2993 dirstate = repo.dirstate
3175
2994
3176 # Update the state of the files which were added and modified in the
2995 # Update the state of the files which were added and modified in the
3177 # amend to "normal" in the dirstate. We need to use "normallookup" since
2996 # amend to "normal" in the dirstate. We need to use "normallookup" since
3178 # the files may have changed since the command started; using "normal"
2997 # the files may have changed since the command started; using "normal"
3179 # would mark them as clean but with uncommitted contents.
2998 # would mark them as clean but with uncommitted contents.
3180 normalfiles = set(wctx.modified() + wctx.added()) & filestoamend
2999 normalfiles = set(wctx.modified() + wctx.added()) & filestoamend
3181 for f in normalfiles:
3000 for f in normalfiles:
3182 dirstate.normallookup(f)
3001 dirstate.normallookup(f)
3183
3002
3184 # Update the state of files which were removed in the amend
3003 # Update the state of files which were removed in the amend
3185 # to "removed" in the dirstate.
3004 # to "removed" in the dirstate.
3186 removedfiles = set(wctx.removed()) & filestoamend
3005 removedfiles = set(wctx.removed()) & filestoamend
3187 for f in removedfiles:
3006 for f in removedfiles:
3188 dirstate.drop(f)
3007 dirstate.drop(f)
3189
3008
3190 return newid
3009 return newid
3191
3010
3192
3011
3193 def commiteditor(repo, ctx, subs, editform=b''):
3012 def commiteditor(repo, ctx, subs, editform=b''):
3194 if ctx.description():
3013 if ctx.description():
3195 return ctx.description()
3014 return ctx.description()
3196 return commitforceeditor(
3015 return commitforceeditor(
3197 repo, ctx, subs, editform=editform, unchangedmessagedetection=True
3016 repo, ctx, subs, editform=editform, unchangedmessagedetection=True
3198 )
3017 )
3199
3018
3200
3019
3201 def commitforceeditor(
3020 def commitforceeditor(
3202 repo,
3021 repo,
3203 ctx,
3022 ctx,
3204 subs,
3023 subs,
3205 finishdesc=None,
3024 finishdesc=None,
3206 extramsg=None,
3025 extramsg=None,
3207 editform=b'',
3026 editform=b'',
3208 unchangedmessagedetection=False,
3027 unchangedmessagedetection=False,
3209 ):
3028 ):
3210 if not extramsg:
3029 if not extramsg:
3211 extramsg = _(b"Leave message empty to abort commit.")
3030 extramsg = _(b"Leave message empty to abort commit.")
3212
3031
3213 forms = [e for e in editform.split(b'.') if e]
3032 forms = [e for e in editform.split(b'.') if e]
3214 forms.insert(0, b'changeset')
3033 forms.insert(0, b'changeset')
3215 templatetext = None
3034 templatetext = None
3216 while forms:
3035 while forms:
3217 ref = b'.'.join(forms)
3036 ref = b'.'.join(forms)
3218 if repo.ui.config(b'committemplate', ref):
3037 if repo.ui.config(b'committemplate', ref):
3219 templatetext = committext = buildcommittemplate(
3038 templatetext = committext = buildcommittemplate(
3220 repo, ctx, subs, extramsg, ref
3039 repo, ctx, subs, extramsg, ref
3221 )
3040 )
3222 break
3041 break
3223 forms.pop()
3042 forms.pop()
3224 else:
3043 else:
3225 committext = buildcommittext(repo, ctx, subs, extramsg)
3044 committext = buildcommittext(repo, ctx, subs, extramsg)
3226
3045
3227 # run editor in the repository root
3046 # run editor in the repository root
3228 olddir = encoding.getcwd()
3047 olddir = encoding.getcwd()
3229 os.chdir(repo.root)
3048 os.chdir(repo.root)
3230
3049
3231 # make in-memory changes visible to external process
3050 # make in-memory changes visible to external process
3232 tr = repo.currenttransaction()
3051 tr = repo.currenttransaction()
3233 repo.dirstate.write(tr)
3052 repo.dirstate.write(tr)
3234 pending = tr and tr.writepending() and repo.root
3053 pending = tr and tr.writepending() and repo.root
3235
3054
3236 editortext = repo.ui.edit(
3055 editortext = repo.ui.edit(
3237 committext,
3056 committext,
3238 ctx.user(),
3057 ctx.user(),
3239 ctx.extra(),
3058 ctx.extra(),
3240 editform=editform,
3059 editform=editform,
3241 pending=pending,
3060 pending=pending,
3242 repopath=repo.path,
3061 repopath=repo.path,
3243 action=b'commit',
3062 action=b'commit',
3244 )
3063 )
3245 text = editortext
3064 text = editortext
3246
3065
3247 # strip away anything below this special string (used for editors that want
3066 # strip away anything below this special string (used for editors that want
3248 # to display the diff)
3067 # to display the diff)
3249 stripbelow = re.search(_linebelow, text, flags=re.MULTILINE)
3068 stripbelow = re.search(_linebelow, text, flags=re.MULTILINE)
3250 if stripbelow:
3069 if stripbelow:
3251 text = text[: stripbelow.start()]
3070 text = text[: stripbelow.start()]
3252
3071
3253 text = re.sub(b"(?m)^HG:.*(\n|$)", b"", text)
3072 text = re.sub(b"(?m)^HG:.*(\n|$)", b"", text)
3254 os.chdir(olddir)
3073 os.chdir(olddir)
3255
3074
3256 if finishdesc:
3075 if finishdesc:
3257 text = finishdesc(text)
3076 text = finishdesc(text)
3258 if not text.strip():
3077 if not text.strip():
3259 raise error.Abort(_(b"empty commit message"))
3078 raise error.Abort(_(b"empty commit message"))
3260 if unchangedmessagedetection and editortext == templatetext:
3079 if unchangedmessagedetection and editortext == templatetext:
3261 raise error.Abort(_(b"commit message unchanged"))
3080 raise error.Abort(_(b"commit message unchanged"))
3262
3081
3263 return text
3082 return text
3264
3083
3265
3084
3266 def buildcommittemplate(repo, ctx, subs, extramsg, ref):
3085 def buildcommittemplate(repo, ctx, subs, extramsg, ref):
3267 ui = repo.ui
3086 ui = repo.ui
3268 spec = formatter.reference_templatespec(ref)
3087 spec = formatter.reference_templatespec(ref)
3269 t = logcmdutil.changesettemplater(ui, repo, spec)
3088 t = logcmdutil.changesettemplater(ui, repo, spec)
3270 t.t.cache.update(
3089 t.t.cache.update(
3271 (k, templater.unquotestring(v))
3090 (k, templater.unquotestring(v))
3272 for k, v in repo.ui.configitems(b'committemplate')
3091 for k, v in repo.ui.configitems(b'committemplate')
3273 )
3092 )
3274
3093
3275 if not extramsg:
3094 if not extramsg:
3276 extramsg = b'' # ensure that extramsg is string
3095 extramsg = b'' # ensure that extramsg is string
3277
3096
3278 ui.pushbuffer()
3097 ui.pushbuffer()
3279 t.show(ctx, extramsg=extramsg)
3098 t.show(ctx, extramsg=extramsg)
3280 return ui.popbuffer()
3099 return ui.popbuffer()
3281
3100
3282
3101
3283 def hgprefix(msg):
3102 def hgprefix(msg):
3284 return b"\n".join([b"HG: %s" % a for a in msg.split(b"\n") if a])
3103 return b"\n".join([b"HG: %s" % a for a in msg.split(b"\n") if a])
3285
3104
3286
3105
3287 def buildcommittext(repo, ctx, subs, extramsg):
3106 def buildcommittext(repo, ctx, subs, extramsg):
3288 edittext = []
3107 edittext = []
3289 modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
3108 modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
3290 if ctx.description():
3109 if ctx.description():
3291 edittext.append(ctx.description())
3110 edittext.append(ctx.description())
3292 edittext.append(b"")
3111 edittext.append(b"")
3293 edittext.append(b"") # Empty line between message and comments.
3112 edittext.append(b"") # Empty line between message and comments.
3294 edittext.append(
3113 edittext.append(
3295 hgprefix(
3114 hgprefix(
3296 _(
3115 _(
3297 b"Enter commit message."
3116 b"Enter commit message."
3298 b" Lines beginning with 'HG:' are removed."
3117 b" Lines beginning with 'HG:' are removed."
3299 )
3118 )
3300 )
3119 )
3301 )
3120 )
3302 edittext.append(hgprefix(extramsg))
3121 edittext.append(hgprefix(extramsg))
3303 edittext.append(b"HG: --")
3122 edittext.append(b"HG: --")
3304 edittext.append(hgprefix(_(b"user: %s") % ctx.user()))
3123 edittext.append(hgprefix(_(b"user: %s") % ctx.user()))
3305 if ctx.p2():
3124 if ctx.p2():
3306 edittext.append(hgprefix(_(b"branch merge")))
3125 edittext.append(hgprefix(_(b"branch merge")))
3307 if ctx.branch():
3126 if ctx.branch():
3308 edittext.append(hgprefix(_(b"branch '%s'") % ctx.branch()))
3127 edittext.append(hgprefix(_(b"branch '%s'") % ctx.branch()))
3309 if bookmarks.isactivewdirparent(repo):
3128 if bookmarks.isactivewdirparent(repo):
3310 edittext.append(hgprefix(_(b"bookmark '%s'") % repo._activebookmark))
3129 edittext.append(hgprefix(_(b"bookmark '%s'") % repo._activebookmark))
3311 edittext.extend([hgprefix(_(b"subrepo %s") % s) for s in subs])
3130 edittext.extend([hgprefix(_(b"subrepo %s") % s) for s in subs])
3312 edittext.extend([hgprefix(_(b"added %s") % f) for f in added])
3131 edittext.extend([hgprefix(_(b"added %s") % f) for f in added])
3313 edittext.extend([hgprefix(_(b"changed %s") % f) for f in modified])
3132 edittext.extend([hgprefix(_(b"changed %s") % f) for f in modified])
3314 edittext.extend([hgprefix(_(b"removed %s") % f) for f in removed])
3133 edittext.extend([hgprefix(_(b"removed %s") % f) for f in removed])
3315 if not added and not modified and not removed:
3134 if not added and not modified and not removed:
3316 edittext.append(hgprefix(_(b"no files changed")))
3135 edittext.append(hgprefix(_(b"no files changed")))
3317 edittext.append(b"")
3136 edittext.append(b"")
3318
3137
3319 return b"\n".join(edittext)
3138 return b"\n".join(edittext)
3320
3139
3321
3140
3322 def commitstatus(repo, node, branch, bheads=None, opts=None):
3141 def commitstatus(repo, node, branch, bheads=None, opts=None):
3323 if opts is None:
3142 if opts is None:
3324 opts = {}
3143 opts = {}
3325 ctx = repo[node]
3144 ctx = repo[node]
3326 parents = ctx.parents()
3145 parents = ctx.parents()
3327
3146
3328 if (
3147 if (
3329 not opts.get(b'amend')
3148 not opts.get(b'amend')
3330 and bheads
3149 and bheads
3331 and node not in bheads
3150 and node not in bheads
3332 and not any(
3151 and not any(
3333 p.node() in bheads and p.branch() == branch for p in parents
3152 p.node() in bheads and p.branch() == branch for p in parents
3334 )
3153 )
3335 ):
3154 ):
3336 repo.ui.status(_(b'created new head\n'))
3155 repo.ui.status(_(b'created new head\n'))
3337 # The message is not printed for initial roots. For the other
3156 # The message is not printed for initial roots. For the other
3338 # changesets, it is printed in the following situations:
3157 # changesets, it is printed in the following situations:
3339 #
3158 #
3340 # Par column: for the 2 parents with ...
3159 # Par column: for the 2 parents with ...
3341 # N: null or no parent
3160 # N: null or no parent
3342 # B: parent is on another named branch
3161 # B: parent is on another named branch
3343 # C: parent is a regular non head changeset
3162 # C: parent is a regular non head changeset
3344 # H: parent was a branch head of the current branch
3163 # H: parent was a branch head of the current branch
3345 # Msg column: whether we print "created new head" message
3164 # Msg column: whether we print "created new head" message
3346 # In the following, it is assumed that there already exists some
3165 # In the following, it is assumed that there already exists some
3347 # initial branch heads of the current branch, otherwise nothing is
3166 # initial branch heads of the current branch, otherwise nothing is
3348 # printed anyway.
3167 # printed anyway.
3349 #
3168 #
3350 # Par Msg Comment
3169 # Par Msg Comment
3351 # N N y additional topo root
3170 # N N y additional topo root
3352 #
3171 #
3353 # B N y additional branch root
3172 # B N y additional branch root
3354 # C N y additional topo head
3173 # C N y additional topo head
3355 # H N n usual case
3174 # H N n usual case
3356 #
3175 #
3357 # B B y weird additional branch root
3176 # B B y weird additional branch root
3358 # C B y branch merge
3177 # C B y branch merge
3359 # H B n merge with named branch
3178 # H B n merge with named branch
3360 #
3179 #
3361 # C C y additional head from merge
3180 # C C y additional head from merge
3362 # C H n merge with a head
3181 # C H n merge with a head
3363 #
3182 #
3364 # H H n head merge: head count decreases
3183 # H H n head merge: head count decreases
3365
3184
3366 if not opts.get(b'close_branch'):
3185 if not opts.get(b'close_branch'):
3367 for r in parents:
3186 for r in parents:
3368 if r.closesbranch() and r.branch() == branch:
3187 if r.closesbranch() and r.branch() == branch:
3369 repo.ui.status(
3188 repo.ui.status(
3370 _(b'reopening closed branch head %d\n') % r.rev()
3189 _(b'reopening closed branch head %d\n') % r.rev()
3371 )
3190 )
3372
3191
3373 if repo.ui.debugflag:
3192 if repo.ui.debugflag:
3374 repo.ui.write(
3193 repo.ui.write(
3375 _(b'committed changeset %d:%s\n') % (ctx.rev(), ctx.hex())
3194 _(b'committed changeset %d:%s\n') % (ctx.rev(), ctx.hex())
3376 )
3195 )
3377 elif repo.ui.verbose:
3196 elif repo.ui.verbose:
3378 repo.ui.write(_(b'committed changeset %d:%s\n') % (ctx.rev(), ctx))
3197 repo.ui.write(_(b'committed changeset %d:%s\n') % (ctx.rev(), ctx))
3379
3198
3380
3199
3381 def postcommitstatus(repo, pats, opts):
3200 def postcommitstatus(repo, pats, opts):
3382 return repo.status(match=scmutil.match(repo[None], pats, opts))
3201 return repo.status(match=scmutil.match(repo[None], pats, opts))
3383
3202
3384
3203
3385 def revert(ui, repo, ctx, *pats, **opts):
3204 def revert(ui, repo, ctx, *pats, **opts):
3386 opts = pycompat.byteskwargs(opts)
3205 opts = pycompat.byteskwargs(opts)
3387 parent, p2 = repo.dirstate.parents()
3206 parent, p2 = repo.dirstate.parents()
3388 node = ctx.node()
3207 node = ctx.node()
3389
3208
3390 mf = ctx.manifest()
3209 mf = ctx.manifest()
3391 if node == p2:
3210 if node == p2:
3392 parent = p2
3211 parent = p2
3393
3212
3394 # need all matching names in dirstate and manifest of target rev,
3213 # need all matching names in dirstate and manifest of target rev,
3395 # so have to walk both. do not print errors if files exist in one
3214 # so have to walk both. do not print errors if files exist in one
3396 # but not other. in both cases, filesets should be evaluated against
3215 # but not other. in both cases, filesets should be evaluated against
3397 # workingctx to get consistent result (issue4497). this means 'set:**'
3216 # workingctx to get consistent result (issue4497). this means 'set:**'
3398 # cannot be used to select missing files from target rev.
3217 # cannot be used to select missing files from target rev.
3399
3218
3400 # `names` is a mapping for all elements in working copy and target revision
3219 # `names` is a mapping for all elements in working copy and target revision
3401 # The mapping is in the form:
3220 # The mapping is in the form:
3402 # <abs path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
3221 # <abs path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
3403 names = {}
3222 names = {}
3404 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
3223 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
3405
3224
3406 with repo.wlock():
3225 with repo.wlock():
3407 ## filling of the `names` mapping
3226 ## filling of the `names` mapping
3408 # walk dirstate to fill `names`
3227 # walk dirstate to fill `names`
3409
3228
3410 interactive = opts.get(b'interactive', False)
3229 interactive = opts.get(b'interactive', False)
3411 wctx = repo[None]
3230 wctx = repo[None]
3412 m = scmutil.match(wctx, pats, opts)
3231 m = scmutil.match(wctx, pats, opts)
3413
3232
3414 # we'll need this later
3233 # we'll need this later
3415 targetsubs = sorted(s for s in wctx.substate if m(s))
3234 targetsubs = sorted(s for s in wctx.substate if m(s))
3416
3235
3417 if not m.always():
3236 if not m.always():
3418 matcher = matchmod.badmatch(m, lambda x, y: False)
3237 matcher = matchmod.badmatch(m, lambda x, y: False)
3419 for abs in wctx.walk(matcher):
3238 for abs in wctx.walk(matcher):
3420 names[abs] = m.exact(abs)
3239 names[abs] = m.exact(abs)
3421
3240
3422 # walk target manifest to fill `names`
3241 # walk target manifest to fill `names`
3423
3242
3424 def badfn(path, msg):
3243 def badfn(path, msg):
3425 if path in names:
3244 if path in names:
3426 return
3245 return
3427 if path in ctx.substate:
3246 if path in ctx.substate:
3428 return
3247 return
3429 path_ = path + b'/'
3248 path_ = path + b'/'
3430 for f in names:
3249 for f in names:
3431 if f.startswith(path_):
3250 if f.startswith(path_):
3432 return
3251 return
3433 ui.warn(b"%s: %s\n" % (uipathfn(path), msg))
3252 ui.warn(b"%s: %s\n" % (uipathfn(path), msg))
3434
3253
3435 for abs in ctx.walk(matchmod.badmatch(m, badfn)):
3254 for abs in ctx.walk(matchmod.badmatch(m, badfn)):
3436 if abs not in names:
3255 if abs not in names:
3437 names[abs] = m.exact(abs)
3256 names[abs] = m.exact(abs)
3438
3257
3439 # Find status of all file in `names`.
3258 # Find status of all file in `names`.
3440 m = scmutil.matchfiles(repo, names)
3259 m = scmutil.matchfiles(repo, names)
3441
3260
3442 changes = repo.status(
3261 changes = repo.status(
3443 node1=node, match=m, unknown=True, ignored=True, clean=True
3262 node1=node, match=m, unknown=True, ignored=True, clean=True
3444 )
3263 )
3445 else:
3264 else:
3446 changes = repo.status(node1=node, match=m)
3265 changes = repo.status(node1=node, match=m)
3447 for kind in changes:
3266 for kind in changes:
3448 for abs in kind:
3267 for abs in kind:
3449 names[abs] = m.exact(abs)
3268 names[abs] = m.exact(abs)
3450
3269
3451 m = scmutil.matchfiles(repo, names)
3270 m = scmutil.matchfiles(repo, names)
3452
3271
3453 modified = set(changes.modified)
3272 modified = set(changes.modified)
3454 added = set(changes.added)
3273 added = set(changes.added)
3455 removed = set(changes.removed)
3274 removed = set(changes.removed)
3456 _deleted = set(changes.deleted)
3275 _deleted = set(changes.deleted)
3457 unknown = set(changes.unknown)
3276 unknown = set(changes.unknown)
3458 unknown.update(changes.ignored)
3277 unknown.update(changes.ignored)
3459 clean = set(changes.clean)
3278 clean = set(changes.clean)
3460 modadded = set()
3279 modadded = set()
3461
3280
3462 # We need to account for the state of the file in the dirstate,
3281 # We need to account for the state of the file in the dirstate,
3463 # even when we revert against something else than parent. This will
3282 # even when we revert against something else than parent. This will
3464 # slightly alter the behavior of revert (doing back up or not, delete
3283 # slightly alter the behavior of revert (doing back up or not, delete
3465 # or just forget etc).
3284 # or just forget etc).
3466 if parent == node:
3285 if parent == node:
3467 dsmodified = modified
3286 dsmodified = modified
3468 dsadded = added
3287 dsadded = added
3469 dsremoved = removed
3288 dsremoved = removed
3470 # store all local modifications, useful later for rename detection
3289 # store all local modifications, useful later for rename detection
3471 localchanges = dsmodified | dsadded
3290 localchanges = dsmodified | dsadded
3472 modified, added, removed = set(), set(), set()
3291 modified, added, removed = set(), set(), set()
3473 else:
3292 else:
3474 changes = repo.status(node1=parent, match=m)
3293 changes = repo.status(node1=parent, match=m)
3475 dsmodified = set(changes.modified)
3294 dsmodified = set(changes.modified)
3476 dsadded = set(changes.added)
3295 dsadded = set(changes.added)
3477 dsremoved = set(changes.removed)
3296 dsremoved = set(changes.removed)
3478 # store all local modifications, useful later for rename detection
3297 # store all local modifications, useful later for rename detection
3479 localchanges = dsmodified | dsadded
3298 localchanges = dsmodified | dsadded
3480
3299
3481 # only take into account for removes between wc and target
3300 # only take into account for removes between wc and target
3482 clean |= dsremoved - removed
3301 clean |= dsremoved - removed
3483 dsremoved &= removed
3302 dsremoved &= removed
3484 # distinct between dirstate remove and other
3303 # distinct between dirstate remove and other
3485 removed -= dsremoved
3304 removed -= dsremoved
3486
3305
3487 modadded = added & dsmodified
3306 modadded = added & dsmodified
3488 added -= modadded
3307 added -= modadded
3489
3308
3490 # tell newly modified apart.
3309 # tell newly modified apart.
3491 dsmodified &= modified
3310 dsmodified &= modified
3492 dsmodified |= modified & dsadded # dirstate added may need backup
3311 dsmodified |= modified & dsadded # dirstate added may need backup
3493 modified -= dsmodified
3312 modified -= dsmodified
3494
3313
3495 # We need to wait for some post-processing to update this set
3314 # We need to wait for some post-processing to update this set
3496 # before making the distinction. The dirstate will be used for
3315 # before making the distinction. The dirstate will be used for
3497 # that purpose.
3316 # that purpose.
3498 dsadded = added
3317 dsadded = added
3499
3318
3500 # in case of merge, files that are actually added can be reported as
3319 # in case of merge, files that are actually added can be reported as
3501 # modified, we need to post process the result
3320 # modified, we need to post process the result
3502 if p2 != nullid:
3321 if p2 != nullid:
3503 mergeadd = set(dsmodified)
3322 mergeadd = set(dsmodified)
3504 for path in dsmodified:
3323 for path in dsmodified:
3505 if path in mf:
3324 if path in mf:
3506 mergeadd.remove(path)
3325 mergeadd.remove(path)
3507 dsadded |= mergeadd
3326 dsadded |= mergeadd
3508 dsmodified -= mergeadd
3327 dsmodified -= mergeadd
3509
3328
3510 # if f is a rename, update `names` to also revert the source
3329 # if f is a rename, update `names` to also revert the source
3511 for f in localchanges:
3330 for f in localchanges:
3512 src = repo.dirstate.copied(f)
3331 src = repo.dirstate.copied(f)
3513 # XXX should we check for rename down to target node?
3332 # XXX should we check for rename down to target node?
3514 if src and src not in names and repo.dirstate[src] == b'r':
3333 if src and src not in names and repo.dirstate[src] == b'r':
3515 dsremoved.add(src)
3334 dsremoved.add(src)
3516 names[src] = True
3335 names[src] = True
3517
3336
3518 # determine the exact nature of the deleted changesets
3337 # determine the exact nature of the deleted changesets
3519 deladded = set(_deleted)
3338 deladded = set(_deleted)
3520 for path in _deleted:
3339 for path in _deleted:
3521 if path in mf:
3340 if path in mf:
3522 deladded.remove(path)
3341 deladded.remove(path)
3523 deleted = _deleted - deladded
3342 deleted = _deleted - deladded
3524
3343
3525 # distinguish between file to forget and the other
3344 # distinguish between file to forget and the other
3526 added = set()
3345 added = set()
3527 for abs in dsadded:
3346 for abs in dsadded:
3528 if repo.dirstate[abs] != b'a':
3347 if repo.dirstate[abs] != b'a':
3529 added.add(abs)
3348 added.add(abs)
3530 dsadded -= added
3349 dsadded -= added
3531
3350
3532 for abs in deladded:
3351 for abs in deladded:
3533 if repo.dirstate[abs] == b'a':
3352 if repo.dirstate[abs] == b'a':
3534 dsadded.add(abs)
3353 dsadded.add(abs)
3535 deladded -= dsadded
3354 deladded -= dsadded
3536
3355
3537 # For files marked as removed, we check if an unknown file is present at
3356 # For files marked as removed, we check if an unknown file is present at
3538 # the same path. If a such file exists it may need to be backed up.
3357 # the same path. If a such file exists it may need to be backed up.
3539 # Making the distinction at this stage helps have simpler backup
3358 # Making the distinction at this stage helps have simpler backup
3540 # logic.
3359 # logic.
3541 removunk = set()
3360 removunk = set()
3542 for abs in removed:
3361 for abs in removed:
3543 target = repo.wjoin(abs)
3362 target = repo.wjoin(abs)
3544 if os.path.lexists(target):
3363 if os.path.lexists(target):
3545 removunk.add(abs)
3364 removunk.add(abs)
3546 removed -= removunk
3365 removed -= removunk
3547
3366
3548 dsremovunk = set()
3367 dsremovunk = set()
3549 for abs in dsremoved:
3368 for abs in dsremoved:
3550 target = repo.wjoin(abs)
3369 target = repo.wjoin(abs)
3551 if os.path.lexists(target):
3370 if os.path.lexists(target):
3552 dsremovunk.add(abs)
3371 dsremovunk.add(abs)
3553 dsremoved -= dsremovunk
3372 dsremoved -= dsremovunk
3554
3373
3555 # action to be actually performed by revert
3374 # action to be actually performed by revert
3556 # (<list of file>, message>) tuple
3375 # (<list of file>, message>) tuple
3557 actions = {
3376 actions = {
3558 b'revert': ([], _(b'reverting %s\n')),
3377 b'revert': ([], _(b'reverting %s\n')),
3559 b'add': ([], _(b'adding %s\n')),
3378 b'add': ([], _(b'adding %s\n')),
3560 b'remove': ([], _(b'removing %s\n')),
3379 b'remove': ([], _(b'removing %s\n')),
3561 b'drop': ([], _(b'removing %s\n')),
3380 b'drop': ([], _(b'removing %s\n')),
3562 b'forget': ([], _(b'forgetting %s\n')),
3381 b'forget': ([], _(b'forgetting %s\n')),
3563 b'undelete': ([], _(b'undeleting %s\n')),
3382 b'undelete': ([], _(b'undeleting %s\n')),
3564 b'noop': (None, _(b'no changes needed to %s\n')),
3383 b'noop': (None, _(b'no changes needed to %s\n')),
3565 b'unknown': (None, _(b'file not managed: %s\n')),
3384 b'unknown': (None, _(b'file not managed: %s\n')),
3566 }
3385 }
3567
3386
3568 # "constant" that convey the backup strategy.
3387 # "constant" that convey the backup strategy.
3569 # All set to `discard` if `no-backup` is set do avoid checking
3388 # All set to `discard` if `no-backup` is set do avoid checking
3570 # no_backup lower in the code.
3389 # no_backup lower in the code.
3571 # These values are ordered for comparison purposes
3390 # These values are ordered for comparison purposes
3572 backupinteractive = 3 # do backup if interactively modified
3391 backupinteractive = 3 # do backup if interactively modified
3573 backup = 2 # unconditionally do backup
3392 backup = 2 # unconditionally do backup
3574 check = 1 # check if the existing file differs from target
3393 check = 1 # check if the existing file differs from target
3575 discard = 0 # never do backup
3394 discard = 0 # never do backup
3576 if opts.get(b'no_backup'):
3395 if opts.get(b'no_backup'):
3577 backupinteractive = backup = check = discard
3396 backupinteractive = backup = check = discard
3578 if interactive:
3397 if interactive:
3579 dsmodifiedbackup = backupinteractive
3398 dsmodifiedbackup = backupinteractive
3580 else:
3399 else:
3581 dsmodifiedbackup = backup
3400 dsmodifiedbackup = backup
3582 tobackup = set()
3401 tobackup = set()
3583
3402
3584 backupanddel = actions[b'remove']
3403 backupanddel = actions[b'remove']
3585 if not opts.get(b'no_backup'):
3404 if not opts.get(b'no_backup'):
3586 backupanddel = actions[b'drop']
3405 backupanddel = actions[b'drop']
3587
3406
3588 disptable = (
3407 disptable = (
3589 # dispatch table:
3408 # dispatch table:
3590 # file state
3409 # file state
3591 # action
3410 # action
3592 # make backup
3411 # make backup
3593 ## Sets that results that will change file on disk
3412 ## Sets that results that will change file on disk
3594 # Modified compared to target, no local change
3413 # Modified compared to target, no local change
3595 (modified, actions[b'revert'], discard),
3414 (modified, actions[b'revert'], discard),
3596 # Modified compared to target, but local file is deleted
3415 # Modified compared to target, but local file is deleted
3597 (deleted, actions[b'revert'], discard),
3416 (deleted, actions[b'revert'], discard),
3598 # Modified compared to target, local change
3417 # Modified compared to target, local change
3599 (dsmodified, actions[b'revert'], dsmodifiedbackup),
3418 (dsmodified, actions[b'revert'], dsmodifiedbackup),
3600 # Added since target
3419 # Added since target
3601 (added, actions[b'remove'], discard),
3420 (added, actions[b'remove'], discard),
3602 # Added in working directory
3421 # Added in working directory
3603 (dsadded, actions[b'forget'], discard),
3422 (dsadded, actions[b'forget'], discard),
3604 # Added since target, have local modification
3423 # Added since target, have local modification
3605 (modadded, backupanddel, backup),
3424 (modadded, backupanddel, backup),
3606 # Added since target but file is missing in working directory
3425 # Added since target but file is missing in working directory
3607 (deladded, actions[b'drop'], discard),
3426 (deladded, actions[b'drop'], discard),
3608 # Removed since target, before working copy parent
3427 # Removed since target, before working copy parent
3609 (removed, actions[b'add'], discard),
3428 (removed, actions[b'add'], discard),
3610 # Same as `removed` but an unknown file exists at the same path
3429 # Same as `removed` but an unknown file exists at the same path
3611 (removunk, actions[b'add'], check),
3430 (removunk, actions[b'add'], check),
3612 # Removed since target, marked as such in working copy parent
3431 # Removed since target, marked as such in working copy parent
3613 (dsremoved, actions[b'undelete'], discard),
3432 (dsremoved, actions[b'undelete'], discard),
3614 # Same as `dsremoved` but an unknown file exists at the same path
3433 # Same as `dsremoved` but an unknown file exists at the same path
3615 (dsremovunk, actions[b'undelete'], check),
3434 (dsremovunk, actions[b'undelete'], check),
3616 ## the following sets does not result in any file changes
3435 ## the following sets does not result in any file changes
3617 # File with no modification
3436 # File with no modification
3618 (clean, actions[b'noop'], discard),
3437 (clean, actions[b'noop'], discard),
3619 # Existing file, not tracked anywhere
3438 # Existing file, not tracked anywhere
3620 (unknown, actions[b'unknown'], discard),
3439 (unknown, actions[b'unknown'], discard),
3621 )
3440 )
3622
3441
3623 for abs, exact in sorted(names.items()):
3442 for abs, exact in sorted(names.items()):
3624 # target file to be touch on disk (relative to cwd)
3443 # target file to be touch on disk (relative to cwd)
3625 target = repo.wjoin(abs)
3444 target = repo.wjoin(abs)
3626 # search the entry in the dispatch table.
3445 # search the entry in the dispatch table.
3627 # if the file is in any of these sets, it was touched in the working
3446 # if the file is in any of these sets, it was touched in the working
3628 # directory parent and we are sure it needs to be reverted.
3447 # directory parent and we are sure it needs to be reverted.
3629 for table, (xlist, msg), dobackup in disptable:
3448 for table, (xlist, msg), dobackup in disptable:
3630 if abs not in table:
3449 if abs not in table:
3631 continue
3450 continue
3632 if xlist is not None:
3451 if xlist is not None:
3633 xlist.append(abs)
3452 xlist.append(abs)
3634 if dobackup:
3453 if dobackup:
3635 # If in interactive mode, don't automatically create
3454 # If in interactive mode, don't automatically create
3636 # .orig files (issue4793)
3455 # .orig files (issue4793)
3637 if dobackup == backupinteractive:
3456 if dobackup == backupinteractive:
3638 tobackup.add(abs)
3457 tobackup.add(abs)
3639 elif backup <= dobackup or wctx[abs].cmp(ctx[abs]):
3458 elif backup <= dobackup or wctx[abs].cmp(ctx[abs]):
3640 absbakname = scmutil.backuppath(ui, repo, abs)
3459 absbakname = scmutil.backuppath(ui, repo, abs)
3641 bakname = os.path.relpath(
3460 bakname = os.path.relpath(
3642 absbakname, start=repo.root
3461 absbakname, start=repo.root
3643 )
3462 )
3644 ui.note(
3463 ui.note(
3645 _(b'saving current version of %s as %s\n')
3464 _(b'saving current version of %s as %s\n')
3646 % (uipathfn(abs), uipathfn(bakname))
3465 % (uipathfn(abs), uipathfn(bakname))
3647 )
3466 )
3648 if not opts.get(b'dry_run'):
3467 if not opts.get(b'dry_run'):
3649 if interactive:
3468 if interactive:
3650 util.copyfile(target, absbakname)
3469 util.copyfile(target, absbakname)
3651 else:
3470 else:
3652 util.rename(target, absbakname)
3471 util.rename(target, absbakname)
3653 if opts.get(b'dry_run'):
3472 if opts.get(b'dry_run'):
3654 if ui.verbose or not exact:
3473 if ui.verbose or not exact:
3655 ui.status(msg % uipathfn(abs))
3474 ui.status(msg % uipathfn(abs))
3656 elif exact:
3475 elif exact:
3657 ui.warn(msg % uipathfn(abs))
3476 ui.warn(msg % uipathfn(abs))
3658 break
3477 break
3659
3478
3660 if not opts.get(b'dry_run'):
3479 if not opts.get(b'dry_run'):
3661 needdata = (b'revert', b'add', b'undelete')
3480 needdata = (b'revert', b'add', b'undelete')
3662 oplist = [actions[name][0] for name in needdata]
3481 oplist = [actions[name][0] for name in needdata]
3663 prefetch = scmutil.prefetchfiles
3482 prefetch = scmutil.prefetchfiles
3664 matchfiles = scmutil.matchfiles(
3483 matchfiles = scmutil.matchfiles(
3665 repo, [f for sublist in oplist for f in sublist]
3484 repo, [f for sublist in oplist for f in sublist]
3666 )
3485 )
3667 prefetch(
3486 prefetch(
3668 repo, [(ctx.rev(), matchfiles)],
3487 repo, [(ctx.rev(), matchfiles)],
3669 )
3488 )
3670 match = scmutil.match(repo[None], pats)
3489 match = scmutil.match(repo[None], pats)
3671 _performrevert(
3490 _performrevert(
3672 repo,
3491 repo,
3673 ctx,
3492 ctx,
3674 names,
3493 names,
3675 uipathfn,
3494 uipathfn,
3676 actions,
3495 actions,
3677 match,
3496 match,
3678 interactive,
3497 interactive,
3679 tobackup,
3498 tobackup,
3680 )
3499 )
3681
3500
3682 if targetsubs:
3501 if targetsubs:
3683 # Revert the subrepos on the revert list
3502 # Revert the subrepos on the revert list
3684 for sub in targetsubs:
3503 for sub in targetsubs:
3685 try:
3504 try:
3686 wctx.sub(sub).revert(
3505 wctx.sub(sub).revert(
3687 ctx.substate[sub], *pats, **pycompat.strkwargs(opts)
3506 ctx.substate[sub], *pats, **pycompat.strkwargs(opts)
3688 )
3507 )
3689 except KeyError:
3508 except KeyError:
3690 raise error.Abort(
3509 raise error.Abort(
3691 b"subrepository '%s' does not exist in %s!"
3510 b"subrepository '%s' does not exist in %s!"
3692 % (sub, short(ctx.node()))
3511 % (sub, short(ctx.node()))
3693 )
3512 )
3694
3513
3695
3514
def _performrevert(
    repo,
    ctx,
    names,
    uipathfn,
    actions,
    match,
    interactive=False,
    tobackup=None,
):
    """function that actually perform all the actions computed for revert

    This is an independent function to let extension to plug in and react to
    the imminent revert.

    Make sure you have the working directory locked when calling this function.
    """
    parent, p2 = repo.dirstate.parents()
    node = ctx.node()
    # files the user declined to touch in interactive mode
    excluded_files = []

    def checkout(f):
        # write the target revision's content (and flags) of f to disk
        fc = ctx[f]
        repo.wwrite(f, fc.data(), fc.flags())

    def doremove(f):
        try:
            rmdir = repo.ui.configbool(b'experimental', b'removeemptydirs')
            repo.wvfs.unlinkpath(f, rmdir=rmdir)
        except OSError:
            # best effort: the file may already be gone
            pass
        repo.dirstate.remove(f)

    def prntstatusmsg(action, f):
        # only report files the user did not name explicitly, unless verbose
        exact = names[f]
        if repo.ui.verbose or not exact:
            repo.ui.status(actions[action][1] % uipathfn(f))

    audit_path = pathutil.pathauditor(repo.root, cached=True)
    for f in actions[b'forget'][0]:
        if interactive:
            choice = repo.ui.promptchoice(
                _(b"forget added file %s (Yn)?$$ &Yes $$ &No") % uipathfn(f)
            )
            if choice == 0:
                prntstatusmsg(b'forget', f)
                repo.dirstate.drop(f)
            else:
                excluded_files.append(f)
        else:
            prntstatusmsg(b'forget', f)
            repo.dirstate.drop(f)
    for f in actions[b'remove'][0]:
        audit_path(f)
        if interactive:
            choice = repo.ui.promptchoice(
                _(b"remove added file %s (Yn)?$$ &Yes $$ &No") % uipathfn(f)
            )
            if choice == 0:
                prntstatusmsg(b'remove', f)
                doremove(f)
            else:
                excluded_files.append(f)
        else:
            prntstatusmsg(b'remove', f)
            doremove(f)
    for f in actions[b'drop'][0]:
        audit_path(f)
        prntstatusmsg(b'drop', f)
        repo.dirstate.remove(f)

    normal = None
    if node == parent:
        # We're reverting to our parent. If possible, we'd like status
        # to report the file as clean. We have to use normallookup for
        # merges to avoid losing information about merged/dirty files.
        if p2 != nullid:
            normal = repo.dirstate.normallookup
        else:
            normal = repo.dirstate.normal

    newlyaddedandmodifiedfiles = set()
    if interactive:
        # Prompt the user for changes to revert
        torevert = [f for f in actions[b'revert'][0] if f not in excluded_files]
        m = scmutil.matchfiles(repo, torevert)
        diffopts = patch.difffeatureopts(
            repo.ui,
            whitespace=True,
            section=b'commands',
            configprefix=b'revert.interactive.',
        )
        diffopts.nodates = True
        diffopts.git = True
        operation = b'apply'
        if node == parent:
            if repo.ui.configbool(
                b'experimental', b'revert.interactive.select-to-keep'
            ):
                operation = b'keep'
            else:
                operation = b'discard'

        # diff direction depends on whether selected hunks are applied or
        # discarded, so the prompt shows changes in the natural orientation
        if operation == b'apply':
            diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
        else:
            diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
        originalchunks = patch.parsepatch(diff)

        try:

            chunks, opts = recordfilter(
                repo.ui, originalchunks, match, operation=operation
            )
            if operation == b'discard':
                chunks = patch.reversehunks(chunks)

        except error.PatchError as err:
            raise error.Abort(_(b'error parsing patch: %s') % err)

        # FIXME: when doing an interactive revert of a copy, there's no way of
        # performing a partial revert of the added file, the only option is
        # "remove added file <name> (Yn)?", so we don't need to worry about the
        # alsorestore value. Ideally we'd be able to partially revert
        # copied/renamed files.
        newlyaddedandmodifiedfiles, unusedalsorestore = newandmodified(
            chunks, originalchunks
        )
        if tobackup is None:
            tobackup = set()
        # Apply changes
        fp = stringio()
        # chunks are serialized per file, but files aren't sorted
        for f in sorted({c.header.filename() for c in chunks if ishunk(c)}):
            prntstatusmsg(b'revert', f)
        files = set()
        for c in chunks:
            if ishunk(c):
                abs = c.header.filename()
                # Create a backup file only if this hunk should be backed up
                if c.header.filename() in tobackup:
                    target = repo.wjoin(abs)
                    bakname = scmutil.backuppath(repo.ui, repo, abs)
                    util.copyfile(target, bakname)
                    tobackup.remove(abs)
                if abs not in files:
                    files.add(abs)
                    if operation == b'keep':
                        checkout(abs)
            c.write(fp)
        dopatch = fp.tell()
        fp.seek(0)
        if dopatch:
            try:
                patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
            except error.PatchError as err:
                raise error.Abort(pycompat.bytestr(err))
        del fp
    else:
        for f in actions[b'revert'][0]:
            prntstatusmsg(b'revert', f)
            checkout(f)
            if normal:
                normal(f)

    for f in actions[b'add'][0]:
        # Don't checkout modified files, they are already created by the diff
        if f not in newlyaddedandmodifiedfiles:
            prntstatusmsg(b'add', f)
            checkout(f)
        repo.dirstate.add(f)

    normal = repo.dirstate.normallookup
    if node == parent and p2 == nullid:
        normal = repo.dirstate.normal
    for f in actions[b'undelete'][0]:
        if interactive:
            choice = repo.ui.promptchoice(
                _(b"add back removed file %s (Yn)?$$ &Yes $$ &No") % f
            )
            if choice == 0:
                prntstatusmsg(b'undelete', f)
                checkout(f)
                normal(f)
            else:
                excluded_files.append(f)
        else:
            prntstatusmsg(b'undelete', f)
            checkout(f)
            normal(f)

    # propagate copy/rename information for everything we restored
    copied = copies.pathcopies(repo[parent], ctx)

    for f in (
        actions[b'add'][0] + actions[b'undelete'][0] + actions[b'revert'][0]
    ):
        if f in copied:
            repo.dirstate.copy(copied[f], f)
3894
3713
3895
3714
# a list of (ui, repo, otherpeer, opts, missing) functions called by
# commands.outgoing. "missing" is "missing" of the result of
# "findcommonoutgoing()"
outgoinghooks = util.hooks()

# a list of (ui, repo) functions called by commands.summary
summaryhooks = util.hooks()

# a list of (ui, repo, opts, changes) functions called by commands.summary.
#
# functions should return tuple of booleans below, if 'changes' is None:
#     (whether-incomings-are-needed, whether-outgoings-are-needed)
#
# otherwise, 'changes' is a tuple of tuples below:
# - (sourceurl, sourcebranch, sourcepeer, incoming)
# - (desturl, destbranch, destpeer, outgoing)
summaryremotehooks = util.hooks()
3913
3732
3914
3733
def checkunfinished(repo, commit=False, skipmerge=False):
    '''Look for an unfinished multistep operation, like graft, and abort
    if found. It's probably good to check this right before
    bailifchanged().
    '''
    # Check for non-clearable states first, so things like rebase will take
    # precedence over update.
    for state in statemod._unfinishedstates:
        if (
            state._clearable
            or (commit and state._allowcommit)
            or state._reportonly
        ):
            continue
        if state.isunfinished(repo):
            raise error.Abort(state.msg(), hint=state.hint())

    # Second pass: clearable states, honoring commit/merge exemptions.
    for s in statemod._unfinishedstates:
        if (
            not s._clearable
            or (commit and s._allowcommit)
            or (s._opname == b'merge' and skipmerge)
            or s._reportonly
        ):
            continue
        if s.isunfinished(repo):
            raise error.Abort(s.msg(), hint=s.hint())
3942
3761
3943
3762
def clearunfinished(repo):
    '''Check for unfinished operations (as above), and clear the ones
    that are clearable.
    '''
    # abort on non-clearable unfinished operations before clearing anything
    for state in statemod._unfinishedstates:
        if state._reportonly:
            continue
        if not state._clearable and state.isunfinished(repo):
            raise error.Abort(state.msg(), hint=state.hint())

    for s in statemod._unfinishedstates:
        # BUGFIX: the original tested `state._reportonly`, reusing the
        # leftover loop variable from the loop above (i.e. always the last
        # state checked there) instead of the current `s`.
        if s._opname == b'merge' or s._reportonly:
            continue
        if s._clearable and s.isunfinished(repo):
            util.unlink(repo.vfs.join(s._fname))
3959
3778
3960
3779
def getunfinishedstate(repo):
    ''' Checks for unfinished operations and returns statecheck object
    for it'''
    # first matching state wins; list order encodes precedence
    for state in statemod._unfinishedstates:
        if state.isunfinished(repo):
            return state
    return None
3968
3787
3969
3788
def howtocontinue(repo):
    '''Check for an unfinished operation and return the command to finish
    it.

    statemod._unfinishedstates list is checked for an unfinished operation
    and the corresponding message to finish it is generated if a method to
    continue is supported by the operation.

    Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
    a boolean.
    '''
    contmsg = _(b"continue: %s")
    for state in statemod._unfinishedstates:
        if not state._continueflag:
            continue
        if state.isunfinished(repo):
            return contmsg % state.continuemsg(), True
    # no unfinished operation: suggest committing if the wdir is dirty
    if repo[None].dirty(missing=True, merge=False, branch=False):
        return contmsg % _(b"hg commit"), False
    return None, None
3990
3809
3991
3810
def checkafterresolved(repo):
    '''Inform the user about the next action after completing hg resolve

    If there's a an unfinished operation that supports continue flag,
    howtocontinue will yield repo.ui.warn as the reporter.

    Otherwise, it will yield repo.ui.note.
    '''
    msg, warning = howtocontinue(repo)
    if msg is not None:
        if warning:
            repo.ui.warn(b"%s\n" % msg)
        else:
            repo.ui.note(b"%s\n" % msg)
4006
3825
4007
3826
def wrongtooltocontinue(repo, task):
    '''Raise an abort suggesting how to properly continue if there is an
    active task.

    Uses howtocontinue() to find the active task.

    If there's no task (repo.ui.note for 'hg commit'), it does not offer
    a hint.
    '''
    # unpack the (msg, warning) tuple; only warn-worthy tasks get a hint
    msg, warning = howtocontinue(repo)
    hint = None
    if warning:
        hint = msg
    raise error.Abort(_(b'no %s in progress') % task, hint=hint)
4022
3841
4023
3842
def abortgraft(ui, repo, graftstate):
    """abort the interrupted graft and rollbacks to the state before interrupted
    graft"""
    if not graftstate.exists():
        raise error.Abort(_(b"no interrupted graft to abort"))
    statedata = readgraftstate(repo, graftstate)
    newnodes = statedata.get(b'newnodes')
    if newnodes is None:
        # and old graft state which does not have all the data required to abort
        # the graft
        raise error.Abort(_(b"cannot abort using an old graftstate"))

    # changeset from which graft operation was started
    if len(newnodes) > 0:
        startctx = repo[newnodes[0]].p1()
    else:
        startctx = repo[b'.']
    # whether to strip or not
    cleanup = False

    if newnodes:
        newnodes = [repo[r].rev() for r in newnodes]
        cleanup = True
        # checking that none of the newnodes turned public or is public
        immutable = [c for c in newnodes if not repo[c].mutable()]
        if immutable:
            repo.ui.warn(
                _(b"cannot clean up public changesets %s\n")
                % b', '.join(bytes(repo[r]) for r in immutable),
                hint=_(b"see 'hg help phases' for details"),
            )
            cleanup = False

        # checking that no new nodes are created on top of grafted revs
        desc = set(repo.changelog.descendants(newnodes))
        if desc - set(newnodes):
            repo.ui.warn(
                _(
                    b"new changesets detected on destination "
                    b"branch, can't strip\n"
                )
            )
            cleanup = False

        if cleanup:
            with repo.wlock(), repo.lock():
                mergemod.clean_update(startctx)
                # stripping the new nodes created
                strippoints = [
                    c.node() for c in repo.set(b"roots(%ld)", newnodes)
                ]
                repair.strip(repo.ui, repo, strippoints, backup=False)

    if not cleanup:
        # we don't update to the startnode if we can't strip
        startctx = repo[b'.']
        mergemod.clean_update(startctx)

    ui.status(_(b"graft aborted\n"))
    ui.status(_(b"working directory is now at %s\n") % startctx.hex()[:12])
    graftstate.delete()
    return 0
4086
3905
4087
3906
def readgraftstate(repo, graftstate):
    # type: (Any, statemod.cmdstate) -> Dict[bytes, Any]
    """read the graft state file and return a dict of the data stored in it"""
    try:
        return graftstate.read()
    except error.CorruptedState:
        # fall back to the legacy format: one node per line in 'graftstate'
        nodes = repo.vfs.read(b'graftstate').splitlines()
        return {b'nodes': nodes}
4096
3915
4097
3916
def hgabortgraft(ui, repo):
    """ abort logic for aborting graft using 'hg abort'"""
    # take the wlock so the graft state file cannot change under us
    with repo.wlock():
        graftstate = statemod.cmdstate(repo, b'graftstate')
        return abortgraft(ui, repo, graftstate)
@@ -1,1212 +1,1210
1 # logcmdutil.py - utility for log-like commands
1 # logcmdutil.py - utility for log-like commands
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import itertools
10 import itertools
11 import os
11 import os
12 import posixpath
12 import posixpath
13
13
14 from .i18n import _
14 from .i18n import _
15 from .node import (
15 from .node import (
16 nullid,
16 nullid,
17 wdirid,
17 wdirid,
18 wdirrev,
18 wdirrev,
19 )
19 )
20
20
21 from .thirdparty import attr
21 from .thirdparty import attr
22
22
23 from . import (
23 from . import (
24 dagop,
24 dagop,
25 error,
25 error,
26 formatter,
26 formatter,
27 graphmod,
27 graphmod,
28 match as matchmod,
28 match as matchmod,
29 mdiff,
29 mdiff,
30 patch,
30 patch,
31 pathutil,
31 pathutil,
32 pycompat,
32 pycompat,
33 revset,
33 revset,
34 revsetlang,
34 revsetlang,
35 scmutil,
35 scmutil,
36 smartset,
36 smartset,
37 templatekw,
37 templatekw,
38 templater,
38 templater,
39 util,
39 util,
40 )
40 )
41 from .utils import (
41 from .utils import (
42 dateutil,
42 dateutil,
43 stringutil,
43 stringutil,
44 )
44 )
45
45
46
46
47 if pycompat.TYPE_CHECKING:
47 if pycompat.TYPE_CHECKING:
48 from typing import (
48 from typing import (
49 Any,
49 Any,
50 Callable,
50 Callable,
51 Dict,
51 Dict,
52 List,
52 List,
53 Optional,
53 Optional,
54 Tuple,
54 Tuple,
55 )
55 )
56
56
57 for t in (Any, Callable, Dict, List, Optional, Tuple):
57 for t in (Any, Callable, Dict, List, Optional, Tuple):
58 assert t
58 assert t
59
59
60
60
61 def getlimit(opts):
61 def getlimit(opts):
62 """get the log limit according to option -l/--limit"""
62 """get the log limit according to option -l/--limit"""
63 limit = opts.get(b'limit')
63 limit = opts.get(b'limit')
64 if limit:
64 if limit:
65 try:
65 try:
66 limit = int(limit)
66 limit = int(limit)
67 except ValueError:
67 except ValueError:
68 raise error.Abort(_(b'limit must be a positive integer'))
68 raise error.Abort(_(b'limit must be a positive integer'))
69 if limit <= 0:
69 if limit <= 0:
70 raise error.Abort(_(b'limit must be positive'))
70 raise error.Abort(_(b'limit must be positive'))
71 else:
71 else:
72 limit = None
72 limit = None
73 return limit
73 return limit
74
74
75
75
76 def diffordiffstat(
76 def diffordiffstat(
77 ui,
77 ui,
78 repo,
78 repo,
79 diffopts,
79 diffopts,
80 ctx1,
80 ctx1,
81 ctx2,
81 ctx2,
82 match,
82 match,
83 changes=None,
83 changes=None,
84 stat=False,
84 stat=False,
85 fp=None,
85 fp=None,
86 graphwidth=0,
86 graphwidth=0,
87 prefix=b'',
87 prefix=b'',
88 root=b'',
88 root=b'',
89 listsubrepos=False,
89 listsubrepos=False,
90 hunksfilterfn=None,
90 hunksfilterfn=None,
91 ):
91 ):
92 '''show diff or diffstat.'''
92 '''show diff or diffstat.'''
93 if root:
93 if root:
94 relroot = pathutil.canonpath(repo.root, repo.getcwd(), root)
94 relroot = pathutil.canonpath(repo.root, repo.getcwd(), root)
95 else:
95 else:
96 relroot = b''
96 relroot = b''
97 copysourcematch = None
97 copysourcematch = None
98
98
99 def compose(f, g):
99 def compose(f, g):
100 return lambda x: f(g(x))
100 return lambda x: f(g(x))
101
101
102 def pathfn(f):
102 def pathfn(f):
103 return posixpath.join(prefix, f)
103 return posixpath.join(prefix, f)
104
104
105 if relroot != b'':
105 if relroot != b'':
106 # XXX relative roots currently don't work if the root is within a
106 # XXX relative roots currently don't work if the root is within a
107 # subrepo
107 # subrepo
108 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
108 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
109 uirelroot = uipathfn(pathfn(relroot))
109 uirelroot = uipathfn(pathfn(relroot))
110 relroot += b'/'
110 relroot += b'/'
111 for matchroot in match.files():
111 for matchroot in match.files():
112 if not matchroot.startswith(relroot):
112 if not matchroot.startswith(relroot):
113 ui.warn(
113 ui.warn(
114 _(b'warning: %s not inside relative root %s\n')
114 _(b'warning: %s not inside relative root %s\n')
115 % (uipathfn(pathfn(matchroot)), uirelroot)
115 % (uipathfn(pathfn(matchroot)), uirelroot)
116 )
116 )
117
117
118 relrootmatch = scmutil.match(ctx2, pats=[relroot], default=b'path')
118 relrootmatch = scmutil.match(ctx2, pats=[relroot], default=b'path')
119 match = matchmod.intersectmatchers(match, relrootmatch)
119 match = matchmod.intersectmatchers(match, relrootmatch)
120 copysourcematch = relrootmatch
120 copysourcematch = relrootmatch
121
121
122 checkroot = repo.ui.configbool(
122 checkroot = repo.ui.configbool(
123 b'devel', b'all-warnings'
123 b'devel', b'all-warnings'
124 ) or repo.ui.configbool(b'devel', b'check-relroot')
124 ) or repo.ui.configbool(b'devel', b'check-relroot')
125
125
126 def relrootpathfn(f):
126 def relrootpathfn(f):
127 if checkroot and not f.startswith(relroot):
127 if checkroot and not f.startswith(relroot):
128 raise AssertionError(
128 raise AssertionError(
129 b"file %s doesn't start with relroot %s" % (f, relroot)
129 b"file %s doesn't start with relroot %s" % (f, relroot)
130 )
130 )
131 return f[len(relroot) :]
131 return f[len(relroot) :]
132
132
133 pathfn = compose(relrootpathfn, pathfn)
133 pathfn = compose(relrootpathfn, pathfn)
134
134
135 if stat:
135 if stat:
136 diffopts = diffopts.copy(context=0, noprefix=False)
136 diffopts = diffopts.copy(context=0, noprefix=False)
137 width = 80
137 width = 80
138 if not ui.plain():
138 if not ui.plain():
139 width = ui.termwidth() - graphwidth
139 width = ui.termwidth() - graphwidth
140 # If an explicit --root was given, don't respect ui.relative-paths
140 # If an explicit --root was given, don't respect ui.relative-paths
141 if not relroot:
141 if not relroot:
142 pathfn = compose(scmutil.getuipathfn(repo), pathfn)
142 pathfn = compose(scmutil.getuipathfn(repo), pathfn)
143
143
144 chunks = ctx2.diff(
144 chunks = ctx2.diff(
145 ctx1,
145 ctx1,
146 match,
146 match,
147 changes,
147 changes,
148 opts=diffopts,
148 opts=diffopts,
149 pathfn=pathfn,
149 pathfn=pathfn,
150 copysourcematch=copysourcematch,
150 copysourcematch=copysourcematch,
151 hunksfilterfn=hunksfilterfn,
151 hunksfilterfn=hunksfilterfn,
152 )
152 )
153
153
154 if fp is not None or ui.canwritewithoutlabels():
154 if fp is not None or ui.canwritewithoutlabels():
155 out = fp or ui
155 out = fp or ui
156 if stat:
156 if stat:
157 chunks = [patch.diffstat(util.iterlines(chunks), width=width)]
157 chunks = [patch.diffstat(util.iterlines(chunks), width=width)]
158 for chunk in util.filechunkiter(util.chunkbuffer(chunks)):
158 for chunk in util.filechunkiter(util.chunkbuffer(chunks)):
159 out.write(chunk)
159 out.write(chunk)
160 else:
160 else:
161 if stat:
161 if stat:
162 chunks = patch.diffstatui(util.iterlines(chunks), width=width)
162 chunks = patch.diffstatui(util.iterlines(chunks), width=width)
163 else:
163 else:
164 chunks = patch.difflabel(
164 chunks = patch.difflabel(
165 lambda chunks, **kwargs: chunks, chunks, opts=diffopts
165 lambda chunks, **kwargs: chunks, chunks, opts=diffopts
166 )
166 )
167 if ui.canbatchlabeledwrites():
167 if ui.canbatchlabeledwrites():
168
168
169 def gen():
169 def gen():
170 for chunk, label in chunks:
170 for chunk, label in chunks:
171 yield ui.label(chunk, label=label)
171 yield ui.label(chunk, label=label)
172
172
173 for chunk in util.filechunkiter(util.chunkbuffer(gen())):
173 for chunk in util.filechunkiter(util.chunkbuffer(gen())):
174 ui.write(chunk)
174 ui.write(chunk)
175 else:
175 else:
176 for chunk, label in chunks:
176 for chunk, label in chunks:
177 ui.write(chunk, label=label)
177 ui.write(chunk, label=label)
178
178
179 node2 = ctx2.node()
179 node2 = ctx2.node()
180 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
180 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
181 tempnode2 = node2
181 tempnode2 = node2
182 try:
182 try:
183 if node2 is not None:
183 if node2 is not None:
184 tempnode2 = ctx2.substate[subpath][1]
184 tempnode2 = ctx2.substate[subpath][1]
185 except KeyError:
185 except KeyError:
186 # A subrepo that existed in node1 was deleted between node1 and
186 # A subrepo that existed in node1 was deleted between node1 and
187 # node2 (inclusive). Thus, ctx2's substate won't contain that
187 # node2 (inclusive). Thus, ctx2's substate won't contain that
188 # subpath. The best we can do is to ignore it.
188 # subpath. The best we can do is to ignore it.
189 tempnode2 = None
189 tempnode2 = None
190 submatch = matchmod.subdirmatcher(subpath, match)
190 submatch = matchmod.subdirmatcher(subpath, match)
191 subprefix = repo.wvfs.reljoin(prefix, subpath)
191 subprefix = repo.wvfs.reljoin(prefix, subpath)
192 if listsubrepos or match.exact(subpath) or any(submatch.files()):
192 if listsubrepos or match.exact(subpath) or any(submatch.files()):
193 sub.diff(
193 sub.diff(
194 ui,
194 ui,
195 diffopts,
195 diffopts,
196 tempnode2,
196 tempnode2,
197 submatch,
197 submatch,
198 changes=changes,
198 changes=changes,
199 stat=stat,
199 stat=stat,
200 fp=fp,
200 fp=fp,
201 prefix=subprefix,
201 prefix=subprefix,
202 )
202 )
203
203
204
204
205 class changesetdiffer(object):
205 class changesetdiffer(object):
206 """Generate diff of changeset with pre-configured filtering functions"""
206 """Generate diff of changeset with pre-configured filtering functions"""
207
207
208 def _makefilematcher(self, ctx):
208 def _makefilematcher(self, ctx):
209 return scmutil.matchall(ctx.repo())
209 return scmutil.matchall(ctx.repo())
210
210
211 def _makehunksfilter(self, ctx):
211 def _makehunksfilter(self, ctx):
212 return None
212 return None
213
213
214 def showdiff(self, ui, ctx, diffopts, graphwidth=0, stat=False):
214 def showdiff(self, ui, ctx, diffopts, graphwidth=0, stat=False):
215 diffordiffstat(
215 diffordiffstat(
216 ui,
216 ui,
217 ctx.repo(),
217 ctx.repo(),
218 diffopts,
218 diffopts,
219 ctx.p1(),
219 ctx.p1(),
220 ctx,
220 ctx,
221 match=self._makefilematcher(ctx),
221 match=self._makefilematcher(ctx),
222 stat=stat,
222 stat=stat,
223 graphwidth=graphwidth,
223 graphwidth=graphwidth,
224 hunksfilterfn=self._makehunksfilter(ctx),
224 hunksfilterfn=self._makehunksfilter(ctx),
225 )
225 )
226
226
227
227
228 def changesetlabels(ctx):
228 def changesetlabels(ctx):
229 labels = [b'log.changeset', b'changeset.%s' % ctx.phasestr()]
229 labels = [b'log.changeset', b'changeset.%s' % ctx.phasestr()]
230 if ctx.obsolete():
230 if ctx.obsolete():
231 labels.append(b'changeset.obsolete')
231 labels.append(b'changeset.obsolete')
232 if ctx.isunstable():
232 if ctx.isunstable():
233 labels.append(b'changeset.unstable')
233 labels.append(b'changeset.unstable')
234 for instability in ctx.instabilities():
234 for instability in ctx.instabilities():
235 labels.append(b'instability.%s' % instability)
235 labels.append(b'instability.%s' % instability)
236 return b' '.join(labels)
236 return b' '.join(labels)
237
237
238
238
239 class changesetprinter(object):
239 class changesetprinter(object):
240 '''show changeset information when templating not requested.'''
240 '''show changeset information when templating not requested.'''
241
241
242 def __init__(self, ui, repo, differ=None, diffopts=None, buffered=False):
242 def __init__(self, ui, repo, differ=None, diffopts=None, buffered=False):
243 self.ui = ui
243 self.ui = ui
244 self.repo = repo
244 self.repo = repo
245 self.buffered = buffered
245 self.buffered = buffered
246 self._differ = differ or changesetdiffer()
246 self._differ = differ or changesetdiffer()
247 self._diffopts = patch.diffallopts(ui, diffopts)
247 self._diffopts = patch.diffallopts(ui, diffopts)
248 self._includestat = diffopts and diffopts.get(b'stat')
248 self._includestat = diffopts and diffopts.get(b'stat')
249 self._includediff = diffopts and diffopts.get(b'patch')
249 self._includediff = diffopts and diffopts.get(b'patch')
250 self.header = {}
250 self.header = {}
251 self.hunk = {}
251 self.hunk = {}
252 self.lastheader = None
252 self.lastheader = None
253 self.footer = None
253 self.footer = None
254 self._columns = templatekw.getlogcolumns()
254 self._columns = templatekw.getlogcolumns()
255
255
256 def flush(self, ctx):
256 def flush(self, ctx):
257 rev = ctx.rev()
257 rev = ctx.rev()
258 if rev in self.header:
258 if rev in self.header:
259 h = self.header[rev]
259 h = self.header[rev]
260 if h != self.lastheader:
260 if h != self.lastheader:
261 self.lastheader = h
261 self.lastheader = h
262 self.ui.write(h)
262 self.ui.write(h)
263 del self.header[rev]
263 del self.header[rev]
264 if rev in self.hunk:
264 if rev in self.hunk:
265 self.ui.write(self.hunk[rev])
265 self.ui.write(self.hunk[rev])
266 del self.hunk[rev]
266 del self.hunk[rev]
267
267
268 def close(self):
268 def close(self):
269 if self.footer:
269 if self.footer:
270 self.ui.write(self.footer)
270 self.ui.write(self.footer)
271
271
272 def show(self, ctx, copies=None, **props):
272 def show(self, ctx, copies=None, **props):
273 props = pycompat.byteskwargs(props)
273 props = pycompat.byteskwargs(props)
274 if self.buffered:
274 if self.buffered:
275 self.ui.pushbuffer(labeled=True)
275 self.ui.pushbuffer(labeled=True)
276 self._show(ctx, copies, props)
276 self._show(ctx, copies, props)
277 self.hunk[ctx.rev()] = self.ui.popbuffer()
277 self.hunk[ctx.rev()] = self.ui.popbuffer()
278 else:
278 else:
279 self._show(ctx, copies, props)
279 self._show(ctx, copies, props)
280
280
281 def _show(self, ctx, copies, props):
281 def _show(self, ctx, copies, props):
282 '''show a single changeset or file revision'''
282 '''show a single changeset or file revision'''
283 changenode = ctx.node()
283 changenode = ctx.node()
284 graphwidth = props.get(b'graphwidth', 0)
284 graphwidth = props.get(b'graphwidth', 0)
285
285
286 if self.ui.quiet:
286 if self.ui.quiet:
287 self.ui.write(
287 self.ui.write(
288 b"%s\n" % scmutil.formatchangeid(ctx), label=b'log.node'
288 b"%s\n" % scmutil.formatchangeid(ctx), label=b'log.node'
289 )
289 )
290 return
290 return
291
291
292 columns = self._columns
292 columns = self._columns
293 self.ui.write(
293 self.ui.write(
294 columns[b'changeset'] % scmutil.formatchangeid(ctx),
294 columns[b'changeset'] % scmutil.formatchangeid(ctx),
295 label=changesetlabels(ctx),
295 label=changesetlabels(ctx),
296 )
296 )
297
297
298 # branches are shown first before any other names due to backwards
298 # branches are shown first before any other names due to backwards
299 # compatibility
299 # compatibility
300 branch = ctx.branch()
300 branch = ctx.branch()
301 # don't show the default branch name
301 # don't show the default branch name
302 if branch != b'default':
302 if branch != b'default':
303 self.ui.write(columns[b'branch'] % branch, label=b'log.branch')
303 self.ui.write(columns[b'branch'] % branch, label=b'log.branch')
304
304
305 for nsname, ns in pycompat.iteritems(self.repo.names):
305 for nsname, ns in pycompat.iteritems(self.repo.names):
306 # branches has special logic already handled above, so here we just
306 # branches has special logic already handled above, so here we just
307 # skip it
307 # skip it
308 if nsname == b'branches':
308 if nsname == b'branches':
309 continue
309 continue
310 # we will use the templatename as the color name since those two
310 # we will use the templatename as the color name since those two
311 # should be the same
311 # should be the same
312 for name in ns.names(self.repo, changenode):
312 for name in ns.names(self.repo, changenode):
313 self.ui.write(ns.logfmt % name, label=b'log.%s' % ns.colorname)
313 self.ui.write(ns.logfmt % name, label=b'log.%s' % ns.colorname)
314 if self.ui.debugflag:
314 if self.ui.debugflag:
315 self.ui.write(
315 self.ui.write(
316 columns[b'phase'] % ctx.phasestr(), label=b'log.phase'
316 columns[b'phase'] % ctx.phasestr(), label=b'log.phase'
317 )
317 )
318 for pctx in scmutil.meaningfulparents(self.repo, ctx):
318 for pctx in scmutil.meaningfulparents(self.repo, ctx):
319 label = b'log.parent changeset.%s' % pctx.phasestr()
319 label = b'log.parent changeset.%s' % pctx.phasestr()
320 self.ui.write(
320 self.ui.write(
321 columns[b'parent'] % scmutil.formatchangeid(pctx), label=label
321 columns[b'parent'] % scmutil.formatchangeid(pctx), label=label
322 )
322 )
323
323
324 if self.ui.debugflag:
324 if self.ui.debugflag:
325 mnode = ctx.manifestnode()
325 mnode = ctx.manifestnode()
326 if mnode is None:
326 if mnode is None:
327 mnode = wdirid
327 mnode = wdirid
328 mrev = wdirrev
328 mrev = wdirrev
329 else:
329 else:
330 mrev = self.repo.manifestlog.rev(mnode)
330 mrev = self.repo.manifestlog.rev(mnode)
331 self.ui.write(
331 self.ui.write(
332 columns[b'manifest']
332 columns[b'manifest']
333 % scmutil.formatrevnode(self.ui, mrev, mnode),
333 % scmutil.formatrevnode(self.ui, mrev, mnode),
334 label=b'ui.debug log.manifest',
334 label=b'ui.debug log.manifest',
335 )
335 )
336 self.ui.write(columns[b'user'] % ctx.user(), label=b'log.user')
336 self.ui.write(columns[b'user'] % ctx.user(), label=b'log.user')
337 self.ui.write(
337 self.ui.write(
338 columns[b'date'] % dateutil.datestr(ctx.date()), label=b'log.date'
338 columns[b'date'] % dateutil.datestr(ctx.date()), label=b'log.date'
339 )
339 )
340
340
341 if ctx.isunstable():
341 if ctx.isunstable():
342 instabilities = ctx.instabilities()
342 instabilities = ctx.instabilities()
343 self.ui.write(
343 self.ui.write(
344 columns[b'instability'] % b', '.join(instabilities),
344 columns[b'instability'] % b', '.join(instabilities),
345 label=b'log.instability',
345 label=b'log.instability',
346 )
346 )
347
347
348 elif ctx.obsolete():
348 elif ctx.obsolete():
349 self._showobsfate(ctx)
349 self._showobsfate(ctx)
350
350
351 self._exthook(ctx)
351 self._exthook(ctx)
352
352
353 if self.ui.debugflag:
353 if self.ui.debugflag:
354 files = ctx.p1().status(ctx)
354 files = ctx.p1().status(ctx)
355 for key, value in zip(
355 for key, value in zip(
356 [b'files', b'files+', b'files-'],
356 [b'files', b'files+', b'files-'],
357 [files.modified, files.added, files.removed],
357 [files.modified, files.added, files.removed],
358 ):
358 ):
359 if value:
359 if value:
360 self.ui.write(
360 self.ui.write(
361 columns[key] % b" ".join(value),
361 columns[key] % b" ".join(value),
362 label=b'ui.debug log.files',
362 label=b'ui.debug log.files',
363 )
363 )
364 elif ctx.files() and self.ui.verbose:
364 elif ctx.files() and self.ui.verbose:
365 self.ui.write(
365 self.ui.write(
366 columns[b'files'] % b" ".join(ctx.files()),
366 columns[b'files'] % b" ".join(ctx.files()),
367 label=b'ui.note log.files',
367 label=b'ui.note log.files',
368 )
368 )
369 if copies and self.ui.verbose:
369 if copies and self.ui.verbose:
370 copies = [b'%s (%s)' % c for c in copies]
370 copies = [b'%s (%s)' % c for c in copies]
371 self.ui.write(
371 self.ui.write(
372 columns[b'copies'] % b' '.join(copies),
372 columns[b'copies'] % b' '.join(copies),
373 label=b'ui.note log.copies',
373 label=b'ui.note log.copies',
374 )
374 )
375
375
376 extra = ctx.extra()
376 extra = ctx.extra()
377 if extra and self.ui.debugflag:
377 if extra and self.ui.debugflag:
378 for key, value in sorted(extra.items()):
378 for key, value in sorted(extra.items()):
379 self.ui.write(
379 self.ui.write(
380 columns[b'extra'] % (key, stringutil.escapestr(value)),
380 columns[b'extra'] % (key, stringutil.escapestr(value)),
381 label=b'ui.debug log.extra',
381 label=b'ui.debug log.extra',
382 )
382 )
383
383
384 description = ctx.description().strip()
384 description = ctx.description().strip()
385 if description:
385 if description:
386 if self.ui.verbose:
386 if self.ui.verbose:
387 self.ui.write(
387 self.ui.write(
388 _(b"description:\n"), label=b'ui.note log.description'
388 _(b"description:\n"), label=b'ui.note log.description'
389 )
389 )
390 self.ui.write(description, label=b'ui.note log.description')
390 self.ui.write(description, label=b'ui.note log.description')
391 self.ui.write(b"\n\n")
391 self.ui.write(b"\n\n")
392 else:
392 else:
393 self.ui.write(
393 self.ui.write(
394 columns[b'summary'] % description.splitlines()[0],
394 columns[b'summary'] % description.splitlines()[0],
395 label=b'log.summary',
395 label=b'log.summary',
396 )
396 )
397 self.ui.write(b"\n")
397 self.ui.write(b"\n")
398
398
399 self._showpatch(ctx, graphwidth)
399 self._showpatch(ctx, graphwidth)
400
400
401 def _showobsfate(self, ctx):
401 def _showobsfate(self, ctx):
402 # TODO: do not depend on templater
402 # TODO: do not depend on templater
403 tres = formatter.templateresources(self.repo.ui, self.repo)
403 tres = formatter.templateresources(self.repo.ui, self.repo)
404 t = formatter.maketemplater(
404 t = formatter.maketemplater(
405 self.repo.ui,
405 self.repo.ui,
406 b'{join(obsfate, "\n")}',
406 b'{join(obsfate, "\n")}',
407 defaults=templatekw.keywords,
407 defaults=templatekw.keywords,
408 resources=tres,
408 resources=tres,
409 )
409 )
410 obsfate = t.renderdefault({b'ctx': ctx}).splitlines()
410 obsfate = t.renderdefault({b'ctx': ctx}).splitlines()
411
411
412 if obsfate:
412 if obsfate:
413 for obsfateline in obsfate:
413 for obsfateline in obsfate:
414 self.ui.write(
414 self.ui.write(
415 self._columns[b'obsolete'] % obsfateline,
415 self._columns[b'obsolete'] % obsfateline,
416 label=b'log.obsfate',
416 label=b'log.obsfate',
417 )
417 )
418
418
419 def _exthook(self, ctx):
419 def _exthook(self, ctx):
420 '''empty method used by extension as a hook point
420 '''empty method used by extension as a hook point
421 '''
421 '''
422
422
423 def _showpatch(self, ctx, graphwidth=0):
423 def _showpatch(self, ctx, graphwidth=0):
424 if self._includestat:
424 if self._includestat:
425 self._differ.showdiff(
425 self._differ.showdiff(
426 self.ui, ctx, self._diffopts, graphwidth, stat=True
426 self.ui, ctx, self._diffopts, graphwidth, stat=True
427 )
427 )
428 if self._includestat and self._includediff:
428 if self._includestat and self._includediff:
429 self.ui.write(b"\n")
429 self.ui.write(b"\n")
430 if self._includediff:
430 if self._includediff:
431 self._differ.showdiff(
431 self._differ.showdiff(
432 self.ui, ctx, self._diffopts, graphwidth, stat=False
432 self.ui, ctx, self._diffopts, graphwidth, stat=False
433 )
433 )
434 if self._includestat or self._includediff:
434 if self._includestat or self._includediff:
435 self.ui.write(b"\n")
435 self.ui.write(b"\n")
436
436
437
437
438 class changesetformatter(changesetprinter):
438 class changesetformatter(changesetprinter):
439 """Format changeset information by generic formatter"""
439 """Format changeset information by generic formatter"""
440
440
441 def __init__(
441 def __init__(
442 self, ui, repo, fm, differ=None, diffopts=None, buffered=False
442 self, ui, repo, fm, differ=None, diffopts=None, buffered=False
443 ):
443 ):
444 changesetprinter.__init__(self, ui, repo, differ, diffopts, buffered)
444 changesetprinter.__init__(self, ui, repo, differ, diffopts, buffered)
445 self._diffopts = patch.difffeatureopts(ui, diffopts, git=True)
445 self._diffopts = patch.difffeatureopts(ui, diffopts, git=True)
446 self._fm = fm
446 self._fm = fm
447
447
448 def close(self):
448 def close(self):
449 self._fm.end()
449 self._fm.end()
450
450
451 def _show(self, ctx, copies, props):
451 def _show(self, ctx, copies, props):
452 '''show a single changeset or file revision'''
452 '''show a single changeset or file revision'''
453 fm = self._fm
453 fm = self._fm
454 fm.startitem()
454 fm.startitem()
455 fm.context(ctx=ctx)
455 fm.context(ctx=ctx)
456 fm.data(rev=scmutil.intrev(ctx), node=fm.hexfunc(scmutil.binnode(ctx)))
456 fm.data(rev=scmutil.intrev(ctx), node=fm.hexfunc(scmutil.binnode(ctx)))
457
457
458 datahint = fm.datahint()
458 datahint = fm.datahint()
459 if self.ui.quiet and not datahint:
459 if self.ui.quiet and not datahint:
460 return
460 return
461
461
462 fm.data(
462 fm.data(
463 branch=ctx.branch(),
463 branch=ctx.branch(),
464 phase=ctx.phasestr(),
464 phase=ctx.phasestr(),
465 user=ctx.user(),
465 user=ctx.user(),
466 date=fm.formatdate(ctx.date()),
466 date=fm.formatdate(ctx.date()),
467 desc=ctx.description(),
467 desc=ctx.description(),
468 bookmarks=fm.formatlist(ctx.bookmarks(), name=b'bookmark'),
468 bookmarks=fm.formatlist(ctx.bookmarks(), name=b'bookmark'),
469 tags=fm.formatlist(ctx.tags(), name=b'tag'),
469 tags=fm.formatlist(ctx.tags(), name=b'tag'),
470 parents=fm.formatlist(
470 parents=fm.formatlist(
471 [fm.hexfunc(c.node()) for c in ctx.parents()], name=b'node'
471 [fm.hexfunc(c.node()) for c in ctx.parents()], name=b'node'
472 ),
472 ),
473 )
473 )
474
474
475 if self.ui.debugflag or b'manifest' in datahint:
475 if self.ui.debugflag or b'manifest' in datahint:
476 fm.data(manifest=fm.hexfunc(ctx.manifestnode() or wdirid))
476 fm.data(manifest=fm.hexfunc(ctx.manifestnode() or wdirid))
477 if self.ui.debugflag or b'extra' in datahint:
477 if self.ui.debugflag or b'extra' in datahint:
478 fm.data(extra=fm.formatdict(ctx.extra()))
478 fm.data(extra=fm.formatdict(ctx.extra()))
479
479
480 if (
480 if (
481 self.ui.debugflag
481 self.ui.debugflag
482 or b'modified' in datahint
482 or b'modified' in datahint
483 or b'added' in datahint
483 or b'added' in datahint
484 or b'removed' in datahint
484 or b'removed' in datahint
485 ):
485 ):
486 files = ctx.p1().status(ctx)
486 files = ctx.p1().status(ctx)
487 fm.data(
487 fm.data(
488 modified=fm.formatlist(files.modified, name=b'file'),
488 modified=fm.formatlist(files.modified, name=b'file'),
489 added=fm.formatlist(files.added, name=b'file'),
489 added=fm.formatlist(files.added, name=b'file'),
490 removed=fm.formatlist(files.removed, name=b'file'),
490 removed=fm.formatlist(files.removed, name=b'file'),
491 )
491 )
492
492
493 verbose = not self.ui.debugflag and self.ui.verbose
493 verbose = not self.ui.debugflag and self.ui.verbose
494 if verbose or b'files' in datahint:
494 if verbose or b'files' in datahint:
495 fm.data(files=fm.formatlist(ctx.files(), name=b'file'))
495 fm.data(files=fm.formatlist(ctx.files(), name=b'file'))
496 if verbose and copies or b'copies' in datahint:
496 if verbose and copies or b'copies' in datahint:
497 fm.data(
497 fm.data(
498 copies=fm.formatdict(copies or {}, key=b'name', value=b'source')
498 copies=fm.formatdict(copies or {}, key=b'name', value=b'source')
499 )
499 )
500
500
501 if self._includestat or b'diffstat' in datahint:
501 if self._includestat or b'diffstat' in datahint:
502 self.ui.pushbuffer()
502 self.ui.pushbuffer()
503 self._differ.showdiff(self.ui, ctx, self._diffopts, stat=True)
503 self._differ.showdiff(self.ui, ctx, self._diffopts, stat=True)
504 fm.data(diffstat=self.ui.popbuffer())
504 fm.data(diffstat=self.ui.popbuffer())
505 if self._includediff or b'diff' in datahint:
505 if self._includediff or b'diff' in datahint:
506 self.ui.pushbuffer()
506 self.ui.pushbuffer()
507 self._differ.showdiff(self.ui, ctx, self._diffopts, stat=False)
507 self._differ.showdiff(self.ui, ctx, self._diffopts, stat=False)
508 fm.data(diff=self.ui.popbuffer())
508 fm.data(diff=self.ui.popbuffer())
509
509
510
510
511 class changesettemplater(changesetprinter):
511 class changesettemplater(changesetprinter):
512 '''format changeset information.
512 '''format changeset information.
513
513
514 Note: there are a variety of convenience functions to build a
514 Note: there are a variety of convenience functions to build a
515 changesettemplater for common cases. See functions such as:
515 changesettemplater for common cases. See functions such as:
516 maketemplater, changesetdisplayer, buildcommittemplate, or other
516 maketemplater, changesetdisplayer, buildcommittemplate, or other
517 functions that use changesest_templater.
517 functions that use changesest_templater.
518 '''
518 '''
519
519
520 # Arguments before "buffered" used to be positional. Consider not
520 # Arguments before "buffered" used to be positional. Consider not
521 # adding/removing arguments before "buffered" to not break callers.
521 # adding/removing arguments before "buffered" to not break callers.
522 def __init__(
522 def __init__(
523 self, ui, repo, tmplspec, differ=None, diffopts=None, buffered=False
523 self, ui, repo, tmplspec, differ=None, diffopts=None, buffered=False
524 ):
524 ):
525 changesetprinter.__init__(self, ui, repo, differ, diffopts, buffered)
525 changesetprinter.__init__(self, ui, repo, differ, diffopts, buffered)
526 # tres is shared with _graphnodeformatter()
526 # tres is shared with _graphnodeformatter()
527 self._tresources = tres = formatter.templateresources(ui, repo)
527 self._tresources = tres = formatter.templateresources(ui, repo)
528 self.t = formatter.loadtemplater(
528 self.t = formatter.loadtemplater(
529 ui,
529 ui,
530 tmplspec,
530 tmplspec,
531 defaults=templatekw.keywords,
531 defaults=templatekw.keywords,
532 resources=tres,
532 resources=tres,
533 cache=templatekw.defaulttempl,
533 cache=templatekw.defaulttempl,
534 )
534 )
535 self._counter = itertools.count()
535 self._counter = itertools.count()
536
536
537 self._tref = tmplspec.ref
537 self._tref = tmplspec.ref
538 self._parts = {
538 self._parts = {
539 b'header': b'',
539 b'header': b'',
540 b'footer': b'',
540 b'footer': b'',
541 tmplspec.ref: tmplspec.ref,
541 tmplspec.ref: tmplspec.ref,
542 b'docheader': b'',
542 b'docheader': b'',
543 b'docfooter': b'',
543 b'docfooter': b'',
544 b'separator': b'',
544 b'separator': b'',
545 }
545 }
546 if tmplspec.mapfile:
546 if tmplspec.mapfile:
547 # find correct templates for current mode, for backward
547 # find correct templates for current mode, for backward
548 # compatibility with 'log -v/-q/--debug' using a mapfile
548 # compatibility with 'log -v/-q/--debug' using a mapfile
549 tmplmodes = [
549 tmplmodes = [
550 (True, b''),
550 (True, b''),
551 (self.ui.verbose, b'_verbose'),
551 (self.ui.verbose, b'_verbose'),
552 (self.ui.quiet, b'_quiet'),
552 (self.ui.quiet, b'_quiet'),
553 (self.ui.debugflag, b'_debug'),
553 (self.ui.debugflag, b'_debug'),
554 ]
554 ]
555 for mode, postfix in tmplmodes:
555 for mode, postfix in tmplmodes:
556 for t in self._parts:
556 for t in self._parts:
557 cur = t + postfix
557 cur = t + postfix
558 if mode and cur in self.t:
558 if mode and cur in self.t:
559 self._parts[t] = cur
559 self._parts[t] = cur
560 else:
560 else:
561 partnames = [p for p in self._parts.keys() if p != tmplspec.ref]
561 partnames = [p for p in self._parts.keys() if p != tmplspec.ref]
562 m = formatter.templatepartsmap(tmplspec, self.t, partnames)
562 m = formatter.templatepartsmap(tmplspec, self.t, partnames)
563 self._parts.update(m)
563 self._parts.update(m)
564
564
565 if self._parts[b'docheader']:
565 if self._parts[b'docheader']:
566 self.ui.write(self.t.render(self._parts[b'docheader'], {}))
566 self.ui.write(self.t.render(self._parts[b'docheader'], {}))
567
567
568 def close(self):
568 def close(self):
569 if self._parts[b'docfooter']:
569 if self._parts[b'docfooter']:
570 if not self.footer:
570 if not self.footer:
571 self.footer = b""
571 self.footer = b""
572 self.footer += self.t.render(self._parts[b'docfooter'], {})
572 self.footer += self.t.render(self._parts[b'docfooter'], {})
573 return super(changesettemplater, self).close()
573 return super(changesettemplater, self).close()
574
574
575 def _show(self, ctx, copies, props):
575 def _show(self, ctx, copies, props):
576 '''show a single changeset or file revision'''
576 '''show a single changeset or file revision'''
577 props = props.copy()
577 props = props.copy()
578 props[b'ctx'] = ctx
578 props[b'ctx'] = ctx
579 props[b'index'] = index = next(self._counter)
579 props[b'index'] = index = next(self._counter)
580 props[b'revcache'] = {b'copies': copies}
580 props[b'revcache'] = {b'copies': copies}
581 graphwidth = props.get(b'graphwidth', 0)
581 graphwidth = props.get(b'graphwidth', 0)
582
582
583 # write separator, which wouldn't work well with the header part below
583 # write separator, which wouldn't work well with the header part below
584 # since there's inherently a conflict between header (across items) and
584 # since there's inherently a conflict between header (across items) and
585 # separator (per item)
585 # separator (per item)
586 if self._parts[b'separator'] and index > 0:
586 if self._parts[b'separator'] and index > 0:
587 self.ui.write(self.t.render(self._parts[b'separator'], {}))
587 self.ui.write(self.t.render(self._parts[b'separator'], {}))
588
588
589 # write header
589 # write header
590 if self._parts[b'header']:
590 if self._parts[b'header']:
591 h = self.t.render(self._parts[b'header'], props)
591 h = self.t.render(self._parts[b'header'], props)
592 if self.buffered:
592 if self.buffered:
593 self.header[ctx.rev()] = h
593 self.header[ctx.rev()] = h
594 else:
594 else:
595 if self.lastheader != h:
595 if self.lastheader != h:
596 self.lastheader = h
596 self.lastheader = h
597 self.ui.write(h)
597 self.ui.write(h)
598
598
599 # write changeset metadata, then patch if requested
599 # write changeset metadata, then patch if requested
600 key = self._parts[self._tref]
600 key = self._parts[self._tref]
601 self.ui.write(self.t.render(key, props))
601 self.ui.write(self.t.render(key, props))
602 self._exthook(ctx)
602 self._exthook(ctx)
603 self._showpatch(ctx, graphwidth)
603 self._showpatch(ctx, graphwidth)
604
604
605 if self._parts[b'footer']:
605 if self._parts[b'footer']:
606 if not self.footer:
606 if not self.footer:
607 self.footer = self.t.render(self._parts[b'footer'], props)
607 self.footer = self.t.render(self._parts[b'footer'], props)
608
608
609
609
610 def templatespec(tmpl, mapfile):
610 def templatespec(tmpl, mapfile):
611 assert not (tmpl and mapfile)
611 assert not (tmpl and mapfile)
612 if mapfile:
612 if mapfile:
613 return formatter.mapfile_templatespec(b'changeset', mapfile)
613 return formatter.mapfile_templatespec(b'changeset', mapfile)
614 else:
614 else:
615 return formatter.literal_templatespec(tmpl)
615 return formatter.literal_templatespec(tmpl)
616
616
617
617
618 def _lookuptemplate(ui, tmpl, style):
618 def _lookuptemplate(ui, tmpl, style):
619 """Find the template matching the given template spec or style
619 """Find the template matching the given template spec or style
620
620
621 See formatter.lookuptemplate() for details.
621 See formatter.lookuptemplate() for details.
622 """
622 """
623
623
624 # ui settings
624 # ui settings
625 if not tmpl and not style: # template are stronger than style
625 if not tmpl and not style: # template are stronger than style
626 tmpl = ui.config(b'ui', b'logtemplate')
626 tmpl = ui.config(b'ui', b'logtemplate')
627 if tmpl:
627 if tmpl:
628 return formatter.literal_templatespec(templater.unquotestring(tmpl))
628 return formatter.literal_templatespec(templater.unquotestring(tmpl))
629 else:
629 else:
630 style = util.expandpath(ui.config(b'ui', b'style'))
630 style = util.expandpath(ui.config(b'ui', b'style'))
631
631
632 if not tmpl and style:
632 if not tmpl and style:
633 mapfile = style
633 mapfile = style
634 fp = None
634 fp = None
635 if not os.path.split(mapfile)[0]:
635 if not os.path.split(mapfile)[0]:
636 (mapname, fp) = templater.try_open_template(
636 (mapname, fp) = templater.try_open_template(
637 b'map-cmdline.' + mapfile
637 b'map-cmdline.' + mapfile
638 ) or templater.try_open_template(mapfile)
638 ) or templater.try_open_template(mapfile)
639 if mapname:
639 if mapname:
640 mapfile = mapname
640 mapfile = mapname
641 return formatter.mapfile_templatespec(b'changeset', mapfile, fp)
641 return formatter.mapfile_templatespec(b'changeset', mapfile, fp)
642
642
643 return formatter.lookuptemplate(ui, b'changeset', tmpl)
643 return formatter.lookuptemplate(ui, b'changeset', tmpl)
644
644
645
645
646 def maketemplater(ui, repo, tmpl, buffered=False):
646 def maketemplater(ui, repo, tmpl, buffered=False):
647 """Create a changesettemplater from a literal template 'tmpl'
647 """Create a changesettemplater from a literal template 'tmpl'
648 byte-string."""
648 byte-string."""
649 spec = formatter.literal_templatespec(tmpl)
649 spec = formatter.literal_templatespec(tmpl)
650 return changesettemplater(ui, repo, spec, buffered=buffered)
650 return changesettemplater(ui, repo, spec, buffered=buffered)
651
651
652
652
653 def changesetdisplayer(ui, repo, opts, differ=None, buffered=False):
653 def changesetdisplayer(ui, repo, opts, differ=None, buffered=False):
654 """show one changeset using template or regular display.
654 """show one changeset using template or regular display.
655
655
656 Display format will be the first non-empty hit of:
656 Display format will be the first non-empty hit of:
657 1. option 'template'
657 1. option 'template'
658 2. option 'style'
658 2. option 'style'
659 3. [ui] setting 'logtemplate'
659 3. [ui] setting 'logtemplate'
660 4. [ui] setting 'style'
660 4. [ui] setting 'style'
661 If all of these values are either the unset or the empty string,
661 If all of these values are either the unset or the empty string,
662 regular display via changesetprinter() is done.
662 regular display via changesetprinter() is done.
663 """
663 """
664 postargs = (differ, opts, buffered)
664 postargs = (differ, opts, buffered)
665 spec = _lookuptemplate(ui, opts.get(b'template'), opts.get(b'style'))
665 spec = _lookuptemplate(ui, opts.get(b'template'), opts.get(b'style'))
666
666
667 # machine-readable formats have slightly different keyword set than
667 # machine-readable formats have slightly different keyword set than
668 # plain templates, which are handled by changesetformatter.
668 # plain templates, which are handled by changesetformatter.
669 # note that {b'pickle', b'debug'} can also be added to the list if needed.
669 # note that {b'pickle', b'debug'} can also be added to the list if needed.
670 if spec.ref in {b'cbor', b'json'}:
670 if spec.ref in {b'cbor', b'json'}:
671 fm = ui.formatter(b'log', opts)
671 fm = ui.formatter(b'log', opts)
672 return changesetformatter(ui, repo, fm, *postargs)
672 return changesetformatter(ui, repo, fm, *postargs)
673
673
674 if not spec.ref and not spec.tmpl and not spec.mapfile:
674 if not spec.ref and not spec.tmpl and not spec.mapfile:
675 return changesetprinter(ui, repo, *postargs)
675 return changesetprinter(ui, repo, *postargs)
676
676
677 return changesettemplater(ui, repo, spec, *postargs)
677 return changesettemplater(ui, repo, spec, *postargs)
678
678
679
679
680 @attr.s
680 @attr.s
681 class walkopts(object):
681 class walkopts(object):
682 """Options to configure a set of revisions and file matcher factory
682 """Options to configure a set of revisions and file matcher factory
683 to scan revision/file history
683 to scan revision/file history
684 """
684 """
685
685
686 # raw command-line parameters, which a matcher will be built from
686 # raw command-line parameters, which a matcher will be built from
687 pats = attr.ib() # type: List[bytes]
687 pats = attr.ib() # type: List[bytes]
688 opts = attr.ib() # type: Dict[bytes, Any]
688 opts = attr.ib() # type: Dict[bytes, Any]
689
689
690 # a list of revset expressions to be traversed; if follow, it specifies
690 # a list of revset expressions to be traversed; if follow, it specifies
691 # the start revisions
691 # the start revisions
692 revspec = attr.ib() # type: List[bytes]
692 revspec = attr.ib() # type: List[bytes]
693
693
694 # miscellaneous queries to filter revisions (see "hg help log" for details)
694 # miscellaneous queries to filter revisions (see "hg help log" for details)
695 branches = attr.ib(default=attr.Factory(list)) # type: List[bytes]
695 branches = attr.ib(default=attr.Factory(list)) # type: List[bytes]
696 date = attr.ib(default=None) # type: Optional[bytes]
696 date = attr.ib(default=None) # type: Optional[bytes]
697 keywords = attr.ib(default=attr.Factory(list)) # type: List[bytes]
697 keywords = attr.ib(default=attr.Factory(list)) # type: List[bytes]
698 no_merges = attr.ib(default=False) # type: bool
698 no_merges = attr.ib(default=False) # type: bool
699 only_merges = attr.ib(default=False) # type: bool
699 only_merges = attr.ib(default=False) # type: bool
700 prune_ancestors = attr.ib(default=attr.Factory(list)) # type: List[bytes]
700 prune_ancestors = attr.ib(default=attr.Factory(list)) # type: List[bytes]
701 users = attr.ib(default=attr.Factory(list)) # type: List[bytes]
701 users = attr.ib(default=attr.Factory(list)) # type: List[bytes]
702
702
703 # miscellaneous matcher arguments
703 # miscellaneous matcher arguments
704 include_pats = attr.ib(default=attr.Factory(list)) # type: List[bytes]
704 include_pats = attr.ib(default=attr.Factory(list)) # type: List[bytes]
705 exclude_pats = attr.ib(default=attr.Factory(list)) # type: List[bytes]
705 exclude_pats = attr.ib(default=attr.Factory(list)) # type: List[bytes]
706
706
707 # 0: no follow, 1: follow first, 2: follow both parents
707 # 0: no follow, 1: follow first, 2: follow both parents
708 follow = attr.ib(default=0) # type: int
708 follow = attr.ib(default=0) # type: int
709
709
710 # do not attempt filelog-based traversal, which may be fast but cannot
710 # do not attempt filelog-based traversal, which may be fast but cannot
711 # include revisions where files were removed
711 # include revisions where files were removed
712 force_changelog_traversal = attr.ib(default=False) # type: bool
712 force_changelog_traversal = attr.ib(default=False) # type: bool
713
713
714 # filter revisions by file patterns, which should be disabled only if
714 # filter revisions by file patterns, which should be disabled only if
715 # you want to include revisions where files were unmodified
715 # you want to include revisions where files were unmodified
716 filter_revisions_by_pats = attr.ib(default=True) # type: bool
716 filter_revisions_by_pats = attr.ib(default=True) # type: bool
717
717
718 # sort revisions prior to traversal: 'desc', 'topo', or None
718 # sort revisions prior to traversal: 'desc', 'topo', or None
719 sort_revisions = attr.ib(default=None) # type: Optional[bytes]
719 sort_revisions = attr.ib(default=None) # type: Optional[bytes]
720
720
721 # limit number of changes displayed; None means unlimited
721 # limit number of changes displayed; None means unlimited
722 limit = attr.ib(default=None) # type: Optional[int]
722 limit = attr.ib(default=None) # type: Optional[int]
723
723
724
724
725 def parseopts(ui, pats, opts):
725 def parseopts(ui, pats, opts):
726 # type: (Any, List[bytes], Dict[bytes, Any]) -> walkopts
726 # type: (Any, List[bytes], Dict[bytes, Any]) -> walkopts
727 """Parse log command options into walkopts
727 """Parse log command options into walkopts
728
728
729 The returned walkopts will be passed in to getrevs() or makewalker().
729 The returned walkopts will be passed in to getrevs() or makewalker().
730 """
730 """
731 if opts.get(b'follow_first'):
731 if opts.get(b'follow_first'):
732 follow = 1
732 follow = 1
733 elif opts.get(b'follow'):
733 elif opts.get(b'follow'):
734 follow = 2
734 follow = 2
735 else:
735 else:
736 follow = 0
736 follow = 0
737
737
738 if opts.get(b'graph'):
738 if opts.get(b'graph'):
739 if ui.configbool(b'experimental', b'log.topo'):
739 if ui.configbool(b'experimental', b'log.topo'):
740 sort_revisions = b'topo'
740 sort_revisions = b'topo'
741 else:
741 else:
742 sort_revisions = b'desc'
742 sort_revisions = b'desc'
743 else:
743 else:
744 sort_revisions = None
744 sort_revisions = None
745
745
746 return walkopts(
746 return walkopts(
747 pats=pats,
747 pats=pats,
748 opts=opts,
748 opts=opts,
749 revspec=opts.get(b'rev', []),
749 revspec=opts.get(b'rev', []),
750 # branch and only_branch are really aliases and must be handled at
750 # branch and only_branch are really aliases and must be handled at
751 # the same time
751 # the same time
752 branches=opts.get(b'branch', []) + opts.get(b'only_branch', []),
752 branches=opts.get(b'branch', []) + opts.get(b'only_branch', []),
753 date=opts.get(b'date'),
753 date=opts.get(b'date'),
754 keywords=opts.get(b'keyword', []),
754 keywords=opts.get(b'keyword', []),
755 no_merges=bool(opts.get(b'no_merges')),
755 no_merges=bool(opts.get(b'no_merges')),
756 only_merges=bool(opts.get(b'only_merges')),
756 only_merges=bool(opts.get(b'only_merges')),
757 prune_ancestors=opts.get(b'prune', []),
757 prune_ancestors=opts.get(b'prune', []),
758 users=opts.get(b'user', []),
758 users=opts.get(b'user', []),
759 include_pats=opts.get(b'include', []),
759 include_pats=opts.get(b'include', []),
760 exclude_pats=opts.get(b'exclude', []),
760 exclude_pats=opts.get(b'exclude', []),
761 follow=follow,
761 follow=follow,
762 force_changelog_traversal=bool(opts.get(b'removed')),
762 force_changelog_traversal=bool(opts.get(b'removed')),
763 sort_revisions=sort_revisions,
763 sort_revisions=sort_revisions,
764 limit=getlimit(opts),
764 limit=getlimit(opts),
765 )
765 )
766
766
767
767
768 def _makematcher(repo, revs, wopts):
768 def _makematcher(repo, revs, wopts):
769 """Build matcher and expanded patterns from log options
769 """Build matcher and expanded patterns from log options
770
770
771 If --follow, revs are the revisions to follow from.
771 If --follow, revs are the revisions to follow from.
772
772
773 Returns (match, pats, slowpath) where
773 Returns (match, pats, slowpath) where
774 - match: a matcher built from the given pats and -I/-X opts
774 - match: a matcher built from the given pats and -I/-X opts
775 - pats: patterns used (globs are expanded on Windows)
775 - pats: patterns used (globs are expanded on Windows)
776 - slowpath: True if patterns aren't as simple as scanning filelogs
776 - slowpath: True if patterns aren't as simple as scanning filelogs
777 """
777 """
778 # pats/include/exclude are passed to match.match() directly in
778 # pats/include/exclude are passed to match.match() directly in
779 # _matchfiles() revset but walkchangerevs() builds its matcher with
779 # _matchfiles() revset, but a log-like command should build its matcher
780 # scmutil.match(). The difference is input pats are globbed on
780 # with scmutil.match(). The difference is input pats are globbed on
781 # platforms without shell expansion (windows).
781 # platforms without shell expansion (windows).
782 wctx = repo[None]
782 wctx = repo[None]
783 match, pats = scmutil.matchandpats(wctx, wopts.pats, wopts.opts)
783 match, pats = scmutil.matchandpats(wctx, wopts.pats, wopts.opts)
784 slowpath = match.anypats() or (
784 slowpath = match.anypats() or (
785 not match.always() and wopts.force_changelog_traversal
785 not match.always() and wopts.force_changelog_traversal
786 )
786 )
787 if not slowpath:
787 if not slowpath:
788 if wopts.follow and wopts.revspec:
788 if wopts.follow and wopts.revspec:
789 # There may be the case that a path doesn't exist in some (but
789 # There may be the case that a path doesn't exist in some (but
790 # not all) of the specified start revisions, but let's consider
790 # not all) of the specified start revisions, but let's consider
791 # the path is valid. Missing files will be warned by the matcher.
791 # the path is valid. Missing files will be warned by the matcher.
792 startctxs = [repo[r] for r in revs]
792 startctxs = [repo[r] for r in revs]
793 for f in match.files():
793 for f in match.files():
794 found = False
794 found = False
795 for c in startctxs:
795 for c in startctxs:
796 if f in c:
796 if f in c:
797 found = True
797 found = True
798 elif c.hasdir(f):
798 elif c.hasdir(f):
799 # If a directory exists in any of the start revisions,
799 # If a directory exists in any of the start revisions,
800 # take the slow path.
800 # take the slow path.
801 found = slowpath = True
801 found = slowpath = True
802 if not found:
802 if not found:
803 raise error.Abort(
803 raise error.Abort(
804 _(
804 _(
805 b'cannot follow file not in any of the specified '
805 b'cannot follow file not in any of the specified '
806 b'revisions: "%s"'
806 b'revisions: "%s"'
807 )
807 )
808 % f
808 % f
809 )
809 )
810 elif wopts.follow:
810 elif wopts.follow:
811 for f in match.files():
811 for f in match.files():
812 if f not in wctx:
812 if f not in wctx:
813 # If the file exists, it may be a directory, so let it
813 # If the file exists, it may be a directory, so let it
814 # take the slow path.
814 # take the slow path.
815 if os.path.exists(repo.wjoin(f)):
815 if os.path.exists(repo.wjoin(f)):
816 slowpath = True
816 slowpath = True
817 continue
817 continue
818 else:
818 else:
819 raise error.Abort(
819 raise error.Abort(
820 _(
820 _(
821 b'cannot follow file not in parent '
821 b'cannot follow file not in parent '
822 b'revision: "%s"'
822 b'revision: "%s"'
823 )
823 )
824 % f
824 % f
825 )
825 )
826 filelog = repo.file(f)
826 filelog = repo.file(f)
827 if not filelog:
827 if not filelog:
828 # A file exists in wdir but not in history, which means
828 # A file exists in wdir but not in history, which means
829 # the file isn't committed yet.
829 # the file isn't committed yet.
830 raise error.Abort(
830 raise error.Abort(
831 _(b'cannot follow nonexistent file: "%s"') % f
831 _(b'cannot follow nonexistent file: "%s"') % f
832 )
832 )
833 else:
833 else:
834 for f in match.files():
834 for f in match.files():
835 filelog = repo.file(f)
835 filelog = repo.file(f)
836 if not filelog:
836 if not filelog:
837 # A zero count may be a directory or deleted file, so
837 # A zero count may be a directory or deleted file, so
838 # try to find matching entries on the slow path.
838 # try to find matching entries on the slow path.
839 slowpath = True
839 slowpath = True
840
840
841 # We decided to fall back to the slowpath because at least one
841 # We decided to fall back to the slowpath because at least one
842 # of the paths was not a file. Check to see if at least one of them
842 # of the paths was not a file. Check to see if at least one of them
843 # existed in history - in that case, we'll continue down the
843 # existed in history - in that case, we'll continue down the
844 # slowpath; otherwise, we can turn off the slowpath
844 # slowpath; otherwise, we can turn off the slowpath
845 if slowpath:
845 if slowpath:
846 for path in match.files():
846 for path in match.files():
847 if path == b'.' or path in repo.store:
847 if path == b'.' or path in repo.store:
848 break
848 break
849 else:
849 else:
850 slowpath = False
850 slowpath = False
851
851
852 return match, pats, slowpath
852 return match, pats, slowpath
853
853
854
854
855 def _fileancestors(repo, revs, match, followfirst):
855 def _fileancestors(repo, revs, match, followfirst):
856 fctxs = []
856 fctxs = []
857 for r in revs:
857 for r in revs:
858 ctx = repo[r]
858 ctx = repo[r]
859 fctxs.extend(ctx[f].introfilectx() for f in ctx.walk(match))
859 fctxs.extend(ctx[f].introfilectx() for f in ctx.walk(match))
860
860
861 # When displaying a revision with --patch --follow FILE, we have
861 # When displaying a revision with --patch --follow FILE, we have
862 # to know which file of the revision must be diffed. With
862 # to know which file of the revision must be diffed. With
863 # --follow, we want the names of the ancestors of FILE in the
863 # --follow, we want the names of the ancestors of FILE in the
864 # revision, stored in "fcache". "fcache" is populated as a side effect
864 # revision, stored in "fcache". "fcache" is populated as a side effect
865 # of the graph traversal.
865 # of the graph traversal.
866 fcache = {}
866 fcache = {}
867
867
868 def filematcher(ctx):
868 def filematcher(ctx):
869 return scmutil.matchfiles(repo, fcache.get(scmutil.intrev(ctx), []))
869 return scmutil.matchfiles(repo, fcache.get(scmutil.intrev(ctx), []))
870
870
871 def revgen():
871 def revgen():
872 for rev, cs in dagop.filectxancestors(fctxs, followfirst=followfirst):
872 for rev, cs in dagop.filectxancestors(fctxs, followfirst=followfirst):
873 fcache[rev] = [c.path() for c in cs]
873 fcache[rev] = [c.path() for c in cs]
874 yield rev
874 yield rev
875
875
876 return smartset.generatorset(revgen(), iterasc=False), filematcher
876 return smartset.generatorset(revgen(), iterasc=False), filematcher
877
877
878
878
879 def _makenofollowfilematcher(repo, pats, opts):
879 def _makenofollowfilematcher(repo, pats, opts):
880 '''hook for extensions to override the filematcher for non-follow cases'''
880 '''hook for extensions to override the filematcher for non-follow cases'''
881 return None
881 return None
882
882
883
883
884 _opt2logrevset = {
884 _opt2logrevset = {
885 b'no_merges': (b'not merge()', None),
885 b'no_merges': (b'not merge()', None),
886 b'only_merges': (b'merge()', None),
886 b'only_merges': (b'merge()', None),
887 b'_matchfiles': (None, b'_matchfiles(%ps)'),
887 b'_matchfiles': (None, b'_matchfiles(%ps)'),
888 b'date': (b'date(%s)', None),
888 b'date': (b'date(%s)', None),
889 b'branch': (b'branch(%s)', b'%lr'),
889 b'branch': (b'branch(%s)', b'%lr'),
890 b'_patslog': (b'filelog(%s)', b'%lr'),
890 b'_patslog': (b'filelog(%s)', b'%lr'),
891 b'keyword': (b'keyword(%s)', b'%lr'),
891 b'keyword': (b'keyword(%s)', b'%lr'),
892 b'prune': (b'ancestors(%s)', b'not %lr'),
892 b'prune': (b'ancestors(%s)', b'not %lr'),
893 b'user': (b'user(%s)', b'%lr'),
893 b'user': (b'user(%s)', b'%lr'),
894 }
894 }
895
895
896
896
897 def _makerevset(repo, wopts, slowpath):
897 def _makerevset(repo, wopts, slowpath):
898 """Return a revset string built from log options and file patterns"""
898 """Return a revset string built from log options and file patterns"""
899 opts = {
899 opts = {
900 b'branch': [repo.lookupbranch(b) for b in wopts.branches],
900 b'branch': [repo.lookupbranch(b) for b in wopts.branches],
901 b'date': wopts.date,
901 b'date': wopts.date,
902 b'keyword': wopts.keywords,
902 b'keyword': wopts.keywords,
903 b'no_merges': wopts.no_merges,
903 b'no_merges': wopts.no_merges,
904 b'only_merges': wopts.only_merges,
904 b'only_merges': wopts.only_merges,
905 b'prune': wopts.prune_ancestors,
905 b'prune': wopts.prune_ancestors,
906 b'user': wopts.users,
906 b'user': wopts.users,
907 }
907 }
908
908
909 if wopts.filter_revisions_by_pats and slowpath:
909 if wopts.filter_revisions_by_pats and slowpath:
910 # See walkchangerevs() slow path.
911 #
912 # pats/include/exclude cannot be represented as separate
910 # pats/include/exclude cannot be represented as separate
913 # revset expressions as their filtering logic applies at file
911 # revset expressions as their filtering logic applies at file
914 # level. For instance "-I a -X b" matches a revision touching
912 # level. For instance "-I a -X b" matches a revision touching
915 # "a" and "b" while "file(a) and not file(b)" does
913 # "a" and "b" while "file(a) and not file(b)" does
916 # not. Besides, filesets are evaluated against the working
914 # not. Besides, filesets are evaluated against the working
917 # directory.
915 # directory.
918 matchargs = [b'r:', b'd:relpath']
916 matchargs = [b'r:', b'd:relpath']
919 for p in wopts.pats:
917 for p in wopts.pats:
920 matchargs.append(b'p:' + p)
918 matchargs.append(b'p:' + p)
921 for p in wopts.include_pats:
919 for p in wopts.include_pats:
922 matchargs.append(b'i:' + p)
920 matchargs.append(b'i:' + p)
923 for p in wopts.exclude_pats:
921 for p in wopts.exclude_pats:
924 matchargs.append(b'x:' + p)
922 matchargs.append(b'x:' + p)
925 opts[b'_matchfiles'] = matchargs
923 opts[b'_matchfiles'] = matchargs
926 elif wopts.filter_revisions_by_pats and not wopts.follow:
924 elif wopts.filter_revisions_by_pats and not wopts.follow:
927 opts[b'_patslog'] = list(wopts.pats)
925 opts[b'_patslog'] = list(wopts.pats)
928
926
929 expr = []
927 expr = []
930 for op, val in sorted(pycompat.iteritems(opts)):
928 for op, val in sorted(pycompat.iteritems(opts)):
931 if not val:
929 if not val:
932 continue
930 continue
933 revop, listop = _opt2logrevset[op]
931 revop, listop = _opt2logrevset[op]
934 if revop and b'%' not in revop:
932 if revop and b'%' not in revop:
935 expr.append(revop)
933 expr.append(revop)
936 elif not listop:
934 elif not listop:
937 expr.append(revsetlang.formatspec(revop, val))
935 expr.append(revsetlang.formatspec(revop, val))
938 else:
936 else:
939 if revop:
937 if revop:
940 val = [revsetlang.formatspec(revop, v) for v in val]
938 val = [revsetlang.formatspec(revop, v) for v in val]
941 expr.append(revsetlang.formatspec(listop, val))
939 expr.append(revsetlang.formatspec(listop, val))
942
940
943 if expr:
941 if expr:
944 expr = b'(' + b' and '.join(expr) + b')'
942 expr = b'(' + b' and '.join(expr) + b')'
945 else:
943 else:
946 expr = None
944 expr = None
947 return expr
945 return expr
948
946
949
947
950 def _initialrevs(repo, wopts):
948 def _initialrevs(repo, wopts):
951 """Return the initial set of revisions to be filtered or followed"""
949 """Return the initial set of revisions to be filtered or followed"""
952 if wopts.revspec:
950 if wopts.revspec:
953 revs = scmutil.revrange(repo, wopts.revspec)
951 revs = scmutil.revrange(repo, wopts.revspec)
954 elif wopts.follow and repo.dirstate.p1() == nullid:
952 elif wopts.follow and repo.dirstate.p1() == nullid:
955 revs = smartset.baseset()
953 revs = smartset.baseset()
956 elif wopts.follow:
954 elif wopts.follow:
957 revs = repo.revs(b'.')
955 revs = repo.revs(b'.')
958 else:
956 else:
959 revs = smartset.spanset(repo)
957 revs = smartset.spanset(repo)
960 revs.reverse()
958 revs.reverse()
961 return revs
959 return revs
962
960
963
961
964 def makewalker(repo, wopts):
962 def makewalker(repo, wopts):
965 # type: (Any, walkopts) -> Tuple[smartset.abstractsmartset, Optional[Callable[[Any], matchmod.basematcher]]]
963 # type: (Any, walkopts) -> Tuple[smartset.abstractsmartset, Optional[Callable[[Any], matchmod.basematcher]]]
966 """Build (revs, makefilematcher) to scan revision/file history
964 """Build (revs, makefilematcher) to scan revision/file history
967
965
968 - revs is the smartset to be traversed.
966 - revs is the smartset to be traversed.
969 - makefilematcher is a function to map ctx to a matcher for that revision
967 - makefilematcher is a function to map ctx to a matcher for that revision
970 """
968 """
971 revs = _initialrevs(repo, wopts)
969 revs = _initialrevs(repo, wopts)
972 if not revs:
970 if not revs:
973 return smartset.baseset(), None
971 return smartset.baseset(), None
974 # TODO: might want to merge slowpath with wopts.force_changelog_traversal
972 # TODO: might want to merge slowpath with wopts.force_changelog_traversal
975 match, pats, slowpath = _makematcher(repo, revs, wopts)
973 match, pats, slowpath = _makematcher(repo, revs, wopts)
976 wopts = attr.evolve(wopts, pats=pats)
974 wopts = attr.evolve(wopts, pats=pats)
977
975
978 filematcher = None
976 filematcher = None
979 if wopts.follow:
977 if wopts.follow:
980 if slowpath or match.always():
978 if slowpath or match.always():
981 revs = dagop.revancestors(repo, revs, followfirst=wopts.follow == 1)
979 revs = dagop.revancestors(repo, revs, followfirst=wopts.follow == 1)
982 else:
980 else:
983 assert not wopts.force_changelog_traversal
981 assert not wopts.force_changelog_traversal
984 revs, filematcher = _fileancestors(
982 revs, filematcher = _fileancestors(
985 repo, revs, match, followfirst=wopts.follow == 1
983 repo, revs, match, followfirst=wopts.follow == 1
986 )
984 )
987 revs.reverse()
985 revs.reverse()
988 if filematcher is None:
986 if filematcher is None:
989 filematcher = _makenofollowfilematcher(repo, wopts.pats, wopts.opts)
987 filematcher = _makenofollowfilematcher(repo, wopts.pats, wopts.opts)
990 if filematcher is None:
988 if filematcher is None:
991
989
992 def filematcher(ctx):
990 def filematcher(ctx):
993 return match
991 return match
994
992
995 expr = _makerevset(repo, wopts, slowpath)
993 expr = _makerevset(repo, wopts, slowpath)
996 if wopts.sort_revisions:
994 if wopts.sort_revisions:
997 assert wopts.sort_revisions in {b'topo', b'desc'}
995 assert wopts.sort_revisions in {b'topo', b'desc'}
998 if wopts.sort_revisions == b'topo':
996 if wopts.sort_revisions == b'topo':
999 if not revs.istopo():
997 if not revs.istopo():
1000 revs = dagop.toposort(revs, repo.changelog.parentrevs)
998 revs = dagop.toposort(revs, repo.changelog.parentrevs)
1001 # TODO: try to iterate the set lazily
999 # TODO: try to iterate the set lazily
1002 revs = revset.baseset(list(revs), istopo=True)
1000 revs = revset.baseset(list(revs), istopo=True)
1003 elif not (revs.isdescending() or revs.istopo()):
1001 elif not (revs.isdescending() or revs.istopo()):
1004 # User-specified revs might be unsorted
1002 # User-specified revs might be unsorted
1005 revs.sort(reverse=True)
1003 revs.sort(reverse=True)
1006 if expr:
1004 if expr:
1007 matcher = revset.match(None, expr)
1005 matcher = revset.match(None, expr)
1008 revs = matcher(repo, revs)
1006 revs = matcher(repo, revs)
1009 if wopts.limit is not None:
1007 if wopts.limit is not None:
1010 revs = revs.slice(0, wopts.limit)
1008 revs = revs.slice(0, wopts.limit)
1011
1009
1012 return revs, filematcher
1010 return revs, filematcher
1013
1011
1014
1012
1015 def getrevs(repo, wopts):
1013 def getrevs(repo, wopts):
1016 # type: (Any, walkopts) -> Tuple[smartset.abstractsmartset, Optional[changesetdiffer]]
1014 # type: (Any, walkopts) -> Tuple[smartset.abstractsmartset, Optional[changesetdiffer]]
1017 """Return (revs, differ) where revs is a smartset
1015 """Return (revs, differ) where revs is a smartset
1018
1016
1019 differ is a changesetdiffer with pre-configured file matcher.
1017 differ is a changesetdiffer with pre-configured file matcher.
1020 """
1018 """
1021 revs, filematcher = makewalker(repo, wopts)
1019 revs, filematcher = makewalker(repo, wopts)
1022 if not revs:
1020 if not revs:
1023 return revs, None
1021 return revs, None
1024 differ = changesetdiffer()
1022 differ = changesetdiffer()
1025 differ._makefilematcher = filematcher
1023 differ._makefilematcher = filematcher
1026 return revs, differ
1024 return revs, differ
1027
1025
1028
1026
1029 def _parselinerangeopt(repo, opts):
1027 def _parselinerangeopt(repo, opts):
1030 """Parse --line-range log option and return a list of tuples (filename,
1028 """Parse --line-range log option and return a list of tuples (filename,
1031 (fromline, toline)).
1029 (fromline, toline)).
1032 """
1030 """
1033 linerangebyfname = []
1031 linerangebyfname = []
1034 for pat in opts.get(b'line_range', []):
1032 for pat in opts.get(b'line_range', []):
1035 try:
1033 try:
1036 pat, linerange = pat.rsplit(b',', 1)
1034 pat, linerange = pat.rsplit(b',', 1)
1037 except ValueError:
1035 except ValueError:
1038 raise error.Abort(_(b'malformatted line-range pattern %s') % pat)
1036 raise error.Abort(_(b'malformatted line-range pattern %s') % pat)
1039 try:
1037 try:
1040 fromline, toline = map(int, linerange.split(b':'))
1038 fromline, toline = map(int, linerange.split(b':'))
1041 except ValueError:
1039 except ValueError:
1042 raise error.Abort(_(b"invalid line range for %s") % pat)
1040 raise error.Abort(_(b"invalid line range for %s") % pat)
1043 msg = _(b"line range pattern '%s' must match exactly one file") % pat
1041 msg = _(b"line range pattern '%s' must match exactly one file") % pat
1044 fname = scmutil.parsefollowlinespattern(repo, None, pat, msg)
1042 fname = scmutil.parsefollowlinespattern(repo, None, pat, msg)
1045 linerangebyfname.append(
1043 linerangebyfname.append(
1046 (fname, util.processlinerange(fromline, toline))
1044 (fname, util.processlinerange(fromline, toline))
1047 )
1045 )
1048 return linerangebyfname
1046 return linerangebyfname
1049
1047
1050
1048
def getlinerangerevs(repo, userrevs, opts):
    """Return (revs, differ).

    "revs" are revisions obtained by processing "line-range" log options and
    walking block ancestors of each specified file/line-range.

    "differ" is a changesetdiffer with pre-configured file matcher and hunks
    filter.
    """
    wctx = repo[None]

    # Two-levels map of "rev -> file ctx -> [line range]".
    linerangesbyrev = {}
    for fname, (fromline, toline) in _parselinerangeopt(repo, opts):
        # --line-range only supports files present in the working parent.
        if fname not in wctx:
            raise error.Abort(
                _(b'cannot follow file not in parent revision: "%s"') % fname
            )
        fctx = wctx.filectx(fname)
        # Walk ancestors of the requested block, recording the line range
        # that each touched revision contributes.
        for fctx, linerange in dagop.blockancestors(fctx, fromline, toline):
            rev = fctx.introrev()
            if rev is None:
                # Uncommitted change: attribute it to the working directory.
                rev = wdirrev
            # Restrict to the revisions the user asked for.
            if rev not in userrevs:
                continue
            linerangesbyrev.setdefault(rev, {}).setdefault(
                fctx.path(), []
            ).append(linerange)

    # Identity filter used for revisions with no recorded line ranges.
    def nofilterhunksfn(fctx, hunks):
        return hunks

    def hunksfilter(ctx):
        # Build a per-changeset hunk filter keeping only hunks overlapping
        # one of the followed line ranges.
        fctxlineranges = linerangesbyrev.get(scmutil.intrev(ctx))
        if fctxlineranges is None:
            return nofilterhunksfn

        def filterfn(fctx, hunks):
            lineranges = fctxlineranges.get(fctx.path())
            if lineranges is not None:
                for hr, lines in hunks:
                    if hr is None:  # binary
                        yield hr, lines
                        continue
                    # hr[2:] is the (fromline, toline) pair of the new file.
                    if any(mdiff.hunkinrange(hr[2:], lr) for lr in lineranges):
                        yield hr, lines
            else:
                # File not followed: pass all hunks through unchanged.
                for hunk in hunks:
                    yield hunk

        return filterfn

    def filematcher(ctx):
        # Match only the files followed at this revision.
        files = list(linerangesbyrev.get(scmutil.intrev(ctx), []))
        return scmutil.matchfiles(repo, files)

    # Newest first, matching the usual log ordering.
    revs = sorted(linerangesbyrev, reverse=True)

    differ = changesetdiffer()
    differ._makefilematcher = filematcher
    differ._makehunksfilter = hunksfilter
    return smartset.baseset(revs), differ
1113
1111
1114
1112
def _graphnodeformatter(ui, displayer):
    """Return a ``f(repo, ctx, cache)`` callable rendering the graph node
    character for a changeset, honoring the ui.graphnodetemplate config.
    """
    spec = ui.config(b'ui', b'graphnodetemplate')
    if not spec:
        # No template configured: fast path for "{graphnode}".
        return templatekw.getgraphnode

    spec = templater.unquotestring(spec)
    if isinstance(displayer, changesettemplater):
        # Reuse the displayer's cache of slow templates.
        resources = displayer._tresources
    else:
        resources = formatter.templateresources(ui)
    tmpl = formatter.maketemplater(
        ui, spec, defaults=templatekw.keywords, resources=resources
    )

    def formatnode(repo, ctx, cache):
        return tmpl.renderdefault({b'ctx': ctx, b'repo': repo})

    return formatnode
1135
1133
1136
1134
def displaygraph(ui, repo, dag, displayer, edgefn, getcopies=None, props=None):
    """Render *dag* as an ASCII revision graph through *displayer*.

    ``dag`` yields ``(rev, type, ctx, parents)`` tuples; ``edgefn`` produces
    the edge layout for each node; ``getcopies``, if given, supplies copy
    information per changeset; ``props`` are extra template properties.
    """
    props = props or {}
    formatnode = _graphnodeformatter(ui, displayer)
    state = graphmod.asciistate()
    styles = state.styles

    # only set graph styling if HGPLAIN is not set.
    if ui.plain(b'graph'):
        # set all edge styles to |, the default pre-3.8 behaviour
        styles.update(dict.fromkeys(styles, b'|'))
    else:
        edgetypes = {
            b'parent': graphmod.PARENT,
            b'grandparent': graphmod.GRANDPARENT,
            b'missing': graphmod.MISSINGPARENT,
        }
        for name, key in edgetypes.items():
            # experimental config: experimental.graphstyle.*
            styles[key] = ui.config(
                b'experimental', b'graphstyle.%s' % name, styles[key]
            )
            # An empty style means "draw no edge" for this edge type.
            if not styles[key]:
                styles[key] = None

    # experimental config: experimental.graphshorten
    state.graphshorten = ui.configbool(b'experimental', b'graphshorten')

    formatnode_cache = {}
    for rev, type, ctx, parents in dag:
        char = formatnode(repo, ctx, formatnode_cache)
        copies = getcopies(ctx) if getcopies else None
        edges = edgefn(type, char, state, rev, parents)
        # Pull the first edge eagerly: its width is needed before showing
        # the changeset text.
        firstedge = next(edges)
        width = firstedge[2]
        displayer.show(
            ctx, copies=copies, graphwidth=width, **pycompat.strkwargs(props)
        )
        # Grab the buffered output for this rev and split it into lines
        # so the graph renderer can interleave them with the edges.
        lines = displayer.hunk.pop(rev).split(b'\n')
        if not lines[-1]:
            # Drop the empty element left by a trailing newline.
            del lines[-1]
        displayer.flush(ctx)
        # Re-chain the eagerly consumed first edge with the rest.
        for type, char, width, coldata in itertools.chain([firstedge], edges):
            graphmod.ascii(ui, state, type, char, lines, coldata)
            # Text lines are emitted alongside the first edge only.
            lines = []
    displayer.close()
1182
1180
1183
1181
def displaygraphrevs(ui, repo, revs, displayer, getrenamed):
    """Show *revs* as an ASCII graph using the default DAG walker/edges."""
    displaygraph(
        ui,
        repo,
        graphmod.dagwalker(repo, revs),
        displayer,
        graphmod.asciiedges,
        getrenamed,
    )
1187
1185
1188
1186
def displayrevs(ui, repo, revs, displayer, getcopies):
    """Show each revision in *revs* through *displayer* (non-graph log)."""
    for rev in revs:
        ctx = repo[rev]
        if getcopies:
            copies = getcopies(ctx)
        else:
            copies = None
        displayer.show(ctx, copies=copies)
        displayer.flush(ctx)
    displayer.close()
1196
1194
1197
1195
def checkunsupportedgraphflags(pats, opts):
    """Abort if an option incompatible with -G/--graph is enabled."""
    for op in [b"newest_first"]:
        if opts.get(op):
            raise error.Abort(
                _(b"-G/--graph option is incompatible with --%s")
                % op.replace(b"_", b"-")
            )
1205
1203
1206
1204
def graphrevs(repo, nodes, opts):
    """Reverse *nodes* in place, apply --limit, and return a graph DAG.

    Note: mutates the *nodes* list (reversal) as a side effect.
    """
    nodes.reverse()
    limit = getlimit(opts)
    nodes = nodes if limit is None else nodes[:limit]
    return graphmod.nodes(repo, nodes)
General Comments 0
You need to be logged in to leave comments. Login now