hgweb: move ismember from `hgweb.common` to `scmutil`...
marmoute
r51314:4bddc2f7 default
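
The change itself is small: `hgweb.common` gains a `scmutil` import and replaces its `def ismember(...)` with the alias `ismember = scmutil.ismember`, so the canonical implementation now lives in `scmutil`. One point for extension authors: the docstring advertises that the check "can be overridden by extensions", and hgweb's call sites resolve the name through the `hgweb.common` module namespace at call time, while any future core callers would reach it as `scmutil.ismember`. A minimal sketch (not part of this changeset; the group table and the `@team` entry syntax are hypothetical) of an extension installing its own membership logic by patching both names:

    from mercurial import scmutil
    from mercurial.hgweb import common as hgwebcommon

    GROUPS = {b'committers': {b'alice', b'bob'}}  # hypothetical group table


    def ismember(ui, username, userlist):
        # Preserve the stock semantics: a lone b'*' entry admits everyone.
        if userlist == [b'*'] or username in userlist:
            return True
        # Hypothetical extra rule: b'@team' entries name groups in GROUPS.
        teams = [entry[1:] for entry in userlist if entry.startswith(b'@')]
        return any(username in GROUPS.get(team, set()) for team in teams)


    def extsetup(ui):
        # Patch both module attributes so hgweb call sites and direct
        # scmutil.ismember callers observe the same policy.
        scmutil.ismember = ismember
        hgwebcommon.ismember = ismember
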
--- a/mercurial/hgweb/common.py
+++ b/mercurial/hgweb/common.py
@@ -1,314 +1,307 @@
 # hgweb/common.py - Utility functions needed by hgweb_mod and hgwebdir_mod
 #
 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
 # Copyright 2005, 2006 Olivia Mackall <olivia@selenic.com>
 #
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.


 import base64
 import errno
 import mimetypes
 import os
 import stat

 from ..i18n import _
 from ..pycompat import (
     getattr,
     open,
 )
 from .. import (
     encoding,
     pycompat,
+    scmutil,
     templater,
     util,
 )

 httpserver = util.httpserver

 HTTP_OK = 200
 HTTP_CREATED = 201
 HTTP_NOT_MODIFIED = 304
 HTTP_BAD_REQUEST = 400
 HTTP_UNAUTHORIZED = 401
 HTTP_FORBIDDEN = 403
 HTTP_NOT_FOUND = 404
 HTTP_METHOD_NOT_ALLOWED = 405
 HTTP_NOT_ACCEPTABLE = 406
 HTTP_UNSUPPORTED_MEDIA_TYPE = 415
 HTTP_SERVER_ERROR = 500

-
-def ismember(ui, username, userlist):
-    """Check if username is a member of userlist.
-
-    If userlist has a single '*' member, all users are considered members.
-    Can be overridden by extensions to provide more complex authorization
-    schemes.
-    """
-    return userlist == [b'*'] or username in userlist
+ismember = scmutil.ismember


 def hashiddenaccess(repo, req):
     if bool(req.qsparams.get(b'access-hidden')):
         # Disable this by default for now. Main risk is to get critical
         # information exposed through this. This is expecially risky if
         # someone decided to make a changeset secret for good reason, but
         # its predecessors are still draft.
         #
         # The feature is currently experimental, so we can still decide to
         # change the default.
         ui = repo.ui
         allow = ui.configlist(b'experimental', b'server.allow-hidden-access')
         user = req.remoteuser
         if allow and ismember(ui, user, allow):
             return True
         else:
             msg = (
                 _(
                     b'ignoring request to access hidden changeset by '
                     b'unauthorized user: %r\n'
                 )
                 % user
             )
             ui.warn(msg)
     return False


 def checkauthz(hgweb, req, op):
     """Check permission for operation based on request data (including
     authentication info). Return if op allowed, else raise an ErrorResponse
     exception."""

     user = req.remoteuser

     deny_read = hgweb.configlist(b'web', b'deny_read')
     if deny_read and (not user or ismember(hgweb.repo.ui, user, deny_read)):
         raise ErrorResponse(HTTP_UNAUTHORIZED, b'read not authorized')

     allow_read = hgweb.configlist(b'web', b'allow_read')
     if allow_read and (not ismember(hgweb.repo.ui, user, allow_read)):
         raise ErrorResponse(HTTP_UNAUTHORIZED, b'read not authorized')

     if op == b'pull' and not hgweb.allowpull:
         raise ErrorResponse(HTTP_UNAUTHORIZED, b'pull not authorized')
     elif op == b'pull' or op is None:  # op is None for interface requests
         return

     # Allow LFS uploading via PUT requests
     if op == b'upload':
         if req.method != b'PUT':
             msg = b'upload requires PUT request'
             raise ErrorResponse(HTTP_METHOD_NOT_ALLOWED, msg)
     # enforce that you can only push using POST requests
     elif req.method != b'POST':
         msg = b'push requires POST request'
         raise ErrorResponse(HTTP_METHOD_NOT_ALLOWED, msg)

     # require ssl by default for pushing, auth info cannot be sniffed
     # and replayed
     if hgweb.configbool(b'web', b'push_ssl') and req.urlscheme != b'https':
         raise ErrorResponse(HTTP_FORBIDDEN, b'ssl required')

     deny = hgweb.configlist(b'web', b'deny_push')
     if deny and (not user or ismember(hgweb.repo.ui, user, deny)):
         raise ErrorResponse(HTTP_UNAUTHORIZED, b'push not authorized')

     allow = hgweb.configlist(b'web', b'allow-push')
     if not (allow and ismember(hgweb.repo.ui, user, allow)):
         raise ErrorResponse(HTTP_UNAUTHORIZED, b'push not authorized')


 # Hooks for hgweb permission checks; extensions can add hooks here.
 # Each hook is invoked like this: hook(hgweb, request, operation),
 # where operation is either read, pull, push or upload. Hooks should either
 # raise an ErrorResponse exception, or just return.
 #
 # It is possible to do both authentication and authorization through
 # this.
 permhooks = [checkauthz]


 class ErrorResponse(Exception):
     def __init__(self, code, message=None, headers=None):
         if message is None:
             message = _statusmessage(code)
         Exception.__init__(self, pycompat.sysstr(message))
         self.code = code
         if headers is None:
             headers = []
         self.headers = headers
         self.message = message


 class continuereader:
     """File object wrapper to handle HTTP 100-continue.

     This is used by servers so they automatically handle Expect: 100-continue
     request headers. On first read of the request body, the 100 Continue
     response is sent. This should trigger the client into actually sending
     the request body.
     """

     def __init__(self, f, write):
         self.f = f
         self._write = write
         self.continued = False

     def read(self, amt=-1):
         if not self.continued:
             self.continued = True
             self._write(b'HTTP/1.1 100 Continue\r\n\r\n')
         return self.f.read(amt)

     def __getattr__(self, attr):
         if attr in (b'close', b'readline', b'readlines', b'__iter__'):
             return getattr(self.f, attr)
         raise AttributeError


 def _statusmessage(code):
     responses = httpserver.basehttprequesthandler.responses
     return pycompat.bytesurl(responses.get(code, ('Error', 'Unknown error'))[0])


 def statusmessage(code, message=None):
     return b'%d %s' % (code, message or _statusmessage(code))


 def get_stat(spath, fn):
     """stat fn if it exists, spath otherwise"""
     cl_path = os.path.join(spath, fn)
     if os.path.exists(cl_path):
         return os.stat(cl_path)
     else:
         return os.stat(spath)


 def get_mtime(spath):
     return get_stat(spath, b"00changelog.i")[stat.ST_MTIME]


 def ispathsafe(path):
     """Determine if a path is safe to use for filesystem access."""
     parts = path.split(b'/')
     for part in parts:
         if (
             part in (b'', pycompat.oscurdir, pycompat.ospardir)
             or pycompat.ossep in part
             or pycompat.osaltsep is not None
             and pycompat.osaltsep in part
         ):
             return False

     return True


 def staticfile(templatepath, directory, fname, res):
     """return a file inside directory with guessed Content-Type header

     fname always uses '/' as directory separator and isn't allowed to
     contain unusual path components.
     Content-Type is guessed using the mimetypes module.
     Return an empty string if fname is illegal or file not found.

     """
     if not ispathsafe(fname):
         return

     if not directory:
         tp = templatepath or templater.templatedir()
         if tp is not None:
             directory = os.path.join(tp, b'static')

     fpath = os.path.join(*fname.split(b'/'))
     ct = pycompat.sysbytes(
         mimetypes.guess_type(pycompat.fsdecode(fpath))[0] or r"text/plain"
     )
     path = os.path.join(directory, fpath)
     try:
         os.stat(path)
         with open(path, b'rb') as fh:
             data = fh.read()
     except TypeError:
         raise ErrorResponse(HTTP_SERVER_ERROR, b'illegal filename')
     except OSError as err:
         if err.errno == errno.ENOENT:
             raise ErrorResponse(HTTP_NOT_FOUND)
         else:
             raise ErrorResponse(
                 HTTP_SERVER_ERROR, encoding.strtolocal(err.strerror)
             )

     res.headers[b'Content-Type'] = ct
     res.setbodybytes(data)
     return res


 def paritygen(stripecount, offset=0):
     """count parity of horizontal stripes for easier reading"""
     if stripecount and offset:
         # account for offset, e.g. due to building the list in reverse
         count = (stripecount + offset) % stripecount
         parity = (stripecount + offset) // stripecount & 1
     else:
         count = 0
         parity = 0
     while True:
         yield parity
         count += 1
         if stripecount and count >= stripecount:
             parity = 1 - parity
             count = 0


 def get_contact(config):
     """Return repo contact information or empty string.

     web.contact is the primary source, but if that is not set, try
     ui.username or $EMAIL as a fallback to display something useful.
     """
     return (
         config(b"web", b"contact")
         or config(b"ui", b"username")
         or encoding.environ.get(b"EMAIL")
         or b""
     )


 def cspvalues(ui):
     """Obtain the Content-Security-Policy header and nonce value.

     Returns a 2-tuple of the CSP header value and the nonce value.

     First value is ``None`` if CSP isn't enabled. Second value is ``None``
     if CSP isn't enabled or if the CSP header doesn't need a nonce.
     """
     # Without demandimport, "import uuid" could have an immediate side-effect
     # running "ldconfig" on Linux trying to find libuuid.
     # With Python <= 2.7.12, that "ldconfig" is run via a shell and the shell
     # may pollute the terminal with:
     #
     # shell-init: error retrieving current directory: getcwd: cannot access
     # parent directories: No such file or directory
     #
     # Python >= 2.7.13 has fixed it by running "ldconfig" directly without a
     # shell (hg changeset a09ae70f3489).
     #
     # Moved "import uuid" from here so it's executed after we know we have
     # a sane cwd (i.e. after dispatch.py cwd check).
     #
     # We can move it back once we no longer need Python <= 2.7.12 support.
     import uuid

     # Don't allow untrusted CSP setting since it be disable protections
     # from a trusted/global source.
     csp = ui.config(b'web', b'csp', untrusted=False)
     nonce = None

     if csp and b'%nonce%' in csp:
         nonce = base64.urlsafe_b64encode(uuid.uuid4().bytes).rstrip(b'=')
         csp = csp.replace(b'%nonce%', nonce)

     return csp, nonce
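
For reference, the membership test that `checkauthz` and `hashiddenaccess` build on stays deliberately simple: the user lists come from `ui.configlist` options such as `web.allow-push`, `web.deny_read`, and `experimental.server.allow-hidden-access`, and a list whose sole entry is `*` admits everyone. A behaviour sketch of the moved helper (the `ui` argument is unused by the stock implementation but kept in the signature for overrides):

    def ismember(ui, username, userlist):
        """Check if username is a member of userlist."""
        return userlist == [b'*'] or username in userlist


    assert ismember(None, b'alice', [b'*'])              # lone '*' admits everyone
    assert ismember(None, b'alice', [b'alice', b'bob'])  # plain membership
    assert not ismember(None, b'mallory', [b'alice', b'bob'])
    assert not ismember(None, b'alice', [b'*', b'bob'])  # '*' only counts alone
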
--- a/mercurial/scmutil.py
+++ b/mercurial/scmutil.py
@@ -1,2315 +1,2325 @@
1 # scmutil.py - Mercurial core utility functions
1 # scmutil.py - Mercurial core utility functions
2 #
2 #
3 # Copyright Olivia Mackall <olivia@selenic.com>
3 # Copyright Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8
8
9 import binascii
9 import binascii
10 import errno
10 import errno
11 import glob
11 import glob
12 import os
12 import os
13 import posixpath
13 import posixpath
14 import re
14 import re
15 import subprocess
15 import subprocess
16 import weakref
16 import weakref
17
17
18 from .i18n import _
18 from .i18n import _
19 from .node import (
19 from .node import (
20 bin,
20 bin,
21 hex,
21 hex,
22 nullrev,
22 nullrev,
23 short,
23 short,
24 wdirrev,
24 wdirrev,
25 )
25 )
26 from .pycompat import getattr
26 from .pycompat import getattr
27 from .thirdparty import attr
27 from .thirdparty import attr
28 from . import (
28 from . import (
29 copies as copiesmod,
29 copies as copiesmod,
30 encoding,
30 encoding,
31 error,
31 error,
32 match as matchmod,
32 match as matchmod,
33 obsolete,
33 obsolete,
34 obsutil,
34 obsutil,
35 pathutil,
35 pathutil,
36 phases,
36 phases,
37 policy,
37 policy,
38 pycompat,
38 pycompat,
39 requirements as requirementsmod,
39 requirements as requirementsmod,
40 revsetlang,
40 revsetlang,
41 similar,
41 similar,
42 smartset,
42 smartset,
43 url,
43 url,
44 util,
44 util,
45 vfs,
45 vfs,
46 )
46 )
47
47
48 from .utils import (
48 from .utils import (
49 hashutil,
49 hashutil,
50 procutil,
50 procutil,
51 stringutil,
51 stringutil,
52 )
52 )
53
53
54 if pycompat.iswindows:
54 if pycompat.iswindows:
55 from . import scmwindows as scmplatform
55 from . import scmwindows as scmplatform
56 else:
56 else:
57 from . import scmposix as scmplatform
57 from . import scmposix as scmplatform
58
58
59 parsers = policy.importmod('parsers')
59 parsers = policy.importmod('parsers')
60 rustrevlog = policy.importrust('revlog')
60 rustrevlog = policy.importrust('revlog')
61
61
62 termsize = scmplatform.termsize
62 termsize = scmplatform.termsize
63
63
64
64
65 @attr.s(slots=True, repr=False)
65 @attr.s(slots=True, repr=False)
66 class status:
66 class status:
67 """Struct with a list of files per status.
67 """Struct with a list of files per status.
68
68
69 The 'deleted', 'unknown' and 'ignored' properties are only
69 The 'deleted', 'unknown' and 'ignored' properties are only
70 relevant to the working copy.
70 relevant to the working copy.
71 """
71 """
72
72
73 modified = attr.ib(default=attr.Factory(list))
73 modified = attr.ib(default=attr.Factory(list))
74 added = attr.ib(default=attr.Factory(list))
74 added = attr.ib(default=attr.Factory(list))
75 removed = attr.ib(default=attr.Factory(list))
75 removed = attr.ib(default=attr.Factory(list))
76 deleted = attr.ib(default=attr.Factory(list))
76 deleted = attr.ib(default=attr.Factory(list))
77 unknown = attr.ib(default=attr.Factory(list))
77 unknown = attr.ib(default=attr.Factory(list))
78 ignored = attr.ib(default=attr.Factory(list))
78 ignored = attr.ib(default=attr.Factory(list))
79 clean = attr.ib(default=attr.Factory(list))
79 clean = attr.ib(default=attr.Factory(list))
80
80
81 def __iter__(self):
81 def __iter__(self):
82 yield self.modified
82 yield self.modified
83 yield self.added
83 yield self.added
84 yield self.removed
84 yield self.removed
85 yield self.deleted
85 yield self.deleted
86 yield self.unknown
86 yield self.unknown
87 yield self.ignored
87 yield self.ignored
88 yield self.clean
88 yield self.clean
89
89
90 def __repr__(self):
90 def __repr__(self):
91 return (
91 return (
92 r'<status modified=%s, added=%s, removed=%s, deleted=%s, '
92 r'<status modified=%s, added=%s, removed=%s, deleted=%s, '
93 r'unknown=%s, ignored=%s, clean=%s>'
93 r'unknown=%s, ignored=%s, clean=%s>'
94 ) % tuple(pycompat.sysstr(stringutil.pprint(v)) for v in self)
94 ) % tuple(pycompat.sysstr(stringutil.pprint(v)) for v in self)
95
95
96
96
97 def itersubrepos(ctx1, ctx2):
97 def itersubrepos(ctx1, ctx2):
98 """find subrepos in ctx1 or ctx2"""
98 """find subrepos in ctx1 or ctx2"""
99 # Create a (subpath, ctx) mapping where we prefer subpaths from
99 # Create a (subpath, ctx) mapping where we prefer subpaths from
100 # ctx1. The subpaths from ctx2 are important when the .hgsub file
100 # ctx1. The subpaths from ctx2 are important when the .hgsub file
101 # has been modified (in ctx2) but not yet committed (in ctx1).
101 # has been modified (in ctx2) but not yet committed (in ctx1).
102 subpaths = dict.fromkeys(ctx2.substate, ctx2)
102 subpaths = dict.fromkeys(ctx2.substate, ctx2)
103 subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
103 subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
104
104
105 missing = set()
105 missing = set()
106
106
107 for subpath in ctx2.substate:
107 for subpath in ctx2.substate:
108 if subpath not in ctx1.substate:
108 if subpath not in ctx1.substate:
109 del subpaths[subpath]
109 del subpaths[subpath]
110 missing.add(subpath)
110 missing.add(subpath)
111
111
112 for subpath, ctx in sorted(subpaths.items()):
112 for subpath, ctx in sorted(subpaths.items()):
113 yield subpath, ctx.sub(subpath)
113 yield subpath, ctx.sub(subpath)
114
114
115 # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
115 # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
116 # status and diff will have an accurate result when it does
116 # status and diff will have an accurate result when it does
117 # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
117 # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
118 # against itself.
118 # against itself.
119 for subpath in missing:
119 for subpath in missing:
120 yield subpath, ctx2.nullsub(subpath, ctx1)
120 yield subpath, ctx2.nullsub(subpath, ctx1)
121
121
122
122
123 def nochangesfound(ui, repo, excluded=None):
123 def nochangesfound(ui, repo, excluded=None):
124 """Report no changes for push/pull, excluded is None or a list of
124 """Report no changes for push/pull, excluded is None or a list of
125 nodes excluded from the push/pull.
125 nodes excluded from the push/pull.
126 """
126 """
127 secretlist = []
127 secretlist = []
128 if excluded:
128 if excluded:
129 for n in excluded:
129 for n in excluded:
130 ctx = repo[n]
130 ctx = repo[n]
131 if ctx.phase() >= phases.secret and not ctx.extinct():
131 if ctx.phase() >= phases.secret and not ctx.extinct():
132 secretlist.append(n)
132 secretlist.append(n)
133
133
134 if secretlist:
134 if secretlist:
135 ui.status(
135 ui.status(
136 _(b"no changes found (ignored %d secret changesets)\n")
136 _(b"no changes found (ignored %d secret changesets)\n")
137 % len(secretlist)
137 % len(secretlist)
138 )
138 )
139 else:
139 else:
140 ui.status(_(b"no changes found\n"))
140 ui.status(_(b"no changes found\n"))
141
141
142
142
143 def callcatch(ui, func):
143 def callcatch(ui, func):
144 """call func() with global exception handling
144 """call func() with global exception handling
145
145
146 return func() if no exception happens. otherwise do some error handling
146 return func() if no exception happens. otherwise do some error handling
147 and return an exit code accordingly. does not handle all exceptions.
147 and return an exit code accordingly. does not handle all exceptions.
148 """
148 """
149 coarse_exit_code = -1
149 coarse_exit_code = -1
150 detailed_exit_code = -1
150 detailed_exit_code = -1
151 try:
151 try:
152 try:
152 try:
153 return func()
153 return func()
154 except: # re-raises
154 except: # re-raises
155 ui.traceback()
155 ui.traceback()
156 raise
156 raise
157 # Global exception handling, alphabetically
157 # Global exception handling, alphabetically
158 # Mercurial-specific first, followed by built-in and library exceptions
158 # Mercurial-specific first, followed by built-in and library exceptions
159 except error.LockHeld as inst:
159 except error.LockHeld as inst:
160 detailed_exit_code = 20
160 detailed_exit_code = 20
161 if inst.errno == errno.ETIMEDOUT:
161 if inst.errno == errno.ETIMEDOUT:
162 reason = _(b'timed out waiting for lock held by %r') % (
162 reason = _(b'timed out waiting for lock held by %r') % (
163 pycompat.bytestr(inst.locker)
163 pycompat.bytestr(inst.locker)
164 )
164 )
165 else:
165 else:
166 reason = _(b'lock held by %r') % inst.locker
166 reason = _(b'lock held by %r') % inst.locker
167 ui.error(
167 ui.error(
168 _(b"abort: %s: %s\n")
168 _(b"abort: %s: %s\n")
169 % (inst.desc or stringutil.forcebytestr(inst.filename), reason)
169 % (inst.desc or stringutil.forcebytestr(inst.filename), reason)
170 )
170 )
171 if not inst.locker:
171 if not inst.locker:
172 ui.error(_(b"(lock might be very busy)\n"))
172 ui.error(_(b"(lock might be very busy)\n"))
173 except error.LockUnavailable as inst:
173 except error.LockUnavailable as inst:
174 detailed_exit_code = 20
174 detailed_exit_code = 20
175 ui.error(
175 ui.error(
176 _(b"abort: could not lock %s: %s\n")
176 _(b"abort: could not lock %s: %s\n")
177 % (
177 % (
178 inst.desc or stringutil.forcebytestr(inst.filename),
178 inst.desc or stringutil.forcebytestr(inst.filename),
179 encoding.strtolocal(inst.strerror),
179 encoding.strtolocal(inst.strerror),
180 )
180 )
181 )
181 )
182 except error.RepoError as inst:
182 except error.RepoError as inst:
183 if isinstance(inst, error.RepoLookupError):
183 if isinstance(inst, error.RepoLookupError):
184 detailed_exit_code = 10
184 detailed_exit_code = 10
185 ui.error(_(b"abort: %s\n") % inst)
185 ui.error(_(b"abort: %s\n") % inst)
186 if inst.hint:
186 if inst.hint:
187 ui.error(_(b"(%s)\n") % inst.hint)
187 ui.error(_(b"(%s)\n") % inst.hint)
188 except error.ResponseError as inst:
188 except error.ResponseError as inst:
189 ui.error(_(b"abort: %s") % inst.args[0])
189 ui.error(_(b"abort: %s") % inst.args[0])
190 msg = inst.args[1]
190 msg = inst.args[1]
191 if isinstance(msg, type(u'')):
191 if isinstance(msg, type(u'')):
192 msg = pycompat.sysbytes(msg)
192 msg = pycompat.sysbytes(msg)
193 if msg is None:
193 if msg is None:
194 ui.error(b"\n")
194 ui.error(b"\n")
195 elif not isinstance(msg, bytes):
195 elif not isinstance(msg, bytes):
196 ui.error(b" %r\n" % (msg,))
196 ui.error(b" %r\n" % (msg,))
197 elif not msg:
197 elif not msg:
198 ui.error(_(b" empty string\n"))
198 ui.error(_(b" empty string\n"))
199 else:
199 else:
200 ui.error(b"\n%r\n" % pycompat.bytestr(stringutil.ellipsis(msg)))
200 ui.error(b"\n%r\n" % pycompat.bytestr(stringutil.ellipsis(msg)))
201 except error.CensoredNodeError as inst:
201 except error.CensoredNodeError as inst:
202 ui.error(_(b"abort: file censored %s\n") % inst)
202 ui.error(_(b"abort: file censored %s\n") % inst)
203 except error.WdirUnsupported:
203 except error.WdirUnsupported:
204 ui.error(_(b"abort: working directory revision cannot be specified\n"))
204 ui.error(_(b"abort: working directory revision cannot be specified\n"))
205 except error.Error as inst:
205 except error.Error as inst:
206 if inst.detailed_exit_code is not None:
206 if inst.detailed_exit_code is not None:
207 detailed_exit_code = inst.detailed_exit_code
207 detailed_exit_code = inst.detailed_exit_code
208 if inst.coarse_exit_code is not None:
208 if inst.coarse_exit_code is not None:
209 coarse_exit_code = inst.coarse_exit_code
209 coarse_exit_code = inst.coarse_exit_code
210 ui.error(inst.format())
210 ui.error(inst.format())
211 except error.WorkerError as inst:
211 except error.WorkerError as inst:
212 # Don't print a message -- the worker already should have
212 # Don't print a message -- the worker already should have
213 return inst.status_code
213 return inst.status_code
214 except ImportError as inst:
214 except ImportError as inst:
215 ui.error(_(b"abort: %s\n") % stringutil.forcebytestr(inst))
215 ui.error(_(b"abort: %s\n") % stringutil.forcebytestr(inst))
216 m = stringutil.forcebytestr(inst).split()[-1]
216 m = stringutil.forcebytestr(inst).split()[-1]
217 if m in b"mpatch bdiff".split():
217 if m in b"mpatch bdiff".split():
218 ui.error(_(b"(did you forget to compile extensions?)\n"))
218 ui.error(_(b"(did you forget to compile extensions?)\n"))
219 elif m in b"zlib".split():
219 elif m in b"zlib".split():
220 ui.error(_(b"(is your Python install correct?)\n"))
220 ui.error(_(b"(is your Python install correct?)\n"))
221 except util.urlerr.httperror as inst:
221 except util.urlerr.httperror as inst:
222 detailed_exit_code = 100
222 detailed_exit_code = 100
223 ui.error(_(b"abort: %s\n") % stringutil.forcebytestr(inst))
223 ui.error(_(b"abort: %s\n") % stringutil.forcebytestr(inst))
224 except util.urlerr.urlerror as inst:
224 except util.urlerr.urlerror as inst:
225 detailed_exit_code = 100
225 detailed_exit_code = 100
226 try: # usually it is in the form (errno, strerror)
226 try: # usually it is in the form (errno, strerror)
227 reason = inst.reason.args[1]
227 reason = inst.reason.args[1]
228 except (AttributeError, IndexError):
228 except (AttributeError, IndexError):
229 # it might be anything, for example a string
229 # it might be anything, for example a string
230 reason = inst.reason
230 reason = inst.reason
231 if isinstance(reason, str):
231 if isinstance(reason, str):
232 # SSLError of Python 2.7.9 contains a unicode
232 # SSLError of Python 2.7.9 contains a unicode
233 reason = encoding.unitolocal(reason)
233 reason = encoding.unitolocal(reason)
234 ui.error(_(b"abort: error: %s\n") % stringutil.forcebytestr(reason))
234 ui.error(_(b"abort: error: %s\n") % stringutil.forcebytestr(reason))
235 except (IOError, OSError) as inst:
235 except (IOError, OSError) as inst:
236 if (
236 if (
237 util.safehasattr(inst, b"args")
237 util.safehasattr(inst, b"args")
238 and inst.args
238 and inst.args
239 and inst.args[0] == errno.EPIPE
239 and inst.args[0] == errno.EPIPE
240 ):
240 ):
241 pass
241 pass
242 elif getattr(inst, "strerror", None): # common IOError or OSError
242 elif getattr(inst, "strerror", None): # common IOError or OSError
243 if getattr(inst, "filename", None) is not None:
243 if getattr(inst, "filename", None) is not None:
244 ui.error(
244 ui.error(
245 _(b"abort: %s: '%s'\n")
245 _(b"abort: %s: '%s'\n")
246 % (
246 % (
247 encoding.strtolocal(inst.strerror),
247 encoding.strtolocal(inst.strerror),
248 stringutil.forcebytestr(inst.filename),
248 stringutil.forcebytestr(inst.filename),
249 )
249 )
250 )
250 )
251 else:
251 else:
252 ui.error(_(b"abort: %s\n") % encoding.strtolocal(inst.strerror))
252 ui.error(_(b"abort: %s\n") % encoding.strtolocal(inst.strerror))
253 else: # suspicious IOError
253 else: # suspicious IOError
254 raise
254 raise
255 except MemoryError:
255 except MemoryError:
256 ui.error(_(b"abort: out of memory\n"))
256 ui.error(_(b"abort: out of memory\n"))
257 except SystemExit as inst:
257 except SystemExit as inst:
258 # Commands shouldn't sys.exit directly, but give a return code.
258 # Commands shouldn't sys.exit directly, but give a return code.
259 # Just in case catch this and and pass exit code to caller.
259 # Just in case catch this and and pass exit code to caller.
260 detailed_exit_code = 254
260 detailed_exit_code = 254
261 coarse_exit_code = inst.code
261 coarse_exit_code = inst.code
262
262
263 if ui.configbool(b'ui', b'detailed-exit-code'):
263 if ui.configbool(b'ui', b'detailed-exit-code'):
264 return detailed_exit_code
264 return detailed_exit_code
265 else:
265 else:
266 return coarse_exit_code
266 return coarse_exit_code
267
267
268
268
269 def checknewlabel(repo, lbl, kind):
269 def checknewlabel(repo, lbl, kind):
270 # Do not use the "kind" parameter in ui output.
270 # Do not use the "kind" parameter in ui output.
271 # It makes strings difficult to translate.
271 # It makes strings difficult to translate.
272 if lbl in [b'tip', b'.', b'null']:
272 if lbl in [b'tip', b'.', b'null']:
273 raise error.InputError(_(b"the name '%s' is reserved") % lbl)
273 raise error.InputError(_(b"the name '%s' is reserved") % lbl)
274 for c in (b':', b'\0', b'\n', b'\r'):
274 for c in (b':', b'\0', b'\n', b'\r'):
275 if c in lbl:
275 if c in lbl:
276 raise error.InputError(
276 raise error.InputError(
277 _(b"%r cannot be used in a name") % pycompat.bytestr(c)
277 _(b"%r cannot be used in a name") % pycompat.bytestr(c)
278 )
278 )
279 try:
279 try:
280 int(lbl)
280 int(lbl)
281 if b'_' in lbl:
281 if b'_' in lbl:
282 # If label contains underscores, Python might consider it an
282 # If label contains underscores, Python might consider it an
283 # integer (with "_" as visual separators), but we do not.
283 # integer (with "_" as visual separators), but we do not.
284 # See PEP 515 - Underscores in Numeric Literals.
284 # See PEP 515 - Underscores in Numeric Literals.
285 raise ValueError
285 raise ValueError
286 raise error.InputError(_(b"cannot use an integer as a name"))
286 raise error.InputError(_(b"cannot use an integer as a name"))
287 except ValueError:
287 except ValueError:
288 pass
288 pass
289 if lbl.strip() != lbl:
289 if lbl.strip() != lbl:
290 raise error.InputError(
290 raise error.InputError(
291 _(b"leading or trailing whitespace in name %r") % lbl
291 _(b"leading or trailing whitespace in name %r") % lbl
292 )
292 )
293
293
294
294
295 def checkfilename(f):
295 def checkfilename(f):
296 '''Check that the filename f is an acceptable filename for a tracked file'''
296 '''Check that the filename f is an acceptable filename for a tracked file'''
297 if b'\r' in f or b'\n' in f:
297 if b'\r' in f or b'\n' in f:
298 raise error.InputError(
298 raise error.InputError(
299 _(b"'\\n' and '\\r' disallowed in filenames: %r")
299 _(b"'\\n' and '\\r' disallowed in filenames: %r")
300 % pycompat.bytestr(f)
300 % pycompat.bytestr(f)
301 )
301 )
302
302
303
303
304 def checkportable(ui, f):
304 def checkportable(ui, f):
305 '''Check if filename f is portable and warn or abort depending on config'''
305 '''Check if filename f is portable and warn or abort depending on config'''
306 checkfilename(f)
306 checkfilename(f)
307 abort, warn = checkportabilityalert(ui)
307 abort, warn = checkportabilityalert(ui)
308 if abort or warn:
308 if abort or warn:
309 msg = util.checkwinfilename(f)
309 msg = util.checkwinfilename(f)
310 if msg:
310 if msg:
311 msg = b"%s: %s" % (msg, procutil.shellquote(f))
311 msg = b"%s: %s" % (msg, procutil.shellquote(f))
312 if abort:
312 if abort:
313 raise error.InputError(msg)
313 raise error.InputError(msg)
314 ui.warn(_(b"warning: %s\n") % msg)
314 ui.warn(_(b"warning: %s\n") % msg)
315
315
316
316
317 def checkportabilityalert(ui):
317 def checkportabilityalert(ui):
318 """check if the user's config requests nothing, a warning, or abort for
318 """check if the user's config requests nothing, a warning, or abort for
319 non-portable filenames"""
319 non-portable filenames"""
320 val = ui.config(b'ui', b'portablefilenames')
320 val = ui.config(b'ui', b'portablefilenames')
321 lval = val.lower()
321 lval = val.lower()
322 bval = stringutil.parsebool(val)
322 bval = stringutil.parsebool(val)
323 abort = pycompat.iswindows or lval == b'abort'
323 abort = pycompat.iswindows or lval == b'abort'
324 warn = bval or lval == b'warn'
324 warn = bval or lval == b'warn'
325 if bval is None and not (warn or abort or lval == b'ignore'):
325 if bval is None and not (warn or abort or lval == b'ignore'):
326 raise error.ConfigError(
326 raise error.ConfigError(
327 _(b"ui.portablefilenames value is invalid ('%s')") % val
327 _(b"ui.portablefilenames value is invalid ('%s')") % val
328 )
328 )
329 return abort, warn
329 return abort, warn
330
330
331
331
332 class casecollisionauditor:
332 class casecollisionauditor:
333 def __init__(self, ui, abort, dirstate):
333 def __init__(self, ui, abort, dirstate):
334 self._ui = ui
334 self._ui = ui
335 self._abort = abort
335 self._abort = abort
336 allfiles = b'\0'.join(dirstate)
336 allfiles = b'\0'.join(dirstate)
337 self._loweredfiles = set(encoding.lower(allfiles).split(b'\0'))
337 self._loweredfiles = set(encoding.lower(allfiles).split(b'\0'))
338 self._dirstate = dirstate
338 self._dirstate = dirstate
339 # The purpose of _newfiles is so that we don't complain about
339 # The purpose of _newfiles is so that we don't complain about
340 # case collisions if someone were to call this object with the
340 # case collisions if someone were to call this object with the
341 # same filename twice.
341 # same filename twice.
342 self._newfiles = set()
342 self._newfiles = set()
343
343
344 def __call__(self, f):
344 def __call__(self, f):
345 if f in self._newfiles:
345 if f in self._newfiles:
346 return
346 return
347 fl = encoding.lower(f)
347 fl = encoding.lower(f)
348 if fl in self._loweredfiles and f not in self._dirstate:
348 if fl in self._loweredfiles and f not in self._dirstate:
349 msg = _(b'possible case-folding collision for %s') % f
349 msg = _(b'possible case-folding collision for %s') % f
350 if self._abort:
350 if self._abort:
351 raise error.StateError(msg)
351 raise error.StateError(msg)
352 self._ui.warn(_(b"warning: %s\n") % msg)
352 self._ui.warn(_(b"warning: %s\n") % msg)
353 self._loweredfiles.add(fl)
353 self._loweredfiles.add(fl)
354 self._newfiles.add(f)
354 self._newfiles.add(f)
355
355
356
356
357 def filteredhash(repo, maxrev, needobsolete=False):
357 def filteredhash(repo, maxrev, needobsolete=False):
358 """build hash of filtered revisions in the current repoview.
358 """build hash of filtered revisions in the current repoview.
359
359
360 Multiple caches perform up-to-date validation by checking that the
360 Multiple caches perform up-to-date validation by checking that the
361 tiprev and tipnode stored in the cache file match the current repository.
361 tiprev and tipnode stored in the cache file match the current repository.
362 However, this is not sufficient for validating repoviews because the set
362 However, this is not sufficient for validating repoviews because the set
363 of revisions in the view may change without the repository tiprev and
363 of revisions in the view may change without the repository tiprev and
364 tipnode changing.
364 tipnode changing.
365
365
366 This function hashes all the revs filtered from the view (and, optionally,
366 This function hashes all the revs filtered from the view (and, optionally,
367 all obsolete revs) up to maxrev and returns that SHA-1 digest.
367 all obsolete revs) up to maxrev and returns that SHA-1 digest.
368 """
368 """
369 cl = repo.changelog
369 cl = repo.changelog
370 if needobsolete:
370 if needobsolete:
371 obsrevs = obsolete.getrevs(repo, b'obsolete')
371 obsrevs = obsolete.getrevs(repo, b'obsolete')
372 if not cl.filteredrevs and not obsrevs:
372 if not cl.filteredrevs and not obsrevs:
373 return None
373 return None
374 key = (maxrev, hash(cl.filteredrevs), hash(obsrevs))
374 key = (maxrev, hash(cl.filteredrevs), hash(obsrevs))
375 else:
375 else:
376 if not cl.filteredrevs:
376 if not cl.filteredrevs:
377 return None
377 return None
378 key = maxrev
378 key = maxrev
379 obsrevs = frozenset()
379 obsrevs = frozenset()
380
380
381 result = cl._filteredrevs_hashcache.get(key)
381 result = cl._filteredrevs_hashcache.get(key)
382 if not result:
382 if not result:
383 revs = sorted(r for r in cl.filteredrevs | obsrevs if r <= maxrev)
383 revs = sorted(r for r in cl.filteredrevs | obsrevs if r <= maxrev)
384 if revs:
384 if revs:
385 s = hashutil.sha1()
385 s = hashutil.sha1()
386 for rev in revs:
386 for rev in revs:
387 s.update(b'%d;' % rev)
387 s.update(b'%d;' % rev)
388 result = s.digest()
388 result = s.digest()
389 cl._filteredrevs_hashcache[key] = result
389 cl._filteredrevs_hashcache[key] = result
390 return result
390 return result
391
391
392
392
393 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
393 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
394 """yield every hg repository under path, always recursively.
394 """yield every hg repository under path, always recursively.
395 The recurse flag will only control recursion into repo working dirs"""
395 The recurse flag will only control recursion into repo working dirs"""
396
396
397 def errhandler(err):
397 def errhandler(err):
398 if err.filename == path:
398 if err.filename == path:
399 raise err
399 raise err
400
400
401 samestat = getattr(os.path, 'samestat', None)
401 samestat = getattr(os.path, 'samestat', None)
402 if followsym and samestat is not None:
402 if followsym and samestat is not None:
403
403
404 def adddir(dirlst, dirname):
404 def adddir(dirlst, dirname):
405 dirstat = os.stat(dirname)
405 dirstat = os.stat(dirname)
406 match = any(samestat(dirstat, lstdirstat) for lstdirstat in dirlst)
406 match = any(samestat(dirstat, lstdirstat) for lstdirstat in dirlst)
407 if not match:
407 if not match:
408 dirlst.append(dirstat)
408 dirlst.append(dirstat)
409 return not match
409 return not match
410
410
411 else:
411 else:
412 followsym = False
412 followsym = False
413
413
414 if (seen_dirs is None) and followsym:
414 if (seen_dirs is None) and followsym:
415 seen_dirs = []
415 seen_dirs = []
416 adddir(seen_dirs, path)
416 adddir(seen_dirs, path)
417 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
417 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
418 dirs.sort()
418 dirs.sort()
419 if b'.hg' in dirs:
419 if b'.hg' in dirs:
420 yield root # found a repository
420 yield root # found a repository
421 qroot = os.path.join(root, b'.hg', b'patches')
421 qroot = os.path.join(root, b'.hg', b'patches')
422 if os.path.isdir(os.path.join(qroot, b'.hg')):
422 if os.path.isdir(os.path.join(qroot, b'.hg')):
423 yield qroot # we have a patch queue repo here
423 yield qroot # we have a patch queue repo here
424 if recurse:
424 if recurse:
425 # avoid recursing inside the .hg directory
425 # avoid recursing inside the .hg directory
426 dirs.remove(b'.hg')
426 dirs.remove(b'.hg')
427 else:
427 else:
428 dirs[:] = [] # don't descend further
428 dirs[:] = [] # don't descend further
429 elif followsym:
429 elif followsym:
430 newdirs = []
430 newdirs = []
431 for d in dirs:
431 for d in dirs:
432 fname = os.path.join(root, d)
432 fname = os.path.join(root, d)
433 if adddir(seen_dirs, fname):
433 if adddir(seen_dirs, fname):
434 if os.path.islink(fname):
434 if os.path.islink(fname):
435 for hgname in walkrepos(fname, True, seen_dirs):
435 for hgname in walkrepos(fname, True, seen_dirs):
436 yield hgname
436 yield hgname
437 else:
437 else:
438 newdirs.append(d)
438 newdirs.append(d)
439 dirs[:] = newdirs
439 dirs[:] = newdirs
440
440
441
441
442 def binnode(ctx):
442 def binnode(ctx):
443 """Return binary node id for a given basectx"""
443 """Return binary node id for a given basectx"""
444 node = ctx.node()
444 node = ctx.node()
445 if node is None:
445 if node is None:
446 return ctx.repo().nodeconstants.wdirid
446 return ctx.repo().nodeconstants.wdirid
447 return node
447 return node
448
448
449
449
450 def intrev(ctx):
450 def intrev(ctx):
451 """Return integer for a given basectx that can be used in comparison or
451 """Return integer for a given basectx that can be used in comparison or
452 arithmetic operation"""
452 arithmetic operation"""
453 rev = ctx.rev()
453 rev = ctx.rev()
454 if rev is None:
454 if rev is None:
455 return wdirrev
455 return wdirrev
456 return rev
456 return rev
457
457
458
458
459 def formatchangeid(ctx):
459 def formatchangeid(ctx):
460 """Format changectx as '{rev}:{node|formatnode}', which is the default
460 """Format changectx as '{rev}:{node|formatnode}', which is the default
461 template provided by logcmdutil.changesettemplater"""
461 template provided by logcmdutil.changesettemplater"""
462 repo = ctx.repo()
462 repo = ctx.repo()
463 return formatrevnode(repo.ui, intrev(ctx), binnode(ctx))
463 return formatrevnode(repo.ui, intrev(ctx), binnode(ctx))
464
464
465
465
466 def formatrevnode(ui, rev, node):
466 def formatrevnode(ui, rev, node):
467 """Format given revision and node depending on the current verbosity"""
467 """Format given revision and node depending on the current verbosity"""
468 if ui.debugflag:
468 if ui.debugflag:
469 hexfunc = hex
469 hexfunc = hex
470 else:
470 else:
471 hexfunc = short
471 hexfunc = short
472 return b'%d:%s' % (rev, hexfunc(node))
472 return b'%d:%s' % (rev, hexfunc(node))
473
473
474
474
475 def resolvehexnodeidprefix(repo, prefix):
475 def resolvehexnodeidprefix(repo, prefix):
476 if prefix.startswith(b'x'):
476 if prefix.startswith(b'x'):
477 prefix = prefix[1:]
477 prefix = prefix[1:]
478 try:
478 try:
479 # Uses unfiltered repo because it's faster when prefix is ambiguous/
479 # Uses unfiltered repo because it's faster when prefix is ambiguous/
480 # This matches the shortesthexnodeidprefix() function below.
480 # This matches the shortesthexnodeidprefix() function below.
481 node = repo.unfiltered().changelog._partialmatch(prefix)
481 node = repo.unfiltered().changelog._partialmatch(prefix)
482 except error.AmbiguousPrefixLookupError:
482 except error.AmbiguousPrefixLookupError:
483 revset = repo.ui.config(
483 revset = repo.ui.config(
484 b'experimental', b'revisions.disambiguatewithin'
484 b'experimental', b'revisions.disambiguatewithin'
485 )
485 )
486 if revset:
486 if revset:
487 # Clear config to avoid infinite recursion
487 # Clear config to avoid infinite recursion
488 configoverrides = {
488 configoverrides = {
489 (b'experimental', b'revisions.disambiguatewithin'): None
489 (b'experimental', b'revisions.disambiguatewithin'): None
490 }
490 }
491 with repo.ui.configoverride(configoverrides):
491 with repo.ui.configoverride(configoverrides):
492 revs = repo.anyrevs([revset], user=True)
492 revs = repo.anyrevs([revset], user=True)
493 matches = []
493 matches = []
494 for rev in revs:
494 for rev in revs:
495 node = repo.changelog.node(rev)
495 node = repo.changelog.node(rev)
496 if hex(node).startswith(prefix):
496 if hex(node).startswith(prefix):
497 matches.append(node)
497 matches.append(node)
498 if len(matches) == 1:
498 if len(matches) == 1:
499 return matches[0]
499 return matches[0]
500 raise
500 raise
501 if node is None:
501 if node is None:
502 return
502 return
503 repo.changelog.rev(node) # make sure node isn't filtered
503 repo.changelog.rev(node) # make sure node isn't filtered
504 return node
504 return node
505
505
506
506
507 def mayberevnum(repo, prefix):
507 def mayberevnum(repo, prefix):
508 """Checks if the given prefix may be mistaken for a revision number"""
508 """Checks if the given prefix may be mistaken for a revision number"""
509 try:
509 try:
510 i = int(prefix)
510 i = int(prefix)
511 # if we are a pure int, then starting with zero will not be
511 # if we are a pure int, then starting with zero will not be
512 # confused as a rev; or, obviously, if the int is larger
512 # confused as a rev; or, obviously, if the int is larger
513 # than the value of the tip rev. We still need to disambiguate if
513 # than the value of the tip rev. We still need to disambiguate if
514 # prefix == '0', since that *is* a valid revnum.
514 # prefix == '0', since that *is* a valid revnum.
515 if (prefix != b'0' and prefix[0:1] == b'0') or i >= len(repo):
515 if (prefix != b'0' and prefix[0:1] == b'0') or i >= len(repo):
516 return False
516 return False
517 return True
517 return True
518 except ValueError:
518 except ValueError:
519 return False
519 return False
520
520
521
521
522 def shortesthexnodeidprefix(repo, node, minlength=1, cache=None):
522 def shortesthexnodeidprefix(repo, node, minlength=1, cache=None):
523 """Find the shortest unambiguous prefix that matches hexnode.
523 """Find the shortest unambiguous prefix that matches hexnode.
524
524
525 If "cache" is not None, it must be a dictionary that can be used for
525 If "cache" is not None, it must be a dictionary that can be used for
526 caching between calls to this method.
526 caching between calls to this method.
527 """
527 """
528 # _partialmatch() of filtered changelog could take O(len(repo)) time,
528 # _partialmatch() of filtered changelog could take O(len(repo)) time,
529 # which would be unacceptably slow. so we look for hash collision in
529 # which would be unacceptably slow. so we look for hash collision in
530 # unfiltered space, which means some hashes may be slightly longer.
530 # unfiltered space, which means some hashes may be slightly longer.
531
531
532 minlength = max(minlength, 1)
532 minlength = max(minlength, 1)
533
533
534 def disambiguate(prefix):
534 def disambiguate(prefix):
535 """Disambiguate against revnums."""
535 """Disambiguate against revnums."""
536 if repo.ui.configbool(b'experimental', b'revisions.prefixhexnode'):
536 if repo.ui.configbool(b'experimental', b'revisions.prefixhexnode'):
537 if mayberevnum(repo, prefix):
537 if mayberevnum(repo, prefix):
538 return b'x' + prefix
538 return b'x' + prefix
539 else:
539 else:
540 return prefix
540 return prefix
541
541
542 hexnode = hex(node)
542 hexnode = hex(node)
543 for length in range(len(prefix), len(hexnode) + 1):
543 for length in range(len(prefix), len(hexnode) + 1):
544 prefix = hexnode[:length]
544 prefix = hexnode[:length]
545 if not mayberevnum(repo, prefix):
545 if not mayberevnum(repo, prefix):
546 return prefix
546 return prefix
547
547
548 cl = repo.unfiltered().changelog
548 cl = repo.unfiltered().changelog
549 revset = repo.ui.config(b'experimental', b'revisions.disambiguatewithin')
549 revset = repo.ui.config(b'experimental', b'revisions.disambiguatewithin')
550 if revset:
550 if revset:
551 revs = None
551 revs = None
552 if cache is not None:
552 if cache is not None:
553 revs = cache.get(b'disambiguationrevset')
553 revs = cache.get(b'disambiguationrevset')
554 if revs is None:
554 if revs is None:
555 revs = repo.anyrevs([revset], user=True)
555 revs = repo.anyrevs([revset], user=True)
556 if cache is not None:
556 if cache is not None:
557 cache[b'disambiguationrevset'] = revs
557 cache[b'disambiguationrevset'] = revs
558 if cl.rev(node) in revs:
558 if cl.rev(node) in revs:
559 hexnode = hex(node)
559 hexnode = hex(node)
560 nodetree = None
560 nodetree = None
561 if cache is not None:
561 if cache is not None:
562 nodetree = cache.get(b'disambiguationnodetree')
562 nodetree = cache.get(b'disambiguationnodetree')
563 if not nodetree:
563 if not nodetree:
564 if util.safehasattr(parsers, 'nodetree'):
564 if util.safehasattr(parsers, 'nodetree'):
565 # The CExt is the only implementation to provide a nodetree
565 # The CExt is the only implementation to provide a nodetree
566 # class so far.
566 # class so far.
567 index = cl.index
567 index = cl.index
568 if util.safehasattr(index, 'get_cindex'):
568 if util.safehasattr(index, 'get_cindex'):
569 # the rust wrapped need to give access to its internal index
569 # the rust wrapped need to give access to its internal index
570 index = index.get_cindex()
570 index = index.get_cindex()
571 nodetree = parsers.nodetree(index, len(revs))
571 nodetree = parsers.nodetree(index, len(revs))
572 for r in revs:
572 for r in revs:
573 nodetree.insert(r)
573 nodetree.insert(r)
574 if cache is not None:
574 if cache is not None:
575 cache[b'disambiguationnodetree'] = nodetree
575 cache[b'disambiguationnodetree'] = nodetree
576 if nodetree is not None:
576 if nodetree is not None:
577 length = max(nodetree.shortest(node), minlength)
577 length = max(nodetree.shortest(node), minlength)
578 prefix = hexnode[:length]
578 prefix = hexnode[:length]
579 return disambiguate(prefix)
579 return disambiguate(prefix)
580 for length in range(minlength, len(hexnode) + 1):
580 for length in range(minlength, len(hexnode) + 1):
581 matches = []
581 matches = []
582 prefix = hexnode[:length]
582 prefix = hexnode[:length]
583 for rev in revs:
583 for rev in revs:
584 otherhexnode = repo[rev].hex()
584 otherhexnode = repo[rev].hex()
585 if prefix == otherhexnode[:length]:
585 if prefix == otherhexnode[:length]:
586 matches.append(otherhexnode)
586 matches.append(otherhexnode)
587 if len(matches) == 1:
587 if len(matches) == 1:
588 return disambiguate(prefix)
588 return disambiguate(prefix)
589
589
590 try:
590 try:
591 return disambiguate(cl.shortest(node, minlength))
591 return disambiguate(cl.shortest(node, minlength))
592 except error.LookupError:
592 except error.LookupError:
593 raise error.RepoLookupError()
593 raise error.RepoLookupError()
594
594
595
595
596 def isrevsymbol(repo, symbol):
596 def isrevsymbol(repo, symbol):
597 """Checks if a symbol exists in the repo.
597 """Checks if a symbol exists in the repo.
598
598
599 See revsymbol() for details. Raises error.AmbiguousPrefixLookupError if the
599 See revsymbol() for details. Raises error.AmbiguousPrefixLookupError if the
600 symbol is an ambiguous nodeid prefix.
600 symbol is an ambiguous nodeid prefix.
601 """
601 """
602 try:
602 try:
603 revsymbol(repo, symbol)
603 revsymbol(repo, symbol)
604 return True
604 return True
605 except error.RepoLookupError:
605 except error.RepoLookupError:
606 return False
606 return False
607
607
608
608
def revsymbol(repo, symbol):
    """Returns a context given a single revision symbol (as string).

    This is similar to revsingle(), but accepts only a single revision symbol,
    i.e. things like ".", "tip", "1234", "deadbeef", "my-bookmark" work, but
    not "max(public())".
    """
    if not isinstance(symbol, bytes):
        msg = (
            b"symbol (%s of type %s) was not a string, did you mean "
            b"repo[symbol]?" % (symbol, type(symbol))
        )
        raise error.ProgrammingError(msg)
    try:
        if symbol in (b'.', b'tip', b'null'):
            return repo[symbol]

        try:
            r = int(symbol)
            if b'%d' % r != symbol:
                raise ValueError
            l = len(repo.changelog)
            if r < 0:
                r += l
            if r < 0 or r >= l and r != wdirrev:
                raise ValueError
            return repo[r]
        except error.FilteredIndexError:
            raise
        except (ValueError, OverflowError, IndexError):
            pass

        if len(symbol) == 2 * repo.nodeconstants.nodelen:
            try:
                node = bin(symbol)
                rev = repo.changelog.rev(node)
                return repo[rev]
            except error.FilteredLookupError:
                raise
            except (binascii.Error, LookupError):
                pass

        # look up bookmarks through the name interface
        try:
            node = repo.names.singlenode(repo, symbol)
            rev = repo.changelog.rev(node)
            return repo[rev]
        except KeyError:
            pass

        node = resolvehexnodeidprefix(repo, symbol)
        if node is not None:
            rev = repo.changelog.rev(node)
            return repo[rev]

        raise error.RepoLookupError(_(b"unknown revision '%s'") % symbol)

    except error.WdirUnsupported:
        return repo[None]
    except (
        error.FilteredIndexError,
        error.FilteredLookupError,
        error.FilteredRepoLookupError,
    ):
        raise _filterederror(repo, symbol)


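# Illustrative sketch (not part of the original module): revsymbol()
# tries, in order, the special names, revision numbers, full hex
# nodeids, the names interface (bookmarks, tags, branches), and finally
# nodeid prefixes. Assumes `repo` is a local repository object.
#
#     ctx = revsymbol(repo, b'.')         # working directory parent
#     ctx = revsymbol(repo, b'42')        # revision number
#     ctx = revsymbol(repo, b'deadbeef')  # nodeid prefix; may raise
#                                         # AmbiguousPrefixLookupError

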
def _filterederror(repo, changeid):
    """build an exception to be raised about a filtered changeid

    This is extracted into a function to help extensions (eg: evolve) to
    experiment with various message variants."""
    if repo.filtername.startswith(b'visible'):

        # Check if the changeset is obsolete
        unfilteredrepo = repo.unfiltered()
        ctx = revsymbol(unfilteredrepo, changeid)

        # If the changeset is obsolete, enrich the message with the reason
        # that made this changeset not visible
        if ctx.obsolete():
            msg = obsutil._getfilteredreason(repo, changeid, ctx)
        else:
            msg = _(b"hidden revision '%s'") % changeid

        hint = _(b'use --hidden to access hidden revisions')

        return error.FilteredRepoLookupError(msg, hint=hint)
    msg = _(b"filtered revision '%s' (not in '%s' subset)")
    msg %= (changeid, repo.filtername)
    return error.FilteredRepoLookupError(msg)


def revsingle(repo, revspec, default=b'.', localalias=None):
    if not revspec and revspec != 0:
        return repo[default]

    l = revrange(repo, [revspec], localalias=localalias)
    if not l:
        raise error.InputError(_(b'empty revision set'))
    return repo[l.last()]


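# Illustrative sketch (not part of the original module): unlike
# revsymbol(), revsingle() accepts full revset expressions and returns
# the context for the last matching revision. Assumes `repo` is a
# local repository object.
#
#     ctx = revsingle(repo, b'max(public())')
#     ctx = revsingle(repo, b'')  # falls back to the default, '.'

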
def _pairspec(revspec):
    tree = revsetlang.parse(revspec)
    return tree and tree[0] in (
        b'range',
        b'rangepre',
        b'rangepost',
        b'rangeall',
    )


def revpair(repo, revs):
    if not revs:
        return repo[b'.'], repo[None]

    l = revrange(repo, revs)

    if not l:
        raise error.InputError(_(b'empty revision range'))

    first = l.first()
    second = l.last()

    if (
        first == second
        and len(revs) >= 2
        and not all(revrange(repo, [r]) for r in revs)
    ):
        raise error.InputError(_(b'empty revision on one side of range'))

    # if top-level is range expression, the result must always be a pair
    if first == second and len(revs) == 1 and not _pairspec(revs[0]):
        return repo[first], repo[None]

    return repo[first], repo[second]


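# Illustrative sketch (not part of the original module): revpair() is
# the kind of helper commands such as `hg diff -r A -r B` use to turn
# user-supplied revisions into a (base, target) pair; the second
# context is the working directory (repo[None]) when no upper bound is
# given. Assumes `repo` is a local repository object.
#
#     base, target = revpair(repo, [b'1.0', b'tip'])
#     base, target = revpair(repo, [])  # ('.', working directory)

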
def revrange(repo, specs, localalias=None):
    """Execute 1 to many revsets and return the union.

    This is the preferred mechanism for executing revsets using user-specified
    config options, such as revset aliases.

    The revsets specified by ``specs`` will be executed via a chained ``OR``
    expression. If ``specs`` is empty, an empty result is returned.

    ``specs`` can contain integers, in which case they are assumed to be
    revision numbers.

    It is assumed the revsets are already formatted. If you have arguments
    that need to be expanded in the revset, call ``revsetlang.formatspec()``
    and pass the result as an element of ``specs``.

    Specifying a single revset is allowed.

    Returns a ``smartset.abstractsmartset`` which is a list-like interface over
    integer revisions.
    """
    allspecs = []
    for spec in specs:
        if isinstance(spec, int):
            spec = revsetlang.formatspec(b'%d', spec)
        allspecs.append(spec)
    return repo.anyrevs(allspecs, user=True, localalias=localalias)


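# Illustrative sketch (not part of the original module): arguments that
# need quoting should be expanded with revsetlang.formatspec() before
# being passed to revrange(). Assumes `repo` is a local repository.
#
#     spec = revsetlang.formatspec(b'branch(%s)', b'default')
#     revs = revrange(repo, [spec, b'tip'])  # union of both revsets
#     for rev in revs:
#         print(rev)

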
def increasingwindows(windowsize=8, sizelimit=512):
    while True:
        yield windowsize
        if windowsize < sizelimit:
            windowsize *= 2


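# Illustrative sketch (not part of the original module): the generator
# doubles the window size until it reaches the limit, then repeats it
# forever.
#
#     sizes = increasingwindows()
#     [next(sizes) for _i in range(8)]
#     # -> [8, 16, 32, 64, 128, 256, 512, 512]

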
def walkchangerevs(repo, revs, makefilematcher, prepare):
    """Iterate over files and the revs in a "windowed" way.

    Callers most commonly need to iterate backwards over the history
    in which they are interested. Doing so has awful (quadratic-looking)
    performance, so we use iterators in a "windowed" way.

    We walk a window of revisions in the desired order. Within the
    window, we first walk forwards to gather data, then in the desired
    order (usually backwards) to display it.

    This function returns an iterator yielding contexts. Before
    yielding each context, the iterator will first call the prepare
    function on each context in the window in forward order."""

    if not revs:
        return []
    change = repo.__getitem__

    def iterate():
        it = iter(revs)
        stopiteration = False
        for windowsize in increasingwindows():
            nrevs = []
            for i in range(windowsize):
                rev = next(it, None)
                if rev is None:
                    stopiteration = True
                    break
                nrevs.append(rev)
            for rev in sorted(nrevs):
                ctx = change(rev)
                prepare(ctx, makefilematcher(ctx))
            for rev in nrevs:
                yield change(rev)

            if stopiteration:
                break

    return iterate()


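# Illustrative sketch (not part of the original module): a minimal
# caller that walks `revs` and collects matched files per changeset.
# Assumes `repo` is a local repository and `revs` a smartset, e.g.
# obtained from revrange().
#
#     collected = {}
#
#     def prepare(ctx, fmatch):
#         collected[ctx.rev()] = [f for f in ctx.files() if fmatch(f)]
#
#     for ctx in walkchangerevs(
#         repo, revs, lambda ctx: matchall(repo), prepare
#     ):
#         print(ctx.rev(), collected[ctx.rev()])

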
def meaningfulparents(repo, ctx):
    """Return list of meaningful (or all if debug) parentrevs for rev.

    For merges (two non-nullrev revisions) both parents are meaningful.
    Otherwise the first parent revision is considered meaningful if it
    is not the preceding revision.
    """
    parents = ctx.parents()
    if len(parents) > 1:
        return parents
    if repo.ui.debugflag:
        return [parents[0], repo[nullrev]]
    if parents[0].rev() >= intrev(ctx) - 1:
        return []
    return parents


def getuipathfn(repo, legacyrelativevalue=False, forcerelativevalue=None):
    """Return a function that produces paths for presenting to the user.

    The returned function takes a repo-relative path and produces a path
    that can be presented in the UI.

    Depending on the value of ui.relative-paths, either a repo-relative or
    cwd-relative path will be produced.

    legacyrelativevalue is the value to use if ui.relative-paths=legacy

    If forcerelativevalue is not None, then that value will be used regardless
    of what ui.relative-paths is set to.
    """
    if forcerelativevalue is not None:
        relative = forcerelativevalue
    else:
        config = repo.ui.config(b'ui', b'relative-paths')
        if config == b'legacy':
            relative = legacyrelativevalue
        else:
            relative = stringutil.parsebool(config)
            if relative is None:
                raise error.ConfigError(
                    _(b"ui.relative-paths is not a boolean ('%s')") % config
                )

    if relative:
        cwd = repo.getcwd()
        if cwd != b'':
            # this branch would work even if cwd == b'' (ie cwd = repo
            # root), but its generality makes the returned function slower
            pathto = repo.pathto
            return lambda f: pathto(f, cwd)
    if repo.ui.configbool(b'ui', b'slash'):
        return lambda f: f
    else:
        return util.localpath


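# Illustrative sketch (not part of the original module): converting
# repo-relative paths for display. Assumes `repo` is a local repository
# whose cwd is a subdirectory of the repo root.
#
#     uipathfn = getuipathfn(repo, legacyrelativevalue=True)
#     repo.ui.status(b'touched %s\n' % uipathfn(b'dir/file.txt'))

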
def subdiruipathfn(subpath, uipathfn):
    '''Create a new uipathfn that treats the file as relative to subpath.'''
    return lambda f: uipathfn(posixpath.join(subpath, f))


def anypats(pats, opts):
    """Checks if any patterns, including --include and --exclude were given.

    Some commands (e.g. addremove) use this condition for deciding whether to
    print absolute or relative paths.
    """
    return bool(pats or opts.get(b'include') or opts.get(b'exclude'))


def expandpats(pats):
    """Expand bare globs when running on windows.
    On posix we assume it has already been done by sh."""
    if not util.expandglobs:
        return list(pats)
    ret = []
    for kindpat in pats:
        kind, pat = matchmod._patsplit(kindpat, None)
        if kind is None:
            try:
                globbed = glob.glob(pat)
            except re.error:
                globbed = [pat]
            if globbed:
                ret.extend(globbed)
                continue
        ret.append(kindpat)
    return ret


def matchandpats(
    ctx, pats=(), opts=None, globbed=False, default=b'relpath', badfn=None
):
    """Return a matcher and the patterns that were used.
    The matcher will warn about bad matches, unless an alternate badfn callback
    is provided."""
    if opts is None:
        opts = {}
    if not globbed and default == b'relpath':
        pats = expandpats(pats or [])

    uipathfn = getuipathfn(ctx.repo(), legacyrelativevalue=True)

    def bad(f, msg):
        ctx.repo().ui.warn(b"%s: %s\n" % (uipathfn(f), msg))

    if badfn is None:
        badfn = bad

    m = ctx.match(
        pats,
        opts.get(b'include'),
        opts.get(b'exclude'),
        default,
        listsubrepos=opts.get(b'subrepos'),
        badfn=badfn,
    )

    if m.always():
        pats = []
    return m, pats


def match(
    ctx, pats=(), opts=None, globbed=False, default=b'relpath', badfn=None
):
    '''Return a matcher that will warn about bad matches.'''
    return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]


def matchall(repo):
    '''Return a matcher that will efficiently match everything.'''
    return matchmod.always()


def matchfiles(repo, files, badfn=None):
    '''Return a matcher that will efficiently match exactly these files.'''
    return matchmod.exact(files, badfn=badfn)


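# Illustrative sketch (not part of the original module): building a
# matcher from command-line-style patterns and testing repo-relative
# paths against it. Assumes `repo` is a local repository; the exact
# results depend on the pattern kinds used.
#
#     m = match(repo[None], pats=[b'glob:*.py'])
#     m(b'setup.py')      # True for a top-level .py file
#     m(b'docs/conf.txt') # False
#     mf = matchfiles(repo, [b'README'])
#     mf(b'README')       # True; exact matchers accept only listed files

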
def parsefollowlinespattern(repo, rev, pat, msg):
    """Return a file name from `pat` pattern suitable for usage in followlines
    logic.
    """
    if not matchmod.patkind(pat):
        return pathutil.canonpath(repo.root, repo.getcwd(), pat)
    else:
        ctx = repo[rev]
        m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
        files = [f for f in ctx if m(f)]
        if len(files) != 1:
            raise error.ParseError(msg)
        return files[0]


def getorigvfs(ui, repo):
    """return a vfs suitable to save 'orig' file

    return None if no special directory is configured"""
    origbackuppath = ui.config(b'ui', b'origbackuppath')
    if not origbackuppath:
        return None
    return vfs.vfs(repo.wvfs.join(origbackuppath))


def backuppath(ui, repo, filepath):
    """customize where working copy backup files (.orig files) are created

    Fetch user defined path from config file: [ui] origbackuppath = <path>
    Fall back to default (filepath with .orig suffix) if not specified

    filepath is repo-relative

    Returns an absolute path
    """
    origvfs = getorigvfs(ui, repo)
    if origvfs is None:
        return repo.wjoin(filepath + b".orig")

    origbackupdir = origvfs.dirname(filepath)
    if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
        ui.note(_(b'creating directory: %s\n') % origvfs.join(origbackupdir))

        # Remove any files that conflict with the backup file's path
        for f in reversed(list(pathutil.finddirs(filepath))):
            if origvfs.isfileorlink(f):
                ui.note(_(b'removing conflicting file: %s\n') % origvfs.join(f))
                origvfs.unlink(f)
                break

        origvfs.makedirs(origbackupdir)

    if origvfs.isdir(filepath) and not origvfs.islink(filepath):
        ui.note(
            _(b'removing conflicting directory: %s\n') % origvfs.join(filepath)
        )
        origvfs.rmtree(filepath, forcibly=True)

    return origvfs.join(filepath)


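# Illustrative sketch (not part of the original module): computing
# where the .orig backup for a file would land. Assumes `ui` and `repo`
# objects and a repo-relative path.
#
#     # with ui.origbackuppath unset:
#     backuppath(ui, repo, b'src/main.py')  # -> <root>/src/main.py.orig
#     # with "[ui] origbackuppath = .hg/origbackups":
#     backuppath(ui, repo, b'src/main.py')
#     # -> <root>/.hg/origbackups/src/main.py

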
class _containsnode:
    """proxy __contains__(node) to container.__contains__ which accepts revs"""

    def __init__(self, repo, revcontainer):
        self._torev = repo.changelog.rev
        self._revcontains = revcontainer.__contains__

    def __contains__(self, node):
        return self._revcontains(self._torev(node))


def cleanupnodes(
    repo,
    replacements,
    operation,
    moves=None,
    metadata=None,
    fixphase=False,
    targetphase=None,
    backup=True,
):
    """do common cleanups when old nodes are replaced by new nodes

    That includes writing obsmarkers or stripping nodes, and moving bookmarks.
    (we might also want to move working directory parent in the future)

    By default, bookmark moves are calculated automatically from 'replacements',
    but 'moves' can be used to override that. Also, 'moves' may include
    additional bookmark moves that should not have associated obsmarkers.

    replacements is {oldnode: [newnode]} or an iterable of nodes if they do not
    have replacements. operation is a string, like "rebase".

    metadata is a dictionary containing metadata to be stored in obsmarkers if
    obsolescence is enabled.
    """
    assert fixphase or targetphase is None
    if not replacements and not moves:
        return

    # translate mapping's other forms
    if not util.safehasattr(replacements, b'items'):
        replacements = {(n,): () for n in replacements}
    else:
        # upgrading non-tuple "source" to tuple ones for BC
        repls = {}
        for key, value in replacements.items():
            if not isinstance(key, tuple):
                key = (key,)
            repls[key] = value
        replacements = repls

    # Unfiltered repo is needed since nodes in replacements might be hidden.
    unfi = repo.unfiltered()

    # Calculate bookmark movements
    if moves is None:
        moves = {}
    for oldnodes, newnodes in replacements.items():
        for oldnode in oldnodes:
            if oldnode in moves:
                continue
            if len(newnodes) > 1:
                # usually a split, take the one with biggest rev number
                newnode = next(unfi.set(b'max(%ln)', newnodes)).node()
            elif len(newnodes) == 0:
                # move bookmark backwards
                allreplaced = []
                for rep in replacements:
                    allreplaced.extend(rep)
                roots = list(
                    unfi.set(b'max((::%n) - %ln)', oldnode, allreplaced)
                )
                if roots:
                    newnode = roots[0].node()
                else:
                    newnode = repo.nullid
            else:
                newnode = newnodes[0]
            moves[oldnode] = newnode

    allnewnodes = [n for ns in replacements.values() for n in ns]
    toretract = {}
    toadvance = {}
    if fixphase:
        precursors = {}
        for oldnodes, newnodes in replacements.items():
            for oldnode in oldnodes:
                for newnode in newnodes:
                    precursors.setdefault(newnode, []).append(oldnode)

        allnewnodes.sort(key=lambda n: unfi[n].rev())
        newphases = {}

        def phase(ctx):
            return newphases.get(ctx.node(), ctx.phase())

        for newnode in allnewnodes:
            ctx = unfi[newnode]
            parentphase = max(phase(p) for p in ctx.parents())
            if targetphase is None:
                oldphase = max(
                    unfi[oldnode].phase() for oldnode in precursors[newnode]
                )
                newphase = max(oldphase, parentphase)
            else:
                newphase = max(targetphase, parentphase)
            newphases[newnode] = newphase
            if newphase > ctx.phase():
                toretract.setdefault(newphase, []).append(newnode)
            elif newphase < ctx.phase():
                toadvance.setdefault(newphase, []).append(newnode)

    with repo.transaction(b'cleanup') as tr:
        # Move bookmarks
        bmarks = repo._bookmarks
        bmarkchanges = []
        for oldnode, newnode in moves.items():
            oldbmarks = repo.nodebookmarks(oldnode)
            if not oldbmarks:
                continue
            from . import bookmarks  # avoid import cycle

            repo.ui.debug(
                b'moving bookmarks %r from %s to %s\n'
                % (
                    pycompat.rapply(pycompat.maybebytestr, oldbmarks),
                    hex(oldnode),
                    hex(newnode),
                )
            )
            # Delete divergent bookmarks being parents of related newnodes
            deleterevs = repo.revs(
                b'parents(roots(%ln & (::%n))) - parents(%n)',
                allnewnodes,
                newnode,
                oldnode,
            )
            deletenodes = _containsnode(repo, deleterevs)
            for name in oldbmarks:
                bmarkchanges.append((name, newnode))
                for b in bookmarks.divergent2delete(repo, deletenodes, name):
                    bmarkchanges.append((b, None))

        if bmarkchanges:
            bmarks.applychanges(repo, tr, bmarkchanges)

        for phase, nodes in toretract.items():
            phases.retractboundary(repo, tr, phase, nodes)
        for phase, nodes in toadvance.items():
            phases.advanceboundary(repo, tr, phase, nodes)

        mayusearchived = repo.ui.config(b'experimental', b'cleanup-as-archived')
        # Obsolete or strip nodes
        if obsolete.isenabled(repo, obsolete.createmarkersopt):
            # If a node is already obsoleted, and we want to obsolete it
            # without a successor, skip that obsolete request since it's
            # unnecessary. That's the "if s or not isobs(n)" check below.
            # Also sort the nodes in topological order, which might be useful
            # for some obsstore logic.
            # NOTE: the sorting might belong to createmarkers.
            torev = unfi.changelog.rev
            sortfunc = lambda ns: torev(ns[0][0])
            rels = []
            for ns, s in sorted(replacements.items(), key=sortfunc):
                rel = (tuple(unfi[n] for n in ns), tuple(unfi[m] for m in s))
                rels.append(rel)
            if rels:
                obsolete.createmarkers(
                    repo, rels, operation=operation, metadata=metadata
                )
        elif phases.supportarchived(repo) and mayusearchived:
            # this assumes we do not have "unstable" nodes above the cleaned
            # ones
            allreplaced = set()
            for ns in replacements.keys():
                allreplaced.update(ns)
            if backup:
                from . import repair  # avoid import cycle

                node = min(allreplaced, key=repo.changelog.rev)
                repair.backupbundle(
                    repo, allreplaced, allreplaced, node, operation
                )
            phases.retractboundary(repo, tr, phases.archived, allreplaced)
        else:
            from . import repair  # avoid import cycle

            tostrip = list(n for ns in replacements for n in ns)
            if tostrip:
                repair.delayedstrip(
                    repo.ui, repo, tostrip, operation, backup=backup
                )


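# Illustrative sketch (not part of the original module): how a
# history-rewriting command might report its rewrites. Assumes
# `old_node` was rewritten into `new_node` (both binary nodeids) and
# that `dropped_node` has no successor.
#
#     cleanupnodes(
#         repo,
#         {old_node: [new_node], dropped_node: []},
#         operation=b'myrewrite',
#         fixphase=True,
#     )
#     # Depending on configuration this writes obsmarkers, archives, or
#     # strips the old nodes, and moves any bookmarks onto new_node.

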
def addremove(repo, matcher, prefix, uipathfn, opts=None, open_tr=None):
    if opts is None:
        opts = {}
    m = matcher
    dry_run = opts.get(b'dry_run')
    try:
        similarity = float(opts.get(b'similarity') or 0)
    except ValueError:
        raise error.InputError(_(b'similarity must be a number'))
    if similarity < 0 or similarity > 100:
        raise error.InputError(_(b'similarity must be between 0 and 100'))
    similarity /= 100.0

    ret = 0

    wctx = repo[None]
    for subpath in sorted(wctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        if opts.get(b'subrepos') or m.exact(subpath) or any(submatch.files()):
            sub = wctx.sub(subpath)
            subprefix = repo.wvfs.reljoin(prefix, subpath)
            subuipathfn = subdiruipathfn(subpath, uipathfn)
            try:
                if sub.addremove(submatch, subprefix, subuipathfn, opts):
                    ret = 1
            except error.LookupError:
                repo.ui.status(
                    _(b"skipping missing subrepository: %s\n")
                    % uipathfn(subpath)
                )

    rejected = []

    def badfn(f, msg):
        if f in m.files():
            m.bad(f, msg)
        rejected.append(f)

    badmatch = matchmod.badmatch(m, badfn)
    added, unknown, deleted, removed, forgotten = _interestingfiles(
        repo, badmatch
    )

    unknownset = set(unknown + forgotten)
    toprint = unknownset.copy()
    toprint.update(deleted)
    for abs in sorted(toprint):
        if repo.ui.verbose or not m.exact(abs):
            if abs in unknownset:
                status = _(b'adding %s\n') % uipathfn(abs)
                label = b'ui.addremove.added'
            else:
                status = _(b'removing %s\n') % uipathfn(abs)
                label = b'ui.addremove.removed'
            repo.ui.status(status, label=label)

    renames = _findrenames(
        repo, m, added + unknown, removed + deleted, similarity, uipathfn
    )

    if not dry_run and (unknown or forgotten or deleted or renames):
        if open_tr is not None:
            open_tr()
        _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return ret


def marktouched(repo, files, similarity=0.0):
    """Assert that files have somehow been operated upon. files are relative to
    the repo root."""
    m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
    rejected = []

    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)

    if repo.ui.verbose:
        unknownset = set(unknown + forgotten)
        toprint = unknownset.copy()
        toprint.update(deleted)
        for abs in sorted(toprint):
            if abs in unknownset:
                status = _(b'adding %s\n') % abs
            else:
                status = _(b'removing %s\n') % abs
            repo.ui.status(status)

    # TODO: We should probably have the caller pass in uipathfn and apply it to
    # the messages above too. legacyrelativevalue=True is consistent with how
    # it used to work.
    uipathfn = getuipathfn(repo, legacyrelativevalue=True)
    renames = _findrenames(
        repo, m, added + unknown, removed + deleted, similarity, uipathfn
    )

    _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return 0


def _interestingfiles(repo, matcher):
    """Walk dirstate with matcher, looking for files that addremove would care
    about.

    This is different from dirstate.status because it doesn't care about
    whether files are modified or clean."""
    added, unknown, deleted, removed, forgotten = [], [], [], [], []
    audit_path = pathutil.pathauditor(repo.root, cached=True)

    ctx = repo[None]
    dirstate = repo.dirstate
    matcher = repo.narrowmatch(matcher, includeexact=True)
    walkresults = dirstate.walk(
        matcher,
        subrepos=sorted(ctx.substate),
        unknown=True,
        ignored=False,
        full=False,
    )
    for abs, st in walkresults.items():
        entry = dirstate.get_entry(abs)
        if (not entry.any_tracked) and audit_path.check(abs):
            unknown.append(abs)
        elif (not entry.removed) and not st:
            deleted.append(abs)
        elif entry.removed and st:
            forgotten.append(abs)
        # for finding renames
        elif entry.removed and not st:
            removed.append(abs)
        elif entry.added:
            added.append(abs)

    return added, unknown, deleted, removed, forgotten


def _findrenames(repo, matcher, added, removed, similarity, uipathfn):
    '''Find renames from removed files to added ones.'''
    renames = {}
    if similarity > 0:
        for old, new, score in similar.findrenames(
            repo, added, removed, similarity
        ):
            if (
                repo.ui.verbose
                or not matcher.exact(old)
                or not matcher.exact(new)
            ):
                repo.ui.status(
                    _(
                        b'recording removal of %s as rename to %s '
                        b'(%d%% similar)\n'
                    )
                    % (uipathfn(old), uipathfn(new), score * 100)
                )
            renames[new] = old
    return renames


def _markchanges(repo, unknown, deleted, renames):
    """Marks the files in unknown as added, the files in deleted as removed,
    and the files in renames as copied."""
    wctx = repo[None]
    with repo.wlock():
        wctx.forget(deleted)
        wctx.add(unknown)
        for new, old in renames.items():
            wctx.copy(old, new)


def getrenamedfn(repo, endrev=None):
    if copiesmod.usechangesetcentricalgo(repo):

        def getrenamed(fn, rev):
            ctx = repo[rev]
            p1copies = ctx.p1copies()
            if fn in p1copies:
                return p1copies[fn]
            p2copies = ctx.p2copies()
            if fn in p2copies:
                return p2copies[fn]
            return None

        return getrenamed

    rcache = {}
    if endrev is None:
        endrev = len(repo)

    def getrenamed(fn, rev):
        """looks up all renames for a file (up to endrev) the first
        time the file is given. It indexes on the changerev and only
        parses the manifest if linkrev != changerev.
        Returns rename info for fn at changerev rev."""
        if fn not in rcache:
            rcache[fn] = {}
            fl = repo.file(fn)
            for i in fl:
                lr = fl.linkrev(i)
                renamed = fl.renamed(fl.node(i))
                rcache[fn][lr] = renamed and renamed[0]
                if lr >= endrev:
                    break
        if rev in rcache[fn]:
            return rcache[fn][rev]

        # If linkrev != rev (i.e. rev not found in rcache) fallback to
        # filectx logic.
        try:
            return repo[rev][fn].copysource()
        except error.LookupError:
            return None

    return getrenamed


def getcopiesfn(repo, endrev=None):
    if copiesmod.usechangesetcentricalgo(repo):

        def copiesfn(ctx):
            if ctx.p2copies():
                allcopies = ctx.p1copies().copy()
                # There should be no overlap
                allcopies.update(ctx.p2copies())
                return sorted(allcopies.items())
            else:
                return sorted(ctx.p1copies().items())

    else:
        getrenamed = getrenamedfn(repo, endrev)

        def copiesfn(ctx):
            copies = []
            for fn in ctx.files():
                rename = getrenamed(fn, ctx.rev())
                if rename:
                    copies.append((fn, rename))
            return copies

    return copiesfn


def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    different reasons it might not end with dst being marked as copied from src.
    """
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc:  # copying back a copy?
        entry = repo.dirstate.get_entry(dst)
        if (entry.added or not entry.tracked) and not dryrun:
            repo.dirstate.set_tracked(dst)
    else:
        if repo.dirstate.get_entry(origsrc).added and origsrc == src:
            if not ui.quiet:
                ui.warn(
                    _(
                        b"%s has not been committed yet, so no copy "
                        b"data will be stored for %s.\n"
                    )
                    % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd))
                )
            if not repo.dirstate.get_entry(dst).tracked and not dryrun:
                wctx.add([dst])
        elif not dryrun:
            wctx.copy(origsrc, dst)


def movedirstate(repo, newctx, match=None):
    """Move the dirstate to newctx and adjust it as necessary.

    A matcher can be provided as an optimization. It is probably a bug to pass
    a matcher that doesn't match all the differences between the parent of the
    working copy and newctx.
    """
    oldctx = repo[b'.']
    ds = repo.dirstate
    copies = dict(ds.copies())
    ds.setparents(newctx.node(), repo.nullid)
    s = newctx.status(oldctx, match=match)

    for f in s.modified:
        ds.update_file_p1(f, p1_tracked=True)

    for f in s.added:
        ds.update_file_p1(f, p1_tracked=False)

    for f in s.removed:
        ds.update_file_p1(f, p1_tracked=True)

    # Merge old parent and old working dir copies
    oldcopies = copiesmod.pathcopies(newctx, oldctx, match)
    oldcopies.update(copies)
    copies = {dst: oldcopies.get(src, src) for dst, src in oldcopies.items()}
    # Adjust the dirstate copies
    for dst, src in copies.items():
        if src not in newctx or dst in newctx or not ds.get_entry(dst).added:
            src = None
        ds.copy(src, dst)
    repo._quick_access_changeid_invalidate()


1529 def filterrequirements(requirements):
1529 def filterrequirements(requirements):
1530 """filters the requirements into two sets:
1530 """filters the requirements into two sets:
1531
1531
1532 wcreq: requirements which should be written in .hg/requires
1532 wcreq: requirements which should be written in .hg/requires
1533 storereq: which should be written in .hg/store/requires
1533 storereq: which should be written in .hg/store/requires
1534
1534
1535 Returns (wcreq, storereq)
1535 Returns (wcreq, storereq)
1536 """
1536 """
1537 if requirementsmod.SHARESAFE_REQUIREMENT in requirements:
1537 if requirementsmod.SHARESAFE_REQUIREMENT in requirements:
1538 wc, store = set(), set()
1538 wc, store = set(), set()
1539 for r in requirements:
1539 for r in requirements:
1540 if r in requirementsmod.WORKING_DIR_REQUIREMENTS:
1540 if r in requirementsmod.WORKING_DIR_REQUIREMENTS:
1541 wc.add(r)
1541 wc.add(r)
1542 else:
1542 else:
1543 store.add(r)
1543 store.add(r)
1544 return wc, store
1544 return wc, store
1545 return requirements, None
1545 return requirements, None
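
# Sketch of the split performed above; the exact requirement names depend on
# the repository, so the values shown are assumptions:
#
#     wcreq, storereq = filterrequirements(repo.requirements)
#     # share-safe repo: wcreq = {b'share-safe', ...}, storereq = {b'revlogv1', ...}
#     # otherwise:       wcreq = repo.requirements,    storereq = None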


def istreemanifest(repo):
    """returns whether the repository is using treemanifest or not"""
    return requirementsmod.TREEMANIFEST_REQUIREMENT in repo.requirements


def writereporequirements(repo, requirements=None):
    """writes requirements for the repo

    Requirements are written to .hg/requires and .hg/store/requires based
    on whether share-safe mode is enabled and which requirements are wdir
    requirements and which are store requirements.
    """
    if requirements:
        repo.requirements = requirements
    wcreq, storereq = filterrequirements(repo.requirements)
    if wcreq is not None:
        writerequires(repo.vfs, wcreq)
    if storereq is not None:
        writerequires(repo.svfs, storereq)
    elif repo.ui.configbool(b'format', b'usestore'):
        # only remove store requires if we are using store
        repo.svfs.tryunlink(b'requires')


def writerequires(opener, requirements):
    with opener(b'requires', b'w', atomictemp=True) as fp:
        for r in sorted(requirements):
            fp.write(b"%s\n" % r)
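
# The on-disk format produced above is one requirement name per line, in
# sorted order; e.g. a hypothetical set {b'store', b'revlogv1'} becomes:
#
#     revlogv1
#     store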


class filecachesubentry:
    def __init__(self, path, stat):
        self.path = path
        self.cachestat = None
        self._cacheable = None

        if stat:
            self.cachestat = filecachesubentry.stat(self.path)

            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()
            else:
                # None means we don't know yet
                self._cacheable = None

    def refresh(self):
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        if self._cacheable is not None:
            return self._cacheable

        # we don't know yet, assume it is for now
        return True

    def changed(self):
        # no point in going further if we can't cache it
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # we may not know if it's cacheable yet, check again now
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

        # check again
        if not self._cacheable:
            return True

        if self.cachestat != newstat:
            self.cachestat = newstat
            return True
        else:
            return False

    @staticmethod
    def stat(path):
        try:
            return util.cachestat(path)
        except FileNotFoundError:
            pass


class filecacheentry:
    def __init__(self, paths, stat=True):
        self._entries = []
        for path in paths:
            self._entries.append(filecachesubentry(path, stat))

    def changed(self):
        '''true if any entry has changed'''
        for entry in self._entries:
            if entry.changed():
                return True
        return False

    def refresh(self):
        for entry in self._entries:
            entry.refresh()


class filecache:
    """A property-like decorator that tracks files under .hg/ for updates.

    On first access, the files defined as arguments are stat()ed and the
    results cached. The decorated function is called. The results are stashed
    away in a ``_filecache`` dict on the object whose method is decorated.

    On subsequent access, the cached result is used as it is set to the
    instance dictionary.

    On external property set/delete operations, the caller must update the
    corresponding _filecache entry appropriately. Use __class__.<attr>.set()
    instead of directly setting <attr>.

    When using the property API, the cached data is always used if available.
    No stat() is performed to check if the file has changed.

    Others can muck about with the state of the ``_filecache`` dict, e.g. they
    can populate an entry before the property's getter is called. In this case,
    entries in ``_filecache`` will be used during property operations,
    if available. If the underlying file changes, it is up to external callers
    to reflect this by e.g. calling ``delattr(obj, attr)`` to remove the cached
    method result as well as possibly calling ``del obj._filecache[attr]`` to
    remove the ``filecacheentry``.
    """

    def __init__(self, *paths):
        self.paths = paths

    def tracked_paths(self, obj):
        return [self.join(obj, path) for path in self.paths]

    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.

        Users should subclass filecache and provide their own version of this
        function to call the appropriate join function on 'obj' (an instance
        of the class whose method was decorated).
        """
        raise NotImplementedError

    def __call__(self, func):
        self.func = func
        self.sname = func.__name__
        self.name = pycompat.sysbytes(self.sname)
        return self

    def __get__(self, obj, type=None):
        # if accessed on the class, return the descriptor itself.
        if obj is None:
            return self

        assert self.sname not in obj.__dict__

        entry = obj._filecache.get(self.name)

        if entry:
            if entry.changed():
                entry.obj = self.func(obj)
        else:
            paths = self.tracked_paths(obj)

            # We stat -before- creating the object so our cache doesn't lie if
            # a writer modified between the time we read and stat
            entry = filecacheentry(paths, True)
            entry.obj = self.func(obj)

            obj._filecache[self.name] = entry

        obj.__dict__[self.sname] = entry.obj
        return entry.obj

    # don't implement __set__(), which would make __dict__ lookup as slow as
    # a function call.

    def set(self, obj, value):
        if self.name not in obj._filecache:
            # we add an entry for the missing value because X in __dict__
            # implies X in _filecache
            paths = self.tracked_paths(obj)
            ce = filecacheentry(paths, False)
            obj._filecache[self.name] = ce
        else:
            ce = obj._filecache[self.name]

        ce.obj = value  # update cached copy
        obj.__dict__[self.sname] = value  # update copy returned by obj.x
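
# Illustrative sketch, not part of scmutil.py: the minimal protocol a class
# must follow to use this decorator. The names `vfscache` and `thing` are
# made up for the example.
#
#     class vfscache(filecache):
#         def join(self, obj, fname):
#             return obj.vfs.join(fname)
#
#     class thing:
#         def __init__(self, vfs):
#             self.vfs = vfs
#             self._filecache = {}  # required by the descriptor above
#
#         @vfscache(b'requires')
#         def requires(self):
#             # recomputed only when the tracked file changes on disk
#             return set(self.vfs.read(b'requires').splitlines())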


def extdatasource(repo, source):
    """Gather a map of rev -> value dict from the specified source

    A source spec is treated as a URL, with a special case shell: type
    for parsing the output from a shell command.

    The data is parsed as a series of newline-separated records where
    each record is a revision specifier optionally followed by a space
    and a freeform string value. If the revision is known locally, it
    is converted to a rev, otherwise the record is skipped.

    Note that both key and value are treated as UTF-8 and converted to
    the local encoding. This allows uniformity between local and
    remote data sources.
    """

    spec = repo.ui.config(b"extdata", source)
    if not spec:
        raise error.Abort(_(b"unknown extdata source '%s'") % source)

    data = {}
    src = proc = None
    try:
        if spec.startswith(b"shell:"):
            # external commands should be run relative to the repo root
            cmd = spec[6:]
            proc = subprocess.Popen(
                procutil.tonativestr(cmd),
                shell=True,
                bufsize=-1,
                close_fds=procutil.closefds,
                stdout=subprocess.PIPE,
                cwd=procutil.tonativestr(repo.root),
            )
            src = proc.stdout
        else:
            # treat as a URL or file
            src = url.open(repo.ui, spec)
        for l in src:
            if b" " in l:
                k, v = l.strip().split(b" ", 1)
            else:
                k, v = l.strip(), b""

            k = encoding.tolocal(k)
            try:
                data[revsingle(repo, k).rev()] = encoding.tolocal(v)
            except (error.LookupError, error.RepoLookupError, error.InputError):
                pass  # we ignore data for nodes that don't exist locally
    finally:
        if proc:
            try:
                proc.communicate()
            except ValueError:
                # This happens if we started iterating src and then
                # get a parse error on a line. It should be safe to ignore.
                pass
        if src:
            src.close()
    if proc and proc.returncode != 0:
        raise error.Abort(
            _(b"extdata command '%s' failed: %s")
            % (cmd, procutil.explainexit(proc.returncode))
        )

    return data
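
# Hypothetical configuration sketch: with an hgrc entry such as
#
#     [extdata]
#     bugzilla = shell:cat .hg/bugmap
#
# where every line of the made-up .hg/bugmap file is "<revspec> <value>",
# callers could resolve it to a {rev: value} mapping with:
#
#     data = extdatasource(repo, b'bugzilla')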


class progress:
    def __init__(self, ui, updatebar, topic, unit=b"", total=None):
        self.ui = ui
        self.pos = 0
        self.topic = topic
        self.unit = unit
        self.total = total
        self.debug = ui.configbool(b'progress', b'debug')
        self._updatebar = updatebar

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, exc_tb):
        self.complete()

    def update(self, pos, item=b"", total=None):
        assert pos is not None
        if total:
            self.total = total
        self.pos = pos
        self._updatebar(self.topic, self.pos, item, self.unit, self.total)
        if self.debug:
            self._printdebug(item)

    def increment(self, step=1, item=b"", total=None):
        self.update(self.pos + step, item, total)

    def complete(self):
        self.pos = None
        self.unit = b""
        self.total = None
        self._updatebar(self.topic, self.pos, b"", self.unit, self.total)

    def _printdebug(self, item):
        unit = b''
        if self.unit:
            unit = b' ' + self.unit
        if item:
            item = b' ' + item

        if self.total:
            pct = 100.0 * self.pos / self.total
            self.ui.debug(
                b'%s:%s %d/%d%s (%4.2f%%)\n'
                % (self.topic, item, self.pos, self.total, unit, pct)
            )
        else:
            self.ui.debug(b'%s:%s %d%s\n' % (self.topic, item, self.pos, unit))
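
# Typical usage sketch: instances are normally obtained through
# ui.makeprogress(), which constructs this class; the loop below is
# illustrative only and `files` is a made-up variable.
#
#     with ui.makeprogress(b'scanning', unit=b'files', total=len(files)) as p:
#         for f in files:
#             p.increment(item=f)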


def gdinitconfig(ui):
    """helper function to know if a repo should be created as general delta"""
    # experimental config: format.generaldelta
    return ui.configbool(b'format', b'generaldelta') or ui.configbool(
        b'format', b'usegeneraldelta'
    )


def gddeltaconfig(ui):
    """helper function to know if incoming deltas should be optimized

    The `format.generaldelta` config is an old form of the config that also
    implies that incoming delta-bases should never be trusted. This function
    exists for this purpose.
    """
    # experimental config: format.generaldelta
    return ui.configbool(b'format', b'generaldelta')


class simplekeyvaluefile:
    """A simple file with key=value lines

    Keys must be alphanumerics and start with a letter, values must not
    contain '\n' characters"""

    firstlinekey = b'__firstline'

    def __init__(self, vfs, path, keys=None):
        self.vfs = vfs
        self.path = path

    def read(self, firstlinenonkeyval=False):
        """Read the contents of a simple key-value file

        'firstlinenonkeyval' indicates whether the first line of file should
        be treated as a key-value pair or returned fully under the
        __firstline key."""
        lines = self.vfs.readlines(self.path)
        d = {}
        if firstlinenonkeyval:
            if not lines:
                e = _(b"empty simplekeyvalue file")
                raise error.CorruptedState(e)
            # we don't want to include '\n' in the __firstline
            d[self.firstlinekey] = lines[0][:-1]
            del lines[0]

        try:
            # the 'if line.strip()' part prevents us from failing on empty
            # lines which only contain '\n' therefore are not skipped
            # by 'if line'
            updatedict = dict(
                line[:-1].split(b'=', 1) for line in lines if line.strip()
            )
            if self.firstlinekey in updatedict:
                e = _(b"%r can't be used as a key")
                raise error.CorruptedState(e % self.firstlinekey)
            d.update(updatedict)
        except ValueError as e:
            raise error.CorruptedState(stringutil.forcebytestr(e))
        return d

    def write(self, data, firstline=None):
        """Write key=>value mapping to a file
        data is a dict. Keys must be alphanumerical and start with a letter.
        Values must not contain newline characters.

        If 'firstline' is not None, it is written to file before
        everything else, as it is, not in a key=value form"""
        lines = []
        if firstline is not None:
            lines.append(b'%s\n' % firstline)

        for k, v in data.items():
            if k == self.firstlinekey:
                e = b"key name '%s' is reserved" % self.firstlinekey
                raise error.ProgrammingError(e)
            if not k[0:1].isalpha():
                e = b"keys must start with a letter in a key-value file"
                raise error.ProgrammingError(e)
            if not k.isalnum():
                e = b"invalid key name in a simple key-value file"
                raise error.ProgrammingError(e)
            if b'\n' in v:
                e = b"invalid value in a simple key-value file"
                raise error.ProgrammingError(e)
            lines.append(b"%s=%s\n" % (k, v))
        with self.vfs(self.path, mode=b'wb', atomictemp=True) as fp:
            fp.write(b''.join(lines))
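
# Round-trip sketch (illustrative; `vfs` stands for any vfs instance and
# b'mystate' is a made-up file name):
#
#     skv = simplekeyvaluefile(vfs, b'mystate')
#     skv.write({b'version': b'1'}, firstline=b'statefile-v1')
#     # the file now contains b'statefile-v1\nversion=1\n'
#     skv.read(firstlinenonkeyval=True)
#     # -> {b'__firstline': b'statefile-v1', b'version': b'1'}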


_reportobsoletedsource = [
    b'debugobsolete',
    b'pull',
    b'push',
    b'serve',
    b'unbundle',
]

_reportnewcssource = [
    b'pull',
    b'unbundle',
]


def prefetchfiles(repo, revmatches):
    """Invokes the registered file prefetch functions, allowing extensions to
    ensure the corresponding files are available locally, before the command
    uses them.

    Args:
      revmatches: a list of (revision, match) tuples to indicate the files to
      fetch at each revision. If any of the match elements is None, it matches
      all files.
    """

    def _matcher(m):
        if m:
            assert isinstance(m, matchmod.basematcher)
            # The command itself will complain about files that don't exist, so
            # don't duplicate the message.
            return matchmod.badmatch(m, lambda fn, msg: None)
        else:
            return matchall(repo)

    revbadmatches = [(rev, _matcher(match)) for (rev, match) in revmatches]

    fileprefetchhooks(repo, revbadmatches)
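
# Illustrative sketch: an extension could register a prefetch function like
# this (the extension name and the function body are made up):
#
#     def _prefetch(repo, revmatches):
#         for rev, match in revmatches:
#             pass  # fetch the files selected by `match` at `rev`
#
#     fileprefetchhooks.add(b'myext', _prefetch)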


# a list of (repo, revmatches) prefetch functions
fileprefetchhooks = util.hooks()

# A marker that tells the evolve extension to suppress its own reporting
_reportstroubledchangesets = True


def registersummarycallback(repo, otr, txnname=b'', as_validator=False):
    """register a callback to issue a summary after the transaction is closed

    If as_validator is true, then the callbacks are registered as transaction
    validators instead.
    """

    def txmatch(sources):
        return any(txnname.startswith(source) for source in sources)

    categories = []

    def reportsummary(func):
        """decorator for report callbacks."""
        # The repoview life cycle is shorter than the one of the actual
        # underlying repository. So the filtered object can die before the
        # weakref is used leading to troubles. We keep a reference to the
        # unfiltered object and restore the filtering when retrieving the
        # repository through the weakref.
        filtername = repo.filtername
        reporef = weakref.ref(repo.unfiltered())

        def wrapped(tr):
            repo = reporef()
            if filtername:
                assert repo is not None  # help pytype
                repo = repo.filtered(filtername)
            func(repo, tr)

        newcat = b'%02i-txnreport' % len(categories)
        if as_validator:
            otr.addvalidator(newcat, wrapped)
        else:
            otr.addpostclose(newcat, wrapped)
        categories.append(newcat)
        return wrapped

    @reportsummary
    def reportchangegroup(repo, tr):
        cgchangesets = tr.changes.get(b'changegroup-count-changesets', 0)
        cgrevisions = tr.changes.get(b'changegroup-count-revisions', 0)
        cgfiles = tr.changes.get(b'changegroup-count-files', 0)
        cgheads = tr.changes.get(b'changegroup-count-heads', 0)
        if cgchangesets or cgrevisions or cgfiles:
            htext = b""
            if cgheads:
                htext = _(b" (%+d heads)") % cgheads
            msg = _(b"added %d changesets with %d changes to %d files%s\n")
            if as_validator:
                msg = _(b"adding %d changesets with %d changes to %d files%s\n")
            assert repo is not None  # help pytype
            repo.ui.status(msg % (cgchangesets, cgrevisions, cgfiles, htext))

    if txmatch(_reportobsoletedsource):

        @reportsummary
        def reportobsoleted(repo, tr):
            obsoleted = obsutil.getobsoleted(repo, tr)
            newmarkers = len(tr.changes.get(b'obsmarkers', ()))
            if newmarkers:
                repo.ui.status(_(b'%i new obsolescence markers\n') % newmarkers)
            if obsoleted:
                msg = _(b'obsoleted %i changesets\n')
                if as_validator:
                    msg = _(b'obsoleting %i changesets\n')
                repo.ui.status(msg % len(obsoleted))

    if obsolete.isenabled(
        repo, obsolete.createmarkersopt
    ) and repo.ui.configbool(
        b'experimental', b'evolution.report-instabilities'
    ):
        instabilitytypes = [
            (b'orphan', b'orphan'),
            (b'phase-divergent', b'phasedivergent'),
            (b'content-divergent', b'contentdivergent'),
        ]

        def getinstabilitycounts(repo):
            filtered = repo.changelog.filteredrevs
            counts = {}
            for instability, revset in instabilitytypes:
                counts[instability] = len(
                    set(obsolete.getrevs(repo, revset)) - filtered
                )
            return counts

        oldinstabilitycounts = getinstabilitycounts(repo)

        @reportsummary
        def reportnewinstabilities(repo, tr):
            newinstabilitycounts = getinstabilitycounts(repo)
            for instability, revset in instabilitytypes:
                delta = (
                    newinstabilitycounts[instability]
                    - oldinstabilitycounts[instability]
                )
                msg = getinstabilitymessage(delta, instability)
                if msg:
                    repo.ui.warn(msg)

    if txmatch(_reportnewcssource):

        @reportsummary
        def reportnewcs(repo, tr):
            """Report the range of new revisions pulled/unbundled."""
            origrepolen = tr.changes.get(b'origrepolen', len(repo))
            unfi = repo.unfiltered()
            if origrepolen >= len(unfi):
                return

            # Compute the bounds of new visible revisions' range.
            revs = smartset.spanset(repo, start=origrepolen)
            if revs:
                minrev, maxrev = repo[revs.min()], repo[revs.max()]

                if minrev == maxrev:
                    revrange = minrev
                else:
                    revrange = b'%s:%s' % (minrev, maxrev)
                draft = len(repo.revs(b'%ld and draft()', revs))
                secret = len(repo.revs(b'%ld and secret()', revs))
                if not (draft or secret):
                    msg = _(b'new changesets %s\n') % revrange
                elif draft and secret:
                    msg = _(b'new changesets %s (%d drafts, %d secrets)\n')
                    msg %= (revrange, draft, secret)
                elif draft:
                    msg = _(b'new changesets %s (%d drafts)\n')
                    msg %= (revrange, draft)
                elif secret:
                    msg = _(b'new changesets %s (%d secrets)\n')
                    msg %= (revrange, secret)
                else:
                    errormsg = b'entered unreachable condition'
                    raise error.ProgrammingError(errormsg)
                repo.ui.status(msg)

            # search new changesets directly pulled as obsolete
            duplicates = tr.changes.get(b'revduplicates', ())
            obsadded = unfi.revs(
                b'(%d: + %ld) and obsolete()', origrepolen, duplicates
            )
            cl = repo.changelog
            extinctadded = [r for r in obsadded if r not in cl]
            if extinctadded:
                # They are not just obsolete, but obsolete and invisible;
                # we call them "extinct" internally but the term has not been
                # exposed to users.
                msg = b'(%d other changesets obsolete on arrival)\n'
                repo.ui.status(msg % len(extinctadded))

        @reportsummary
        def reportphasechanges(repo, tr):
            """Report statistics of phase changes for changesets pre-existing
            pull/unbundle.
            """
            origrepolen = tr.changes.get(b'origrepolen', len(repo))
            published = []
            for revs, (old, new) in tr.changes.get(b'phases', []):
                if new != phases.public:
                    continue
                published.extend(rev for rev in revs if rev < origrepolen)
            if not published:
                return
            msg = _(b'%d local changesets published\n')
            if as_validator:
                msg = _(b'%d local changesets will be published\n')
            repo.ui.status(msg % len(published))
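
# Usage sketch: the repository layer wires this up when opening a
# transaction; code creating its own transaction could do the same, e.g.
#
#     tr = repo.transaction(b'pull')
#     registersummarycallback(repo, tr, txnname=b'pull')
#
# so that the matching summary callbacks fire when the transaction closes.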


def getinstabilitymessage(delta, instability):
    """function to return the message to show warning about new instabilities

    exists as a separate function so that extensions can wrap it to show more
    information, like how to fix instabilities"""
    if delta > 0:
        return _(b'%i new %s changesets\n') % (delta, instability)


def nodesummaries(repo, nodes, maxnumnodes=4):
    if len(nodes) <= maxnumnodes or repo.ui.verbose:
        return b' '.join(short(h) for h in nodes)
    first = b' '.join(short(h) for h in nodes[:maxnumnodes])
    return _(b"%s and %d others") % (first, len(nodes) - maxnumnodes)


def enforcesinglehead(repo, tr, desc, accountclosed, filtername):
    """check that no named branch has multiple heads"""
    if desc in (b'strip', b'repair'):
        # skip the logic during strip
        return
    visible = repo.filtered(filtername)
    # possible improvement: we could restrict the check to affected branches
    bm = visible.branchmap()
    for name in bm:
        heads = bm.branchheads(name, closed=accountclosed)
        if len(heads) > 1:
            msg = _(b'rejecting multiple heads on branch "%s"')
            msg %= name
            hint = _(b'%d heads: %s')
            hint %= (len(heads), nodesummaries(repo, heads))
            raise error.Abort(msg, hint=hint)


def wrapconvertsink(sink):
    """Allow extensions to wrap the sink returned by convcmd.convertsink()
    before it is used, whether or not the convert extension was formally loaded.
    """
    return sink


def unhidehashlikerevs(repo, specs, hiddentype):
    """parse the user specs and unhide changesets whose hash or revision number
    is passed.

    hiddentype can be: 1) 'warn': warn while unhiding changesets
                       2) 'nowarn': don't warn while unhiding changesets

    returns a repo object with the required changesets unhidden
    """
    if not specs:
        return repo

    if not repo.filtername or not repo.ui.configbool(
        b'experimental', b'directaccess'
    ):
        return repo

    if repo.filtername not in (b'visible', b'visible-hidden'):
        return repo

    symbols = set()
    for spec in specs:
        try:
            tree = revsetlang.parse(spec)
        except error.ParseError:  # will be reported by scmutil.revrange()
            continue

        symbols.update(revsetlang.gethashlikesymbols(tree))

    if not symbols:
        return repo

    revs = _getrevsfromsymbols(repo, symbols)

    if not revs:
        return repo

    if hiddentype == b'warn':
        unfi = repo.unfiltered()
        revstr = b", ".join([pycompat.bytestr(unfi[l]) for l in revs])
        repo.ui.warn(
            _(
                b"warning: accessing hidden changesets for write "
                b"operation: %s\n"
            )
            % revstr
        )

    # we have to use a new filtername to separate branch/tags caches until we
    # can disable these caches when revisions are dynamically pinned.
    return repo.filtered(b'visible-hidden', revs)
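
# Illustrative sketch: a write command handling user-supplied revisions
# could pin hidden changesets with a warning before resolving them:
#
#     repo = unhidehashlikerevs(repo, [b'3f2e', b'12'], b'warn')
#
# (the revision strings above are made up).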


def _getrevsfromsymbols(repo, symbols):
    """parses the list of symbols and returns a set of revision numbers of
    hidden changesets present in symbols"""
    revs = set()
    unfi = repo.unfiltered()
    unficl = unfi.changelog
    cl = repo.changelog
    tiprev = len(unficl)
    allowrevnums = repo.ui.configbool(b'experimental', b'directaccess.revnums')
    for s in symbols:
        try:
            n = int(s)
            if n <= tiprev:
                if not allowrevnums:
                    continue
                else:
                    if n not in cl:
                        revs.add(n)
                    continue
        except ValueError:
            pass

        try:
            s = resolvehexnodeidprefix(unfi, s)
        except (error.LookupError, error.WdirUnsupported):
            s = None

        if s is not None:
            rev = unficl.rev(s)
            if rev not in cl:
                revs.add(rev)

    return revs


def bookmarkrevs(repo, mark):
    """Select revisions reachable by a given bookmark

    If the bookmarked revision isn't a head, an empty set will be returned.
    """
    return repo.revs(format_bookmark_revspec(mark))


def format_bookmark_revspec(mark):
    """Build a revset expression to select revisions reachable by a given
    bookmark"""
    mark = b'literal:' + mark
    return revsetlang.formatspec(
        b"ancestors(bookmark(%s)) - "
        b"ancestors(head() and not bookmark(%s)) - "
        b"ancestors(bookmark() and not bookmark(%s))",
        mark,
        mark,
        mark,
    )
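
# Illustrative sketch: for a hypothetical bookmark b'feature', the revset
# built above keeps ancestors of the bookmarked head while excluding
# ancestors of other heads and other bookmarks, so
#
#     revs = bookmarkrevs(repo, b'feature')
#
# yields only the changesets "owned" by that bookmark (an empty set if the
# bookmarked revision isn't a head).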


def ismember(ui, username, userlist):
    """Check if username is a member of userlist.

    If userlist has a single '*' member, all users are considered members.
    Can be overridden by extensions to provide more complex authorization
    schemes.
    """
    return userlist == [b'*'] or username in userlist
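
# Behaviour sketch of the rule above (the user names are made up):
#
#     ismember(ui, b'alice', [b'alice', b'bob'])  # -> True
#     ismember(ui, b'mallory', [b'*'])            # -> True, wildcard list
#     ismember(ui, b'mallory', [b'alice'])        # -> False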