##// END OF EJS Templates
configitems: register the 'format.usegeneraldelta' config
marmoute -
r33243:4d9458e0 default
parent child Browse files
Show More
@@ -1,195 +1,198 b''
1 # configitems.py - centralized declaration of configuration option
1 # configitems.py - centralized declaration of configuration option
2 #
2 #
3 # Copyright 2017 Pierre-Yves David <pierre-yves.david@octobus.net>
3 # Copyright 2017 Pierre-Yves David <pierre-yves.david@octobus.net>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import functools
10 import functools
11
11
12 from . import (
12 from . import (
13 error,
13 error,
14 )
14 )
15
15
16 def loadconfigtable(ui, extname, configtable):
16 def loadconfigtable(ui, extname, configtable):
17 """update config item known to the ui with the extension ones"""
17 """update config item known to the ui with the extension ones"""
18 for section, items in configtable.items():
18 for section, items in configtable.items():
19 knownitems = ui._knownconfig.setdefault(section, {})
19 knownitems = ui._knownconfig.setdefault(section, {})
20 knownkeys = set(knownitems)
20 knownkeys = set(knownitems)
21 newkeys = set(items)
21 newkeys = set(items)
22 for key in sorted(knownkeys & newkeys):
22 for key in sorted(knownkeys & newkeys):
23 msg = "extension '%s' overwrite config item '%s.%s'"
23 msg = "extension '%s' overwrite config item '%s.%s'"
24 msg %= (extname, section, key)
24 msg %= (extname, section, key)
25 ui.develwarn(msg, config='warn-config')
25 ui.develwarn(msg, config='warn-config')
26
26
27 knownitems.update(items)
27 knownitems.update(items)
28
28
29 class configitem(object):
29 class configitem(object):
30 """represent a known config item
30 """represent a known config item
31
31
32 :section: the official config section where to find this item,
32 :section: the official config section where to find this item,
33 :name: the official name within the section,
33 :name: the official name within the section,
34 :default: default value for this item,
34 :default: default value for this item,
35 """
35 """
36
36
37 def __init__(self, section, name, default=None):
37 def __init__(self, section, name, default=None):
38 self.section = section
38 self.section = section
39 self.name = name
39 self.name = name
40 self.default = default
40 self.default = default
41
41
42 coreitems = {}
42 coreitems = {}
43
43
44 def _register(configtable, *args, **kwargs):
44 def _register(configtable, *args, **kwargs):
45 item = configitem(*args, **kwargs)
45 item = configitem(*args, **kwargs)
46 section = configtable.setdefault(item.section, {})
46 section = configtable.setdefault(item.section, {})
47 if item.name in section:
47 if item.name in section:
48 msg = "duplicated config item registration for '%s.%s'"
48 msg = "duplicated config item registration for '%s.%s'"
49 raise error.ProgrammingError(msg % (item.section, item.name))
49 raise error.ProgrammingError(msg % (item.section, item.name))
50 section[item.name] = item
50 section[item.name] = item
51
51
52 # Registering actual config items
52 # Registering actual config items
53
53
54 def getitemregister(configtable):
54 def getitemregister(configtable):
55 return functools.partial(_register, configtable)
55 return functools.partial(_register, configtable)
56
56
57 coreconfigitem = getitemregister(coreitems)
57 coreconfigitem = getitemregister(coreitems)
58
58
59 coreconfigitem('auth', 'cookiefile',
59 coreconfigitem('auth', 'cookiefile',
60 default=None,
60 default=None,
61 )
61 )
62 # bookmarks.pushing: internal hack for discovery
62 # bookmarks.pushing: internal hack for discovery
63 coreconfigitem('bookmarks', 'pushing',
63 coreconfigitem('bookmarks', 'pushing',
64 default=list,
64 default=list,
65 )
65 )
66 # bundle.mainreporoot: internal hack for bundlerepo
66 # bundle.mainreporoot: internal hack for bundlerepo
67 coreconfigitem('bundle', 'mainreporoot',
67 coreconfigitem('bundle', 'mainreporoot',
68 default='',
68 default='',
69 )
69 )
70 # bundle.reorder: experimental config
70 # bundle.reorder: experimental config
71 coreconfigitem('bundle', 'reorder',
71 coreconfigitem('bundle', 'reorder',
72 default='auto',
72 default='auto',
73 )
73 )
74 coreconfigitem('color', 'mode',
74 coreconfigitem('color', 'mode',
75 default='auto',
75 default='auto',
76 )
76 )
77 coreconfigitem('devel', 'all-warnings',
77 coreconfigitem('devel', 'all-warnings',
78 default=False,
78 default=False,
79 )
79 )
80 coreconfigitem('devel', 'bundle2.debug',
80 coreconfigitem('devel', 'bundle2.debug',
81 default=False,
81 default=False,
82 )
82 )
83 coreconfigitem('devel', 'check-locks',
83 coreconfigitem('devel', 'check-locks',
84 default=False,
84 default=False,
85 )
85 )
86 coreconfigitem('devel', 'check-relroot',
86 coreconfigitem('devel', 'check-relroot',
87 default=False,
87 default=False,
88 )
88 )
89 coreconfigitem('devel', 'disableloaddefaultcerts',
89 coreconfigitem('devel', 'disableloaddefaultcerts',
90 default=False,
90 default=False,
91 )
91 )
92 coreconfigitem('devel', 'legacy.exchange',
92 coreconfigitem('devel', 'legacy.exchange',
93 default=list,
93 default=list,
94 )
94 )
95 coreconfigitem('devel', 'servercafile',
95 coreconfigitem('devel', 'servercafile',
96 default='',
96 default='',
97 )
97 )
98 coreconfigitem('devel', 'serverexactprotocol',
98 coreconfigitem('devel', 'serverexactprotocol',
99 default='',
99 default='',
100 )
100 )
101 coreconfigitem('devel', 'serverrequirecert',
101 coreconfigitem('devel', 'serverrequirecert',
102 default=False,
102 default=False,
103 )
103 )
104 coreconfigitem('devel', 'strip-obsmarkers',
104 coreconfigitem('devel', 'strip-obsmarkers',
105 default=True,
105 default=True,
106 )
106 )
107 coreconfigitem('format', 'aggressivemergedeltas',
107 coreconfigitem('format', 'aggressivemergedeltas',
108 default=False,
108 default=False,
109 )
109 )
110 coreconfigitem('format', 'chunkcachesize',
110 coreconfigitem('format', 'chunkcachesize',
111 default=None,
111 default=None,
112 )
112 )
113 coreconfigitem('format', 'dotencode',
113 coreconfigitem('format', 'dotencode',
114 default=True,
114 default=True,
115 )
115 )
116 coreconfigitem('format', 'generaldelta',
116 coreconfigitem('format', 'generaldelta',
117 default=False,
117 default=False,
118 )
118 )
119 coreconfigitem('format', 'manifestcachesize',
119 coreconfigitem('format', 'manifestcachesize',
120 default=None,
120 default=None,
121 )
121 )
122 coreconfigitem('format', 'maxchainlen',
122 coreconfigitem('format', 'maxchainlen',
123 default=None,
123 default=None,
124 )
124 )
125 coreconfigitem('format', 'obsstore-version',
125 coreconfigitem('format', 'obsstore-version',
126 default=None,
126 default=None,
127 )
127 )
128 coreconfigitem('format', 'usefncache',
128 coreconfigitem('format', 'usefncache',
129 default=True,
129 default=True,
130 )
130 )
131 coreconfigitem('format', 'usegeneraldelta',
132 default=True,
133 )
131 coreconfigitem('hostsecurity', 'ciphers',
134 coreconfigitem('hostsecurity', 'ciphers',
132 default=None,
135 default=None,
133 )
136 )
134 coreconfigitem('hostsecurity', 'disabletls10warning',
137 coreconfigitem('hostsecurity', 'disabletls10warning',
135 default=False,
138 default=False,
136 )
139 )
137 coreconfigitem('patch', 'eol',
140 coreconfigitem('patch', 'eol',
138 default='strict',
141 default='strict',
139 )
142 )
140 coreconfigitem('patch', 'fuzz',
143 coreconfigitem('patch', 'fuzz',
141 default=2,
144 default=2,
142 )
145 )
143 coreconfigitem('server', 'bundle1',
146 coreconfigitem('server', 'bundle1',
144 default=True,
147 default=True,
145 )
148 )
146 coreconfigitem('server', 'bundle1gd',
149 coreconfigitem('server', 'bundle1gd',
147 default=None,
150 default=None,
148 )
151 )
149 coreconfigitem('server', 'compressionengines',
152 coreconfigitem('server', 'compressionengines',
150 default=list,
153 default=list,
151 )
154 )
152 coreconfigitem('server', 'concurrent-push-mode',
155 coreconfigitem('server', 'concurrent-push-mode',
153 default='strict',
156 default='strict',
154 )
157 )
155 coreconfigitem('server', 'disablefullbundle',
158 coreconfigitem('server', 'disablefullbundle',
156 default=False,
159 default=False,
157 )
160 )
158 coreconfigitem('server', 'maxhttpheaderlen',
161 coreconfigitem('server', 'maxhttpheaderlen',
159 default=1024,
162 default=1024,
160 )
163 )
161 coreconfigitem('server', 'preferuncompressed',
164 coreconfigitem('server', 'preferuncompressed',
162 default=False,
165 default=False,
163 )
166 )
164 coreconfigitem('server', 'uncompressedallowsecret',
167 coreconfigitem('server', 'uncompressedallowsecret',
165 default=False,
168 default=False,
166 )
169 )
167 coreconfigitem('server', 'validate',
170 coreconfigitem('server', 'validate',
168 default=False,
171 default=False,
169 )
172 )
170 coreconfigitem('server', 'zliblevel',
173 coreconfigitem('server', 'zliblevel',
171 default=-1,
174 default=-1,
172 )
175 )
173 coreconfigitem('ui', 'clonebundleprefers',
176 coreconfigitem('ui', 'clonebundleprefers',
174 default=list,
177 default=list,
175 )
178 )
176 coreconfigitem('ui', 'interactive',
179 coreconfigitem('ui', 'interactive',
177 default=None,
180 default=None,
178 )
181 )
179 coreconfigitem('ui', 'quiet',
182 coreconfigitem('ui', 'quiet',
180 default=False,
183 default=False,
181 )
184 )
182 # Windows defaults to a limit of 512 open files. A buffer of 128
185 # Windows defaults to a limit of 512 open files. A buffer of 128
183 # should give us enough headway.
186 # should give us enough headway.
184 coreconfigitem('worker', 'backgroundclosemaxqueue',
187 coreconfigitem('worker', 'backgroundclosemaxqueue',
185 default=384,
188 default=384,
186 )
189 )
187 coreconfigitem('worker', 'backgroundcloseminfilecount',
190 coreconfigitem('worker', 'backgroundcloseminfilecount',
188 default=2048,
191 default=2048,
189 )
192 )
190 coreconfigitem('worker', 'backgroundclosethreadcount',
193 coreconfigitem('worker', 'backgroundclosethreadcount',
191 default=4,
194 default=4,
192 )
195 )
193 coreconfigitem('worker', 'numcpus',
196 coreconfigitem('worker', 'numcpus',
194 default=None,
197 default=None,
195 )
198 )
@@ -1,1061 +1,1061 b''
1 # scmutil.py - Mercurial core utility functions
1 # scmutil.py - Mercurial core utility functions
2 #
2 #
3 # Copyright Matt Mackall <mpm@selenic.com>
3 # Copyright Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import glob
11 import glob
12 import hashlib
12 import hashlib
13 import os
13 import os
14 import re
14 import re
15 import socket
15 import socket
16
16
17 from .i18n import _
17 from .i18n import _
18 from .node import (
18 from .node import (
19 hex,
19 hex,
20 nullid,
20 nullid,
21 wdirid,
21 wdirid,
22 wdirrev,
22 wdirrev,
23 )
23 )
24
24
25 from . import (
25 from . import (
26 encoding,
26 encoding,
27 error,
27 error,
28 match as matchmod,
28 match as matchmod,
29 obsolete,
29 obsolete,
30 pathutil,
30 pathutil,
31 phases,
31 phases,
32 pycompat,
32 pycompat,
33 revsetlang,
33 revsetlang,
34 similar,
34 similar,
35 util,
35 util,
36 )
36 )
37
37
38 if pycompat.osname == 'nt':
38 if pycompat.osname == 'nt':
39 from . import scmwindows as scmplatform
39 from . import scmwindows as scmplatform
40 else:
40 else:
41 from . import scmposix as scmplatform
41 from . import scmposix as scmplatform
42
42
43 termsize = scmplatform.termsize
43 termsize = scmplatform.termsize
44
44
45 class status(tuple):
45 class status(tuple):
46 '''Named tuple with a list of files per status. The 'deleted', 'unknown'
46 '''Named tuple with a list of files per status. The 'deleted', 'unknown'
47 and 'ignored' properties are only relevant to the working copy.
47 and 'ignored' properties are only relevant to the working copy.
48 '''
48 '''
49
49
50 __slots__ = ()
50 __slots__ = ()
51
51
52 def __new__(cls, modified, added, removed, deleted, unknown, ignored,
52 def __new__(cls, modified, added, removed, deleted, unknown, ignored,
53 clean):
53 clean):
54 return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
54 return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
55 ignored, clean))
55 ignored, clean))
56
56
57 @property
57 @property
58 def modified(self):
58 def modified(self):
59 '''files that have been modified'''
59 '''files that have been modified'''
60 return self[0]
60 return self[0]
61
61
62 @property
62 @property
63 def added(self):
63 def added(self):
64 '''files that have been added'''
64 '''files that have been added'''
65 return self[1]
65 return self[1]
66
66
67 @property
67 @property
68 def removed(self):
68 def removed(self):
69 '''files that have been removed'''
69 '''files that have been removed'''
70 return self[2]
70 return self[2]
71
71
72 @property
72 @property
73 def deleted(self):
73 def deleted(self):
74 '''files that are in the dirstate, but have been deleted from the
74 '''files that are in the dirstate, but have been deleted from the
75 working copy (aka "missing")
75 working copy (aka "missing")
76 '''
76 '''
77 return self[3]
77 return self[3]
78
78
79 @property
79 @property
80 def unknown(self):
80 def unknown(self):
81 '''files not in the dirstate that are not ignored'''
81 '''files not in the dirstate that are not ignored'''
82 return self[4]
82 return self[4]
83
83
84 @property
84 @property
85 def ignored(self):
85 def ignored(self):
86 '''files not in the dirstate that are ignored (by _dirignore())'''
86 '''files not in the dirstate that are ignored (by _dirignore())'''
87 return self[5]
87 return self[5]
88
88
89 @property
89 @property
90 def clean(self):
90 def clean(self):
91 '''files that have not been modified'''
91 '''files that have not been modified'''
92 return self[6]
92 return self[6]
93
93
94 def __repr__(self, *args, **kwargs):
94 def __repr__(self, *args, **kwargs):
95 return (('<status modified=%r, added=%r, removed=%r, deleted=%r, '
95 return (('<status modified=%r, added=%r, removed=%r, deleted=%r, '
96 'unknown=%r, ignored=%r, clean=%r>') % self)
96 'unknown=%r, ignored=%r, clean=%r>') % self)
97
97
98 def itersubrepos(ctx1, ctx2):
98 def itersubrepos(ctx1, ctx2):
99 """find subrepos in ctx1 or ctx2"""
99 """find subrepos in ctx1 or ctx2"""
100 # Create a (subpath, ctx) mapping where we prefer subpaths from
100 # Create a (subpath, ctx) mapping where we prefer subpaths from
101 # ctx1. The subpaths from ctx2 are important when the .hgsub file
101 # ctx1. The subpaths from ctx2 are important when the .hgsub file
102 # has been modified (in ctx2) but not yet committed (in ctx1).
102 # has been modified (in ctx2) but not yet committed (in ctx1).
103 subpaths = dict.fromkeys(ctx2.substate, ctx2)
103 subpaths = dict.fromkeys(ctx2.substate, ctx2)
104 subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
104 subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
105
105
106 missing = set()
106 missing = set()
107
107
108 for subpath in ctx2.substate:
108 for subpath in ctx2.substate:
109 if subpath not in ctx1.substate:
109 if subpath not in ctx1.substate:
110 del subpaths[subpath]
110 del subpaths[subpath]
111 missing.add(subpath)
111 missing.add(subpath)
112
112
113 for subpath, ctx in sorted(subpaths.iteritems()):
113 for subpath, ctx in sorted(subpaths.iteritems()):
114 yield subpath, ctx.sub(subpath)
114 yield subpath, ctx.sub(subpath)
115
115
116 # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
116 # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
117 # status and diff will have an accurate result when it does
117 # status and diff will have an accurate result when it does
118 # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
118 # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
119 # against itself.
119 # against itself.
120 for subpath in missing:
120 for subpath in missing:
121 yield subpath, ctx2.nullsub(subpath, ctx1)
121 yield subpath, ctx2.nullsub(subpath, ctx1)
122
122
123 def nochangesfound(ui, repo, excluded=None):
123 def nochangesfound(ui, repo, excluded=None):
124 '''Report no changes for push/pull, excluded is None or a list of
124 '''Report no changes for push/pull, excluded is None or a list of
125 nodes excluded from the push/pull.
125 nodes excluded from the push/pull.
126 '''
126 '''
127 secretlist = []
127 secretlist = []
128 if excluded:
128 if excluded:
129 for n in excluded:
129 for n in excluded:
130 ctx = repo[n]
130 ctx = repo[n]
131 if ctx.phase() >= phases.secret and not ctx.extinct():
131 if ctx.phase() >= phases.secret and not ctx.extinct():
132 secretlist.append(n)
132 secretlist.append(n)
133
133
134 if secretlist:
134 if secretlist:
135 ui.status(_("no changes found (ignored %d secret changesets)\n")
135 ui.status(_("no changes found (ignored %d secret changesets)\n")
136 % len(secretlist))
136 % len(secretlist))
137 else:
137 else:
138 ui.status(_("no changes found\n"))
138 ui.status(_("no changes found\n"))
139
139
140 def callcatch(ui, func):
140 def callcatch(ui, func):
141 """call func() with global exception handling
141 """call func() with global exception handling
142
142
143 return func() if no exception happens. otherwise do some error handling
143 return func() if no exception happens. otherwise do some error handling
144 and return an exit code accordingly. does not handle all exceptions.
144 and return an exit code accordingly. does not handle all exceptions.
145 """
145 """
146 try:
146 try:
147 try:
147 try:
148 return func()
148 return func()
149 except: # re-raises
149 except: # re-raises
150 ui.traceback()
150 ui.traceback()
151 raise
151 raise
152 # Global exception handling, alphabetically
152 # Global exception handling, alphabetically
153 # Mercurial-specific first, followed by built-in and library exceptions
153 # Mercurial-specific first, followed by built-in and library exceptions
154 except error.LockHeld as inst:
154 except error.LockHeld as inst:
155 if inst.errno == errno.ETIMEDOUT:
155 if inst.errno == errno.ETIMEDOUT:
156 reason = _('timed out waiting for lock held by %r') % inst.locker
156 reason = _('timed out waiting for lock held by %r') % inst.locker
157 else:
157 else:
158 reason = _('lock held by %r') % inst.locker
158 reason = _('lock held by %r') % inst.locker
159 ui.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
159 ui.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
160 if not inst.locker:
160 if not inst.locker:
161 ui.warn(_("(lock might be very busy)\n"))
161 ui.warn(_("(lock might be very busy)\n"))
162 except error.LockUnavailable as inst:
162 except error.LockUnavailable as inst:
163 ui.warn(_("abort: could not lock %s: %s\n") %
163 ui.warn(_("abort: could not lock %s: %s\n") %
164 (inst.desc or inst.filename, inst.strerror))
164 (inst.desc or inst.filename, inst.strerror))
165 except error.OutOfBandError as inst:
165 except error.OutOfBandError as inst:
166 if inst.args:
166 if inst.args:
167 msg = _("abort: remote error:\n")
167 msg = _("abort: remote error:\n")
168 else:
168 else:
169 msg = _("abort: remote error\n")
169 msg = _("abort: remote error\n")
170 ui.warn(msg)
170 ui.warn(msg)
171 if inst.args:
171 if inst.args:
172 ui.warn(''.join(inst.args))
172 ui.warn(''.join(inst.args))
173 if inst.hint:
173 if inst.hint:
174 ui.warn('(%s)\n' % inst.hint)
174 ui.warn('(%s)\n' % inst.hint)
175 except error.RepoError as inst:
175 except error.RepoError as inst:
176 ui.warn(_("abort: %s!\n") % inst)
176 ui.warn(_("abort: %s!\n") % inst)
177 if inst.hint:
177 if inst.hint:
178 ui.warn(_("(%s)\n") % inst.hint)
178 ui.warn(_("(%s)\n") % inst.hint)
179 except error.ResponseError as inst:
179 except error.ResponseError as inst:
180 ui.warn(_("abort: %s") % inst.args[0])
180 ui.warn(_("abort: %s") % inst.args[0])
181 if not isinstance(inst.args[1], basestring):
181 if not isinstance(inst.args[1], basestring):
182 ui.warn(" %r\n" % (inst.args[1],))
182 ui.warn(" %r\n" % (inst.args[1],))
183 elif not inst.args[1]:
183 elif not inst.args[1]:
184 ui.warn(_(" empty string\n"))
184 ui.warn(_(" empty string\n"))
185 else:
185 else:
186 ui.warn("\n%r\n" % util.ellipsis(inst.args[1]))
186 ui.warn("\n%r\n" % util.ellipsis(inst.args[1]))
187 except error.CensoredNodeError as inst:
187 except error.CensoredNodeError as inst:
188 ui.warn(_("abort: file censored %s!\n") % inst)
188 ui.warn(_("abort: file censored %s!\n") % inst)
189 except error.RevlogError as inst:
189 except error.RevlogError as inst:
190 ui.warn(_("abort: %s!\n") % inst)
190 ui.warn(_("abort: %s!\n") % inst)
191 except error.InterventionRequired as inst:
191 except error.InterventionRequired as inst:
192 ui.warn("%s\n" % inst)
192 ui.warn("%s\n" % inst)
193 if inst.hint:
193 if inst.hint:
194 ui.warn(_("(%s)\n") % inst.hint)
194 ui.warn(_("(%s)\n") % inst.hint)
195 return 1
195 return 1
196 except error.WdirUnsupported:
196 except error.WdirUnsupported:
197 ui.warn(_("abort: working directory revision cannot be specified\n"))
197 ui.warn(_("abort: working directory revision cannot be specified\n"))
198 except error.Abort as inst:
198 except error.Abort as inst:
199 ui.warn(_("abort: %s\n") % inst)
199 ui.warn(_("abort: %s\n") % inst)
200 if inst.hint:
200 if inst.hint:
201 ui.warn(_("(%s)\n") % inst.hint)
201 ui.warn(_("(%s)\n") % inst.hint)
202 except ImportError as inst:
202 except ImportError as inst:
203 ui.warn(_("abort: %s!\n") % inst)
203 ui.warn(_("abort: %s!\n") % inst)
204 m = str(inst).split()[-1]
204 m = str(inst).split()[-1]
205 if m in "mpatch bdiff".split():
205 if m in "mpatch bdiff".split():
206 ui.warn(_("(did you forget to compile extensions?)\n"))
206 ui.warn(_("(did you forget to compile extensions?)\n"))
207 elif m in "zlib".split():
207 elif m in "zlib".split():
208 ui.warn(_("(is your Python install correct?)\n"))
208 ui.warn(_("(is your Python install correct?)\n"))
209 except IOError as inst:
209 except IOError as inst:
210 if util.safehasattr(inst, "code"):
210 if util.safehasattr(inst, "code"):
211 ui.warn(_("abort: %s\n") % inst)
211 ui.warn(_("abort: %s\n") % inst)
212 elif util.safehasattr(inst, "reason"):
212 elif util.safehasattr(inst, "reason"):
213 try: # usually it is in the form (errno, strerror)
213 try: # usually it is in the form (errno, strerror)
214 reason = inst.reason.args[1]
214 reason = inst.reason.args[1]
215 except (AttributeError, IndexError):
215 except (AttributeError, IndexError):
216 # it might be anything, for example a string
216 # it might be anything, for example a string
217 reason = inst.reason
217 reason = inst.reason
218 if isinstance(reason, unicode):
218 if isinstance(reason, unicode):
219 # SSLError of Python 2.7.9 contains a unicode
219 # SSLError of Python 2.7.9 contains a unicode
220 reason = encoding.unitolocal(reason)
220 reason = encoding.unitolocal(reason)
221 ui.warn(_("abort: error: %s\n") % reason)
221 ui.warn(_("abort: error: %s\n") % reason)
222 elif (util.safehasattr(inst, "args")
222 elif (util.safehasattr(inst, "args")
223 and inst.args and inst.args[0] == errno.EPIPE):
223 and inst.args and inst.args[0] == errno.EPIPE):
224 pass
224 pass
225 elif getattr(inst, "strerror", None):
225 elif getattr(inst, "strerror", None):
226 if getattr(inst, "filename", None):
226 if getattr(inst, "filename", None):
227 ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
227 ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
228 else:
228 else:
229 ui.warn(_("abort: %s\n") % inst.strerror)
229 ui.warn(_("abort: %s\n") % inst.strerror)
230 else:
230 else:
231 raise
231 raise
232 except OSError as inst:
232 except OSError as inst:
233 if getattr(inst, "filename", None) is not None:
233 if getattr(inst, "filename", None) is not None:
234 ui.warn(_("abort: %s: '%s'\n") % (inst.strerror, inst.filename))
234 ui.warn(_("abort: %s: '%s'\n") % (inst.strerror, inst.filename))
235 else:
235 else:
236 ui.warn(_("abort: %s\n") % inst.strerror)
236 ui.warn(_("abort: %s\n") % inst.strerror)
237 except MemoryError:
237 except MemoryError:
238 ui.warn(_("abort: out of memory\n"))
238 ui.warn(_("abort: out of memory\n"))
239 except SystemExit as inst:
239 except SystemExit as inst:
240 # Commands shouldn't sys.exit directly, but give a return code.
240 # Commands shouldn't sys.exit directly, but give a return code.
241 # Just in case catch this and and pass exit code to caller.
241 # Just in case catch this and and pass exit code to caller.
242 return inst.code
242 return inst.code
243 except socket.error as inst:
243 except socket.error as inst:
244 ui.warn(_("abort: %s\n") % inst.args[-1])
244 ui.warn(_("abort: %s\n") % inst.args[-1])
245
245
246 return -1
246 return -1
247
247
248 def checknewlabel(repo, lbl, kind):
248 def checknewlabel(repo, lbl, kind):
249 # Do not use the "kind" parameter in ui output.
249 # Do not use the "kind" parameter in ui output.
250 # It makes strings difficult to translate.
250 # It makes strings difficult to translate.
251 if lbl in ['tip', '.', 'null']:
251 if lbl in ['tip', '.', 'null']:
252 raise error.Abort(_("the name '%s' is reserved") % lbl)
252 raise error.Abort(_("the name '%s' is reserved") % lbl)
253 for c in (':', '\0', '\n', '\r'):
253 for c in (':', '\0', '\n', '\r'):
254 if c in lbl:
254 if c in lbl:
255 raise error.Abort(_("%r cannot be used in a name") % c)
255 raise error.Abort(_("%r cannot be used in a name") % c)
256 try:
256 try:
257 int(lbl)
257 int(lbl)
258 raise error.Abort(_("cannot use an integer as a name"))
258 raise error.Abort(_("cannot use an integer as a name"))
259 except ValueError:
259 except ValueError:
260 pass
260 pass
261
261
262 def checkfilename(f):
262 def checkfilename(f):
263 '''Check that the filename f is an acceptable filename for a tracked file'''
263 '''Check that the filename f is an acceptable filename for a tracked file'''
264 if '\r' in f or '\n' in f:
264 if '\r' in f or '\n' in f:
265 raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
265 raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
266
266
267 def checkportable(ui, f):
267 def checkportable(ui, f):
268 '''Check if filename f is portable and warn or abort depending on config'''
268 '''Check if filename f is portable and warn or abort depending on config'''
269 checkfilename(f)
269 checkfilename(f)
270 abort, warn = checkportabilityalert(ui)
270 abort, warn = checkportabilityalert(ui)
271 if abort or warn:
271 if abort or warn:
272 msg = util.checkwinfilename(f)
272 msg = util.checkwinfilename(f)
273 if msg:
273 if msg:
274 msg = "%s: %r" % (msg, f)
274 msg = "%s: %r" % (msg, f)
275 if abort:
275 if abort:
276 raise error.Abort(msg)
276 raise error.Abort(msg)
277 ui.warn(_("warning: %s\n") % msg)
277 ui.warn(_("warning: %s\n") % msg)
278
278
279 def checkportabilityalert(ui):
279 def checkportabilityalert(ui):
280 '''check if the user's config requests nothing, a warning, or abort for
280 '''check if the user's config requests nothing, a warning, or abort for
281 non-portable filenames'''
281 non-portable filenames'''
282 val = ui.config('ui', 'portablefilenames', 'warn')
282 val = ui.config('ui', 'portablefilenames', 'warn')
283 lval = val.lower()
283 lval = val.lower()
284 bval = util.parsebool(val)
284 bval = util.parsebool(val)
285 abort = pycompat.osname == 'nt' or lval == 'abort'
285 abort = pycompat.osname == 'nt' or lval == 'abort'
286 warn = bval or lval == 'warn'
286 warn = bval or lval == 'warn'
287 if bval is None and not (warn or abort or lval == 'ignore'):
287 if bval is None and not (warn or abort or lval == 'ignore'):
288 raise error.ConfigError(
288 raise error.ConfigError(
289 _("ui.portablefilenames value is invalid ('%s')") % val)
289 _("ui.portablefilenames value is invalid ('%s')") % val)
290 return abort, warn
290 return abort, warn
291
291
292 class casecollisionauditor(object):
292 class casecollisionauditor(object):
293 def __init__(self, ui, abort, dirstate):
293 def __init__(self, ui, abort, dirstate):
294 self._ui = ui
294 self._ui = ui
295 self._abort = abort
295 self._abort = abort
296 allfiles = '\0'.join(dirstate._map)
296 allfiles = '\0'.join(dirstate._map)
297 self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
297 self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
298 self._dirstate = dirstate
298 self._dirstate = dirstate
299 # The purpose of _newfiles is so that we don't complain about
299 # The purpose of _newfiles is so that we don't complain about
300 # case collisions if someone were to call this object with the
300 # case collisions if someone were to call this object with the
301 # same filename twice.
301 # same filename twice.
302 self._newfiles = set()
302 self._newfiles = set()
303
303
304 def __call__(self, f):
304 def __call__(self, f):
305 if f in self._newfiles:
305 if f in self._newfiles:
306 return
306 return
307 fl = encoding.lower(f)
307 fl = encoding.lower(f)
308 if fl in self._loweredfiles and f not in self._dirstate:
308 if fl in self._loweredfiles and f not in self._dirstate:
309 msg = _('possible case-folding collision for %s') % f
309 msg = _('possible case-folding collision for %s') % f
310 if self._abort:
310 if self._abort:
311 raise error.Abort(msg)
311 raise error.Abort(msg)
312 self._ui.warn(_("warning: %s\n") % msg)
312 self._ui.warn(_("warning: %s\n") % msg)
313 self._loweredfiles.add(fl)
313 self._loweredfiles.add(fl)
314 self._newfiles.add(f)
314 self._newfiles.add(f)
315
315
316 def filteredhash(repo, maxrev):
316 def filteredhash(repo, maxrev):
317 """build hash of filtered revisions in the current repoview.
317 """build hash of filtered revisions in the current repoview.
318
318
319 Multiple caches perform up-to-date validation by checking that the
319 Multiple caches perform up-to-date validation by checking that the
320 tiprev and tipnode stored in the cache file match the current repository.
320 tiprev and tipnode stored in the cache file match the current repository.
321 However, this is not sufficient for validating repoviews because the set
321 However, this is not sufficient for validating repoviews because the set
322 of revisions in the view may change without the repository tiprev and
322 of revisions in the view may change without the repository tiprev and
323 tipnode changing.
323 tipnode changing.
324
324
325 This function hashes all the revs filtered from the view and returns
325 This function hashes all the revs filtered from the view and returns
326 that SHA-1 digest.
326 that SHA-1 digest.
327 """
327 """
328 cl = repo.changelog
328 cl = repo.changelog
329 if not cl.filteredrevs:
329 if not cl.filteredrevs:
330 return None
330 return None
331 key = None
331 key = None
332 revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
332 revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
333 if revs:
333 if revs:
334 s = hashlib.sha1()
334 s = hashlib.sha1()
335 for rev in revs:
335 for rev in revs:
336 s.update('%d;' % rev)
336 s.update('%d;' % rev)
337 key = s.digest()
337 key = s.digest()
338 return key
338 return key
339
339
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, always recursively.
    The recurse flag will only control recursion into repo working dirs'''
    def errhandler(err):
        # only errors on the root path abort the walk; errors deeper in
        # the tree are silently skipped by os.walk
        if err.filename == path:
            raise err
    samestat = getattr(os.path, 'samestat', None)
    if followsym and samestat is not None:
        def adddir(dirlst, dirname):
            # record dirname's stat and return True when it was not seen
            # before; used to break symlink cycles while following links
            match = False
            dirstat = os.stat(dirname)
            for lstdirstat in dirlst:
                if samestat(dirstat, lstdirstat):
                    match = True
                    break
            if not match:
                dirlst.append(dirstat)
            return not match
    else:
        # without samestat we cannot detect cycles, so refuse to follow
        # symlinks at all
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        adddir(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        dirs.sort()
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = [] # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if adddir(seen_dirs, fname):
                    if os.path.islink(fname):
                        # walk the link target inline so seen_dirs still
                        # guards against revisiting the same directory
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            dirs[:] = newdirs
387
387
def binnode(ctx):
    """Return binary node id for a given basectx"""
    node = ctx.node()
    # a working-directory context has no node; report the magic wdir id
    return wdirid if node is None else node
394
394
def intrev(ctx):
    """Return integer for a given basectx that can be used in comparison or
    arithmetic operation"""
    rev = ctx.rev()
    # a working-directory context has no revision number; use wdirrev
    return wdirrev if rev is None else rev
402
402
def revsingle(repo, revspec, default='.'):
    '''Resolve revspec to exactly one changectx, falling back to default
    when revspec is empty (but 0 counts as a real revision).'''
    if not revspec and revspec != 0:
        return repo[default]

    matched = revrange(repo, [revspec])
    if not matched:
        raise error.Abort(_('empty revision set'))
    return repo[matched.last()]
411
411
def _pairspec(revspec):
    '''report whether revspec's top-level operator is an explicit range form'''
    tree = revsetlang.parse(revspec)
    if not tree:
        return tree
    return tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
415
415
def revpair(repo, revs):
    """Resolve a list of revset specs into a pair of binary nodes.

    Returns (first, second); second is None when the specs resolve to a
    single revision that was not explicitly written as a range.
    """
    if not revs:
        # no spec: working directory's first parent vs. working directory
        return repo.dirstate.p1(), None

    l = revrange(repo, revs)

    # pick the endpoints cheaply when the smartset knows its ordering
    if not l:
        first = second = None
    elif l.isascending():
        first = l.min()
        second = l.max()
    elif l.isdescending():
        first = l.max()
        second = l.min()
    else:
        first = l.first()
        second = l.last()

    if first is None:
        raise error.Abort(_('empty revision range'))
    if (first == second and len(revs) >= 2
        and not all(revrange(repo, [r]) for r in revs)):
        # e.g. 'x y' where one side is empty: the union collapsed to a
        # single rev only because the other spec resolved to nothing
        raise error.Abort(_('empty revision on one side of range'))

    # if top-level is range expression, the result must always be a pair
    if first == second and len(revs) == 1 and not _pairspec(revs[0]):
        return repo.lookup(first), None

    return repo.lookup(first), repo.lookup(second)
445
445
def revrange(repo, specs):
    """Execute 1 to many revsets and return the union.

    This is the preferred mechanism for executing revsets using user-specified
    config options, such as revset aliases.

    The revsets specified by ``specs`` will be executed via a chained ``OR``
    expression. If ``specs`` is empty, an empty result is returned.

    ``specs`` can contain integers, in which case they are assumed to be
    revision numbers.

    It is assumed the revsets are already formatted. If you have arguments
    that need to be expanded in the revset, call ``revsetlang.formatspec()``
    and pass the result as an element of ``specs``.

    Specifying a single revset is allowed.

    Returns a ``revset.abstractsmartset`` which is a list-like interface over
    integer revisions.
    """
    # bare integers are shorthand for 'rev(N)'; everything else is passed
    # through untouched
    allspecs = [revsetlang.formatspec('rev(%d)', spec)
                if isinstance(spec, int) else spec
                for spec in specs]
    return repo.anyrevs(allspecs, user=True)
473
473
def meaningfulparents(repo, ctx):
    """Return list of meaningful (or all if debug) parentrevs for rev.

    For merges (two non-nullrev revisions) both parents are meaningful.
    Otherwise the first parent revision is considered meaningful if it
    is not the preceding revision.
    """
    parents = ctx.parents()
    if len(parents) <= 1:
        if repo.ui.debugflag:
            # in debug mode always show both slots, padding with null
            return [parents[0], repo['null']]
        if parents[0].rev() >= intrev(ctx) - 1:
            # immediately preceding revision: nothing worth showing
            return []
    return parents
489
489
def expandpats(pats):
    '''Expand bare globs when running on windows.
    On posix we assume it already has already been done by sh.'''
    if not util.expandglobs:
        return list(pats)

    ret = []
    for kindpat in pats:
        kind, pat = matchmod._patsplit(kindpat, None)
        if kind is not None:
            # an explicit kind prefix disables glob expansion
            ret.append(kindpat)
            continue
        try:
            globbed = glob.glob(pat)
        except re.error:
            globbed = [pat]
        if globbed:
            ret.extend(globbed)
        else:
            # no match: keep the original pattern verbatim
            ret.append(kindpat)
    return ret
508
508
def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
                 badfn=None):
    '''Return a matcher and the patterns that were used.
    The matcher will warn about bad matches, unless an alternate badfn callback
    is provided.'''
    if pats == ("",):
        pats = []
    if opts is None:
        opts = {}
    if not globbed and default == 'relpath':
        # expand bare globs that the (Windows) shell did not expand
        pats = expandpats(pats or [])

    def bad(f, msg):
        # NOTE: closes over 'm', which is only assigned below -- safe
        # because the matcher cannot invoke badfn before it exists
        ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))

    if badfn is None:
        badfn = bad

    m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
                  default, listsubrepos=opts.get('subrepos'), badfn=badfn)

    if m.always():
        # an always-matcher means no effective patterns were supplied
        pats = []
    return m, pats
533
533
def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
          badfn=None):
    '''Return a matcher that will warn about bad matches.'''
    matcher, usedpats = matchandpats(ctx, pats, opts, globbed, default,
                                     badfn=badfn)
    return matcher
538
538
def matchall(repo):
    '''Return a matcher that will efficiently match everything.'''
    root = repo.root
    cwd = repo.getcwd()
    return matchmod.always(root, cwd)
542
542
def matchfiles(repo, files, badfn=None):
    '''Return a matcher that will efficiently match exactly these files.'''
    root = repo.root
    cwd = repo.getcwd()
    return matchmod.exact(root, cwd, files, badfn=badfn)
546
546
def origpath(ui, repo, filepath):
    '''customize where .orig files are created

    Fetch user defined path from config file: [ui] origbackuppath = <path>
    Fall back to default (filepath) if not specified
    '''
    origbackuppath = ui.config('ui', 'origbackuppath', None)
    if origbackuppath is None:
        # no custom location configured: backup next to the file
        return filepath + ".orig"

    relpath = os.path.relpath(filepath, start=repo.root)
    fullorigpath = repo.wjoin(origbackuppath, relpath)
    origbackupdir = repo.vfs.dirname(fullorigpath)
    if not repo.vfs.exists(origbackupdir):
        ui.note(_('creating directory: %s\n') % origbackupdir)
        util.makedirs(origbackupdir)
    return fullorigpath + ".orig"
566
566
def cleanupnodes(repo, mapping, operation):
    """do common cleanups when old nodes are replaced by new nodes

    That includes writing obsmarkers or stripping nodes, and moving bookmarks.
    (we might also want to move working directory parent in the future)

    mapping is {oldnode: [newnode]} or a iterable of nodes if they do not have
    replacements. operation is a string, like "rebase".
    """
    if not util.safehasattr(mapping, 'items'):
        # plain iterable of nodes: normalize to "no successors" mapping
        mapping = {n: () for n in mapping}

    with repo.transaction('cleanup') as tr:
        # Move bookmarks
        bmarks = repo._bookmarks
        bmarkchanged = False
        for oldnode, newnodes in mapping.items():
            oldbmarks = repo.nodebookmarks(oldnode)
            if not oldbmarks:
                continue
            bmarkchanged = True
            if len(newnodes) > 1:
                # multiple successors: only an unambiguous single head may
                # inherit the bookmark
                heads = list(repo.set('heads(%ln)', newnodes))
                if len(heads) != 1:
                    raise error.ProgrammingError(
                        'cannot figure out bookmark movement')
                newnode = heads[0].node()
            elif len(newnodes) == 0:
                # move bookmark backwards
                roots = list(repo.set('max((::%n) - %ln)', oldnode,
                                      list(mapping)))
                if roots:
                    newnode = roots[0].node()
                else:
                    # no surviving ancestor: park the bookmark on null
                    newnode = nullid
            else:
                newnode = newnodes[0]
            repo.ui.debug('moving bookmarks %r from %s to %s\n' %
                          (oldbmarks, hex(oldnode), hex(newnode)))
            for name in oldbmarks:
                bmarks[name] = newnode
        if bmarkchanged:
            bmarks.recordchange(tr)

        # Obsolete or strip nodes
        if obsolete.isenabled(repo, obsolete.createmarkersopt):
            # If a node is already obsoleted, and we want to obsolete it
            # without a successor, skip that obssolete request since it's
            # unnecessary. That's the "if s or not isobs(n)" check below.
            # Also sort the node in topology order, that might be useful for
            # some obsstore logic.
            # NOTE: the filtering and sorting might belong to createmarkers.
            isobs = repo.obsstore.successors.__contains__
            sortfunc = lambda ns: repo.changelog.rev(ns[0])
            rels = [(repo[n], (repo[m] for m in s))
                    for n, s in sorted(mapping.items(), key=sortfunc)
                    if s or not isobs(n)]
            obsolete.createmarkers(repo, rels, operation=operation)
        else:
            from . import repair # avoid import cycle
            repair.delayedstrip(repo.ui, repo, list(mapping), operation)
628
628
def addremove(repo, matcher, prefix, opts=None, dry_run=None, similarity=None):
    # add new and forget missing files matched by matcher, descending into
    # subrepos when requested; returns 1 if any path was rejected, else 0
    if opts is None:
        opts = {}
    m = matcher
    if dry_run is None:
        dry_run = opts.get('dry_run')
    if similarity is None:
        similarity = float(opts.get('similarity') or 0)

    ret = 0
    join = lambda f: os.path.join(prefix, f)

    wctx = repo[None]
    for subpath in sorted(wctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
            sub = wctx.sub(subpath)
            try:
                if sub.addremove(submatch, prefix, opts, dry_run, similarity):
                    ret = 1
            except error.LookupError:
                repo.ui.status(_("skipping missing subrepository: %s\n")
                               % join(subpath))

    rejected = []
    def badfn(f, msg):
        # only warn for paths the user named explicitly; record all of them
        if f in m.files():
            m.bad(f, msg)
        rejected.append(f)

    badmatch = matchmod.badmatch(m, badfn)
    added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
                                                                   badmatch)

    # report what we are about to do (adds for unknown/forgotten files,
    # removals for deleted ones)
    unknownset = set(unknown + forgotten)
    toprint = unknownset.copy()
    toprint.update(deleted)
    for abs in sorted(toprint):
        if repo.ui.verbose or not m.exact(abs):
            if abs in unknownset:
                status = _('adding %s\n') % m.uipath(abs)
            else:
                status = _('removing %s\n') % m.uipath(abs)
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    if not dry_run:
        _markchanges(repo, unknown + forgotten, deleted, renames)

    # an explicitly named path that the walk rejected forces failure
    for f in rejected:
        if f in m.files():
            return 1
    return ret
684
684
def marktouched(repo, files, similarity=0.0):
    '''Assert that files have somehow been operated upon. files are relative to
    the repo root.'''
    # NOTE: the badfn lambda closes over 'rejected', which is assigned on the
    # following line -- safe because the matcher cannot call badfn before then
    m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
    rejected = []

    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)

    if repo.ui.verbose:
        unknownset = set(unknown + forgotten)
        toprint = unknownset.copy()
        toprint.update(deleted)
        for abs in sorted(toprint):
            if abs in unknownset:
                status = _('adding %s\n') % abs
            else:
                status = _('removing %s\n') % abs
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    _markchanges(repo, unknown + forgotten, deleted, renames)

    # a bad path among the explicitly given files means overall failure
    for f in rejected:
        if f in m.files():
            return 1
    return 0
713
713
def _interestingfiles(repo, matcher):
    '''Walk dirstate with matcher, looking for files that addremove would care
    about.

    This is different from dirstate.status because it doesn't care about
    whether files are modified or clean.'''
    added, unknown, deleted, removed, forgotten = [], [], [], [], []
    audit_path = pathutil.pathauditor(repo.root)

    ctx = repo[None]
    dirstate = repo.dirstate
    walkresults = dirstate.walk(matcher, sorted(ctx.substate), True, False,
                                full=False)
    for abs, st in walkresults.iteritems():
        # dirstate states: '?' untracked, 'a' added, 'r' removed; st is the
        # stat result and is falsy when the file is absent from disk
        dstate = dirstate[abs]
        if dstate == '?' and audit_path.check(abs):
            unknown.append(abs)
        elif dstate != 'r' and not st:
            deleted.append(abs)
        elif dstate == 'r' and st:
            # marked removed but present on disk again
            forgotten.append(abs)
        # for finding renames
        elif dstate == 'r' and not st:
            removed.append(abs)
        elif dstate == 'a':
            added.append(abs)

    return added, unknown, deleted, removed, forgotten
742
742
def _findrenames(repo, matcher, added, removed, similarity):
    '''Find renames from removed files to added ones.

    Returns {new: old} for every pair at least ``similarity`` similar.'''
    renames = {}
    if similarity <= 0:
        return renames
    for old, new, score in similar.findrenames(repo, added, removed,
                                               similarity):
        if (repo.ui.verbose
            or not (matcher.exact(old) and matcher.exact(new))):
            repo.ui.status(_('recording removal of %s as rename to %s '
                             '(%d%% similar)\n') %
                           (matcher.rel(old), matcher.rel(new),
                            score * 100))
        renames[new] = old
    return renames
757
757
def _markchanges(repo, unknown, deleted, renames):
    '''Marks the files in unknown as added, the files in deleted as removed,
    and the files in renames as copied.'''
    wctx = repo[None]
    # all dirstate mutations happen under the working-copy lock
    with repo.wlock():
        wctx.forget(deleted)
        wctx.add(unknown)
        for new, old in renames.iteritems():
            wctx.copy(old, new)
767
767
def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    different reasons it might not end with dst being marked as copied from src.
    """
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc: # copying back a copy?
        # 'm' (merged) / 'n' (normal) need no action; any other state gets a
        # normallookup so the next status check re-examines the file
        if repo.dirstate[dst] not in 'mn' and not dryrun:
            repo.dirstate.normallookup(dst)
    else:
        if repo.dirstate[origsrc] == 'a' and origsrc == src:
            # the source was only added in the working copy; there is no
            # committed revision to record copy metadata against
            if not ui.quiet:
                ui.warn(_("%s has not been committed yet, so no copy "
                          "data will be stored for %s.\n")
                        % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
            # '?' untracked / 'r' removed: just (re-)add the destination
            if repo.dirstate[dst] in '?r' and not dryrun:
                wctx.add([dst])
        elif not dryrun:
            wctx.copy(origsrc, dst)
786
786
def readrequires(opener, supported):
    '''Reads and parses .hg/requires and checks if all entries found
    are in the list of supported features.'''
    requirements = set(opener.read("requires").splitlines())
    missings = []
    for r in requirements:
        if r in supported:
            continue
        # an empty or non-alphanumeric-leading entry means the file is junk
        if not r or not r[0].isalnum():
            raise error.RequirementError(_(".hg/requires file is corrupt"))
        missings.append(r)
    if missings:
        raise error.RequirementError(
            _("repository requires features unknown to this Mercurial: %s")
            % " ".join(sorted(missings)),
            hint=_("see https://mercurial-scm.org/wiki/MissingRequirement"
                   " for more information"))
    return requirements
805
805
def writerequires(opener, requirements):
    '''Write the requirements, sorted and one per line, to .hg/requires.'''
    with opener('requires', 'w') as fp:
        for req in sorted(requirements):
            fp.write("%s\n" % req)
810
810
class filecachesubentry(object):
    """Stat-based change tracker for a single path.

    ``cachestat`` holds the last recorded stat info (or None) and
    ``_cacheable`` caches whether the path's stat info is usable for
    change detection; None means "not known yet".
    """

    def __init__(self, path, stat):
        self.path = path
        self.cachestat = None
        self._cacheable = None

        if stat:
            self.cachestat = filecachesubentry.stat(self.path)
            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()

    def refresh(self):
        # re-record stat info, but only when stat-based caching can work
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        # unknown (None) is treated as cacheable until proven otherwise
        return True if self._cacheable is None else self._cacheable

    def changed(self):
        # no point in going further if we can't cache it
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # cacheability may become known only once the file exists
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

        # check again
        if not self._cacheable:
            return True

        if self.cachestat == newstat:
            return False
        self.cachestat = newstat
        return True

    @staticmethod
    def stat(path):
        # returns None when the file does not exist; other errors propagate
        try:
            return util.cachestat(path)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise
865
865
class filecacheentry(object):
    """Aggregate of filecachesubentry objects, one per tracked path."""

    def __init__(self, paths, stat=True):
        self._entries = [filecachesubentry(p, stat) for p in paths]

    def changed(self):
        '''true if any entry has changed'''
        return any(entry.changed() for entry in self._entries)

    def refresh(self):
        """Refresh the stat snapshot of every tracked sub-entry."""
        for entry in self._entries:
            entry.refresh()
class filecache(object):
    '''A property like decorator that tracks files under .hg/ for updates.

    Records stat info when called in _filecache.

    On subsequent calls, compares old stat info with new info, and recreates
    the object when any of the files changes, updating the new stat info in
    _filecache.

    Mercurial either atomic renames or appends for files under .hg,
    so to ensure the cache is reliable we need the filesystem to be able
    to tell us if a file has been replaced. If it can't, we fallback to
    recreating the object on every call (essentially the same behavior as
    propertycache).
    '''

    def __init__(self, *paths):
        self.paths = paths

    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.

        Users should subclass filecache and provide their own version of this
        function to call the appropriate join function on 'obj' (an instance
        of the class that its member function was decorated).
        """
        raise NotImplementedError

    def __call__(self, func):
        # decorator protocol: remember the wrapped computation and the
        # (bytes) attribute name it will be cached under
        self.func = func
        self.name = func.__name__.encode('ascii')
        return self

    def __get__(self, obj, type=None):
        # if accessed on the class, return the descriptor itself.
        if obj is None:
            return self
        # fast path: value already computed and cached on the instance
        if self.name in obj.__dict__:
            assert self.name in obj._filecache, self.name
            return obj.__dict__[self.name]

        entry = obj._filecache.get(self.name)
        if entry:
            # recompute only when a tracked file changed on disk
            if entry.changed():
                entry.obj = self.func(obj)
        else:
            paths = [self.join(obj, path) for path in self.paths]
            # We stat -before- creating the object so our cache doesn't lie
            # if a writer modified between the time we read and stat
            entry = filecacheentry(paths, True)
            entry.obj = self.func(obj)

        obj._filecache[self.name] = entry
        obj.__dict__[self.name] = entry.obj
        return entry.obj

    def __set__(self, obj, value):
        try:
            ce = obj._filecache[self.name]
        except KeyError:
            # we add an entry for the missing value because X in __dict__
            # implies X in _filecache
            paths = [self.join(obj, path) for path in self.paths]
            ce = filecacheentry(paths, False)
            obj._filecache[self.name] = ce

        ce.obj = value  # update cached copy
        obj.__dict__[self.name] = value  # update copy returned by obj.x

    def __delete__(self, obj):
        try:
            del obj.__dict__[self.name]
        except KeyError:
            raise AttributeError(self.name)
961
961
962 def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
962 def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
963 if lock is None:
963 if lock is None:
964 raise error.LockInheritanceContractViolation(
964 raise error.LockInheritanceContractViolation(
965 'lock can only be inherited while held')
965 'lock can only be inherited while held')
966 if environ is None:
966 if environ is None:
967 environ = {}
967 environ = {}
968 with lock.inherit() as locker:
968 with lock.inherit() as locker:
969 environ[envvar] = locker
969 environ[envvar] = locker
970 return repo.ui.system(cmd, environ=environ, *args, **kwargs)
970 return repo.ui.system(cmd, environ=environ, *args, **kwargs)
971
971
def wlocksub(repo, cmd, *args, **kwargs):
    """Spawn ``cmd`` as a subprocess that may inherit repo's wlock.

    The working-directory lock must currently be held.  All extra
    arguments are forwarded to ui.system; the subprocess exit code is
    returned."""
    wlock = repo.currentwlock()
    return _locksub(repo, wlock, 'HG_WLOCK_LOCKER', cmd, *args, **kwargs)
980
980
def gdinitconfig(ui):
    """Return True when a new repository should be created as generaldelta."""
    # experimental config: format.generaldelta
    if ui.configbool('format', 'generaldelta'):
        return True
    return ui.configbool('format', 'usegeneraldelta')
987
987
def gddeltaconfig(ui):
    """Return True when incoming deltas should be re-optimised."""
    # experimental config: format.generaldelta
    value = ui.configbool('format', 'generaldelta')
    return value
993
993
class simplekeyvaluefile(object):
    """A simple file with key=value lines

    Keys must be alphanumerics and start with a letter, values must not
    contain '\n' characters"""

    # reserved key used by read() to expose a raw (non key=value) first line
    firstlinekey = '__firstline'

    def __init__(self, vfs, path, keys=None):
        self.vfs = vfs
        self.path = path

    def read(self, firstlinenonkeyval=False):
        """Parse the file contents into a dict.

        When ``firstlinenonkeyval`` is True the first line is not parsed
        as a key=value pair; it is returned verbatim (minus the trailing
        newline) under the ``__firstline`` key.  Raises CorruptedState on
        malformed content."""
        lines = self.vfs.readlines(self.path)
        d = {}
        if firstlinenonkeyval:
            if not lines:
                e = _("empty simplekeyvalue file")
                raise error.CorruptedState(e)
            # we don't want to include '\n' in the __firstline
            d[self.firstlinekey] = lines[0][:-1]
            del lines[0]

        try:
            # skipping lines that contain only whitespace (e.g. a bare
            # '\n') which a plain "if line" test would not filter out
            pairs = (line[:-1].split('=', 1) for line in lines
                     if line.strip())
            updatedict = dict(pairs)
        except ValueError as e:
            raise error.CorruptedState(str(e))
        if self.firstlinekey in updatedict:
            e = _("%r can't be used as a key")
            raise error.CorruptedState(e % self.firstlinekey)
        d.update(updatedict)
        return d

    def _checkpair(self, k, v):
        # validate one key/value pair before serialization; raises
        # ProgrammingError because a bad pair is a caller bug, not
        # corrupted on-disk state
        if k == self.firstlinekey:
            e = "key name '%s' is reserved" % self.firstlinekey
            raise error.ProgrammingError(e)
        if not k[0].isalpha():
            e = "keys must start with a letter in a key-value file"
            raise error.ProgrammingError(e)
        if not k.isalnum():
            e = "invalid key name in a simple key-value file"
            raise error.ProgrammingError(e)
        if '\n' in v:
            e = "invalid value in a simple key-value file"
            raise error.ProgrammingError(e)

    def write(self, data, firstline=None):
        """Write key=>value mapping to a file
        data is a dict. Keys must be alphanumerical and start with a letter.
        Values must not contain newline characters.

        If 'firstline' is not None, it is written to file before
        everything else, as it is, not in a key=value form"""
        lines = []
        if firstline is not None:
            lines.append('%s\n' % firstline)

        for k, v in data.items():
            self._checkpair(k, v)
            lines.append("%s=%s\n" % (k, v))

        with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
            fp.write(''.join(lines))
General Comments 0
You need to be logged in to leave comments. Login now