##// END OF EJS Templates
narrow: add trailing slash to dir earlier for debug{revlog,index,data}...
Martin von Zweigbergk -
r37287:6ff8bd69 default
parent child Browse files
Show More
@@ -1,187 +1,185 b''
1 # narrowrevlog.py - revlog storing irrelevant nodes as "ellipsis" nodes
1 # narrowrevlog.py - revlog storing irrelevant nodes as "ellipsis" nodes
2 #
2 #
3 # Copyright 2017 Google, Inc.
3 # Copyright 2017 Google, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 from mercurial import (
10 from mercurial import (
11 error,
11 error,
12 manifest,
12 manifest,
13 revlog,
13 revlog,
14 util,
14 util,
15 )
15 )
16
16
def readtransform(self, text):
    """Flag processor read hook for ellipsis revisions.

    Leaves the stored text untouched; the False indicates that the raw
    text needs no further validation.
    """
    return text, False
19
19
def writetransform(self, text):
    """Flag processor write hook for ellipsis revisions.

    Stores the text exactly as given; the False indicates that the raw
    text needs no further validation.
    """
    return text, False
22
22
def rawtransform(self, text):
    """Flag processor raw hook for ellipsis revisions.

    Always reports False: the raw stored text is never considered valid
    for hash verification purposes.
    """
    return False
25
25
# Register the ellipsis flag processor at module import time: revisions
# carrying REVIDX_ELLIPSIS pass through unchanged on read and write, and
# their raw text is reported as not hash-verifiable (see the three
# *transform functions above).
revlog.addflagprocessor(revlog.REVIDX_ELLIPSIS,
                        (readtransform, writetransform, rawtransform))
28
28
def setup():
    """No-op setup hook.

    Registering the ellipsis flag processor already happened as a side
    effect of importing this module, so nothing remains to be done here.
    """
33
33
class excludeddir(manifest.treemanifest):
    """Placeholder manifest for a directory excluded from the repository.

    When narrowing is active on a repository using treemanifests, some
    directory revlogs are omitted from the clone -- a large storage win
    for clients.  Internals still need a pseudo-manifest for such a
    directory so that merge conflicts outside the narrowspec can be
    detected.  This class plays that role: it represents a directory
    whose node is known but whose contents are not.
    """

    def __init__(self, dir, node):
        super(excludeddir, self).__init__(dir)
        self._node = node
        # A single empty-named entry makes iterators and friends surface
        # the directory itself (i.e. something like "dir/"); the 't' flag
        # marks it as a tree.
        self._files[''] = node
        self._flags[''] = 't'

    # Manifests outside the narrowspec are never modified, so copying is
    # unnecessary.  Skipping it is a noticeable win when very many
    # directories fall outside the narrowspec, and it also keeps the
    # "copy" the same concrete type as the original, which the super
    # type's copy() would not.
    def copy(self):
        return self
60
60
class excludeddirmanifestctx(manifest.treemanifestctx):
    """Context wrapper around excludeddir; see that class for rationale."""

    def __init__(self, dir, node):
        self._dir = dir
        self._node = node

    def read(self):
        # Materialize the stand-in manifest lazily, on first read.
        return excludeddir(self._dir, self._node)

    def write(self, *args):
        # Writing outside the narrowspec is always a bug in the caller.
        raise error.ProgrammingError(
            'attempt to write manifest from excluded dir %s' % self._dir)
73
73
class excludedmanifestrevlog(manifest.manifestrevlog):
    """Stand-in for excluded treemanifest revlogs.

    When narrowing is active on a treemanifest repository, there are
    references to directories whose revlogs were skipped in the clone.
    This class conforms to the manifestrevlog interface for those
    directories while proactively refusing access outside the
    narrowspec.
    """

    def __init__(self, dir):
        self._dir = dir

    def _rejectread(self, verb):
        # Shared guard: any read-style access to an excluded dirlog is a
        # programming error rather than a user-facing failure.
        raise error.ProgrammingError(
            'attempt to %s excluded dir %s' % (verb, self._dir))

    def __len__(self):
        self._rejectread('get length of')

    def rev(self, node):
        self._rejectread('get rev from')

    def linkrev(self, node):
        self._rejectread('get linkrev from')

    def node(self, rev):
        self._rejectread('get node from')

    def add(self, *args, **kwargs):
        # We should never write entries in dirlogs outside the narrow
        # clone.  However, writesubtree() in _addtree() still calls this,
        # so we must accept and ignore the call.  Nothing is lost: _dirty
        # is always False in excludeddir instances, so the manifest being
        # "added" is always clean.
        pass
110
110
def makenarrowmanifestrevlog(mfrevlog, repo):
    """Patch ``mfrevlog``'s class in place so dirlog() hides directories
    outside ``repo``'s narrowspec behind excludedmanifestrevlog stand-ins.
    """
    if util.safehasattr(mfrevlog, '_narrowed'):
        # Already wrapped -- this function recurses via dirlog() below.
        return

    class narrowmanifestrevlog(mfrevlog.__class__):
        # This function is called via debug{revlog,index,data}, but also during
        # at least some push operations. This will be used to wrap/exclude the
        # child directories when using treemanifests.
        def dirlog(self, d):
            # Normalize to a trailing-slash form so callers (e.g. the
            # debug commands) may pass either "dir" or "dir/".
            if d and not d.endswith('/'):
                d = d + '/'
            # visitdir() takes the path without the trailing slash; the
            # root directory maps to '.'.
            if not repo.narrowmatch().visitdir(d[:-1] or '.'):
                return excludedmanifestrevlog(d)
            result = super(narrowmanifestrevlog, self).dirlog(d)
            # Recursively narrow child dirlogs as they are handed out.
            makenarrowmanifestrevlog(result, repo)
            return result

    mfrevlog.__class__ = narrowmanifestrevlog
    # Marker consulted by the guard above to make wrapping idempotent.
    mfrevlog._narrowed = True
130
128
def makenarrowmanifestlog(mfl, repo):
    """Patch ``mfl``'s class in place so get() returns stand-in contexts
    for directories outside ``repo``'s narrowspec.
    """
    class narrowmanifestlog(mfl.__class__):
        def get(self, dir, node, verify=True):
            # ``dir[:-1]`` strips the last character -- presumably a
            # trailing slash on ``dir`` (TODO confirm against callers);
            # the root directory maps to '.'.
            if not repo.narrowmatch().visitdir(dir[:-1] or '.'):
                return excludeddirmanifestctx(dir, node)
            return super(narrowmanifestlog, self).get(dir, node, verify=verify)
    mfl.__class__ = narrowmanifestlog
138
136
def makenarrowfilelog(fl, narrowmatch):
    """Patch ``fl``'s class in place so copy/rename metadata pointing
    outside ``narrowmatch`` is hidden, with size() and cmp() compensating
    for the hidden metadata.
    """
    class narrowfilelog(fl.__class__):
        def renamed(self, node):
            # Renames that come from outside the narrowspec are
            # problematic at least for git-diffs, because we lack the
            # base text for the rename. This logic was introduced in
            # 3cd72b1 of narrowhg (authored by martinvonz, reviewed by
            # adgar), but that revision doesn't have any additional
            # commentary on what problems we can encounter.
            m = super(narrowfilelog, self).renamed(node)
            if m and not narrowmatch(m[0]):
                return None
            return m

        def size(self, rev):
            # We take advantage of the fact that remotefilelog
            # lacks a node() method to just skip the
            # rename-checking logic when on remotefilelog. This
            # might be incorrect on other non-revlog-based storage
            # engines, but for now this seems to be fine.
            #
            # TODO: when remotefilelog is in core, improve this to
            # explicitly look for remotefilelog instead of cheating
            # with a hasattr check.
            if util.safehasattr(self, 'node'):
                node = self.node(rev)
                # Because renamed() is overridden above to
                # sometimes return None even if there is metadata
                # in the revlog, size can be incorrect for
                # copies/renames, so we need to make sure we call
                # the super class's implementation of renamed()
                # for the purpose of size calculation.
                if super(narrowfilelog, self).renamed(node):
                    return len(self.read(node))
            return super(narrowfilelog, self).size(rev)

        def cmp(self, node, text):
            different = super(narrowfilelog, self).cmp(node, text)
            if different:
                # Similar to size() above, if the file was copied from
                # a file outside the narrowspec, the super class's
                # would have returned True because we tricked it into
                # thinking that the file was not renamed.
                if super(narrowfilelog, self).renamed(node):
                    t2 = self.read(node)
                    return t2 != text
            return different

    fl.__class__ = narrowfilelog
@@ -1,3214 +1,3216 b''
1 # cmdutil.py - help for command processing in mercurial
1 # cmdutil.py - help for command processing in mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import os
11 import os
12 import re
12 import re
13 import tempfile
13 import tempfile
14
14
15 from .i18n import _
15 from .i18n import _
16 from .node import (
16 from .node import (
17 hex,
17 hex,
18 nullid,
18 nullid,
19 nullrev,
19 nullrev,
20 short,
20 short,
21 )
21 )
22
22
23 from . import (
23 from . import (
24 bookmarks,
24 bookmarks,
25 changelog,
25 changelog,
26 copies,
26 copies,
27 crecord as crecordmod,
27 crecord as crecordmod,
28 dirstateguard,
28 dirstateguard,
29 encoding,
29 encoding,
30 error,
30 error,
31 formatter,
31 formatter,
32 logcmdutil,
32 logcmdutil,
33 match as matchmod,
33 match as matchmod,
34 merge as mergemod,
34 merge as mergemod,
35 mergeutil,
35 mergeutil,
36 obsolete,
36 obsolete,
37 patch,
37 patch,
38 pathutil,
38 pathutil,
39 pycompat,
39 pycompat,
40 registrar,
40 registrar,
41 revlog,
41 revlog,
42 rewriteutil,
42 rewriteutil,
43 scmutil,
43 scmutil,
44 smartset,
44 smartset,
45 subrepoutil,
45 subrepoutil,
46 templatekw,
46 templatekw,
47 templater,
47 templater,
48 util,
48 util,
49 vfs as vfsmod,
49 vfs as vfsmod,
50 )
50 )
51
51
52 from .utils import (
52 from .utils import (
53 dateutil,
53 dateutil,
54 stringutil,
54 stringutil,
55 )
55 )
56
56
# Convenience alias used by the record machinery below.
stringio = util.stringio

# templates of common command options

dryrunopts = [
    ('n', 'dry-run', None,
     _('do not perform actions, just print output')),
]

remoteopts = [
    ('e', 'ssh', '',
     _('specify ssh command to use'), _('CMD')),
    ('', 'remotecmd', '',
     _('specify hg command to run on the remote side'), _('CMD')),
    ('', 'insecure', None,
     _('do not verify server certificate (ignoring web.cacerts config)')),
]

walkopts = [
    ('I', 'include', [],
     _('include names matching the given patterns'), _('PATTERN')),
    ('X', 'exclude', [],
     _('exclude names matching the given patterns'), _('PATTERN')),
]

commitopts = [
    ('m', 'message', '',
     _('use text as commit message'), _('TEXT')),
    ('l', 'logfile', '',
     _('read commit message from file'), _('FILE')),
]

commitopts2 = [
    ('d', 'date', '',
     _('record the specified date as commit date'), _('DATE')),
    ('u', 'user', '',
     _('record the specified user as committer'), _('USER')),
]

# hidden for now
formatteropts = [
    ('T', 'template', '',
     _('display with template (EXPERIMENTAL)'), _('TEMPLATE')),
]

templateopts = [
    ('', 'style', '',
     _('display using template map file (DEPRECATED)'), _('STYLE')),
    ('T', 'template', '',
     _('display with template'), _('TEMPLATE')),
]

logopts = [
    ('p', 'patch', None, _('show patch')),
    ('g', 'git', None, _('use git extended diff format')),
    ('l', 'limit', '',
     _('limit number of changes displayed'), _('NUM')),
    ('M', 'no-merges', None, _('do not show merges')),
    ('', 'stat', None, _('output diffstat-style summary of changes')),
    ('G', 'graph', None, _("show the revision DAG")),
] + templateopts

diffopts = [
    ('a', 'text', None, _('treat all files as text')),
    ('g', 'git', None, _('use git extended diff format')),
    ('', 'binary', None, _('generate binary diffs in git mode (default)')),
    ('', 'nodates', None, _('omit dates from diff headers'))
]

diffwsopts = [
    ('w', 'ignore-all-space', None,
     _('ignore white space when comparing lines')),
    ('b', 'ignore-space-change', None,
     _('ignore changes in the amount of white space')),
    ('B', 'ignore-blank-lines', None,
     _('ignore changes whose lines are all blank')),
    ('Z', 'ignore-space-at-eol', None,
     _('ignore changes in whitespace at EOL')),
]

diffopts2 = [
    ('', 'noprefix', None, _('omit a/ and b/ prefixes from filenames')),
    ('p', 'show-function', None, _('show which function each change is in')),
    ('', 'reverse', None, _('produce a diff that undoes the changes')),
] + diffwsopts + [
    ('U', 'unified', '',
     _('number of lines of context to show'), _('NUM')),
    ('', 'stat', None, _('output diffstat-style summary of changes')),
    ('', 'root', '', _('produce diffs relative to subdirectory'), _('DIR')),
]

mergetoolopts = [
    ('t', 'tool', '', _('specify merge tool')),
]

similarityopts = [
    ('s', 'similarity', '',
     _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
]

subrepoopts = [
    ('S', 'subrepos', None,
     _('recurse into subrepositories'))
]

debugrevlogopts = [
    ('c', 'changelog', False, _('open changelog')),
    ('m', 'manifest', False, _('open manifest')),
    ('', 'dir', '', _('open directory manifest')),
]

# special string such that everything below this line will be ignored in the
# editor text
_linebelow = "^HG: ------------------------ >8 ------------------------$"
171
171
def ishunk(x):
    """Return True if ``x`` is a record/crecord patch hunk object."""
    return isinstance(x, (crecordmod.uihunk, patch.recordhunk))
175
175
def newandmodified(chunks, originalchunks):
    """Return the set of filenames of hunks in ``chunks`` that introduce a
    new file and are not present in ``originalchunks``."""
    return set(chunk.header.filename()
               for chunk in chunks
               if (ishunk(chunk)
                   and chunk.header.isnewfile()
                   and chunk not in originalchunks))
183
183
def parsealiases(cmd):
    """Split a command spec such as "^commit|ci" into its list of aliases,
    discarding any leading '^' markers."""
    names = cmd.lstrip("^")
    return names.split("|")
186
186
def setupwrapcolorwrite(ui):
    """Wrap ui.write so diff output can be labeled/colorized.

    Returns the original ui.write so the caller can restore it when done
    (see recordfilter()).
    """
    def wrapwrite(orig, *args, **kw):
        # Run the written chunks through patch.difflabel() and append each
        # chunk's diff label to the caller-supplied label before delegating
        # to the real write.
        label = kw.pop(r'label', '')
        for chunk, l in patch.difflabel(lambda: args):
            orig(chunk, label=label + l)

    oldwrite = ui.write
    def wrap(*args, **kwargs):
        return wrapwrite(oldwrite, *args, **kwargs)
    setattr(ui, 'write', wrap)
    return oldwrite
199
199
def filterchunks(ui, originalhunks, usecurses, testfile, operation=None):
    """Run ``originalhunks`` through the appropriate interactive filter.

    Uses the curses chunk selector when ``usecurses`` is set (with a test
    decorator when ``testfile`` is given), otherwise falls back to the
    plain-text prompt in patch.filterpatch().
    """
    if not usecurses:
        return patch.filterpatch(ui, originalhunks, operation)

    if testfile:
        selector = crecordmod.testdecorator(testfile,
                                            crecordmod.testchunkselector)
    else:
        selector = crecordmod.chunkselector

    return crecordmod.filterpatch(ui, originalhunks, selector, operation)
212
212
def recordfilter(ui, originalhunks, operation=None):
    """ Prompts the user to filter the originalhunks and return a list of
    selected hunks.
    *operation* is used to build ui messages indicating to the user what
    kind of filtering they are doing: reverting, committing, shelving, etc.
    (see patch.filterpatch).
    """
    usecurses = crecordmod.checkcurses(ui)
    testfile = ui.config('experimental', 'crecordtest')
    # Temporarily wrap ui.write so the filtering UI gets colorized diffs.
    oldwrite = setupwrapcolorwrite(ui)
    try:
        newchunks, newopts = filterchunks(ui, originalhunks, usecurses,
                                          testfile, operation)
    finally:
        # Always restore the original write, even if filtering raised.
        ui.write = oldwrite
    return newchunks, newopts
229
229
230 def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
230 def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
231 filterfn, *pats, **opts):
231 filterfn, *pats, **opts):
232 opts = pycompat.byteskwargs(opts)
232 opts = pycompat.byteskwargs(opts)
233 if not ui.interactive():
233 if not ui.interactive():
234 if cmdsuggest:
234 if cmdsuggest:
235 msg = _('running non-interactively, use %s instead') % cmdsuggest
235 msg = _('running non-interactively, use %s instead') % cmdsuggest
236 else:
236 else:
237 msg = _('running non-interactively')
237 msg = _('running non-interactively')
238 raise error.Abort(msg)
238 raise error.Abort(msg)
239
239
240 # make sure username is set before going interactive
240 # make sure username is set before going interactive
241 if not opts.get('user'):
241 if not opts.get('user'):
242 ui.username() # raise exception, username not provided
242 ui.username() # raise exception, username not provided
243
243
244 def recordfunc(ui, repo, message, match, opts):
244 def recordfunc(ui, repo, message, match, opts):
245 """This is generic record driver.
245 """This is generic record driver.
246
246
247 Its job is to interactively filter local changes, and
247 Its job is to interactively filter local changes, and
248 accordingly prepare working directory into a state in which the
248 accordingly prepare working directory into a state in which the
249 job can be delegated to a non-interactive commit command such as
249 job can be delegated to a non-interactive commit command such as
250 'commit' or 'qrefresh'.
250 'commit' or 'qrefresh'.
251
251
252 After the actual job is done by non-interactive command, the
252 After the actual job is done by non-interactive command, the
253 working directory is restored to its original state.
253 working directory is restored to its original state.
254
254
255 In the end we'll record interesting changes, and everything else
255 In the end we'll record interesting changes, and everything else
256 will be left in place, so the user can continue working.
256 will be left in place, so the user can continue working.
257 """
257 """
258
258
259 checkunfinished(repo, commit=True)
259 checkunfinished(repo, commit=True)
260 wctx = repo[None]
260 wctx = repo[None]
261 merge = len(wctx.parents()) > 1
261 merge = len(wctx.parents()) > 1
262 if merge:
262 if merge:
263 raise error.Abort(_('cannot partially commit a merge '
263 raise error.Abort(_('cannot partially commit a merge '
264 '(use "hg commit" instead)'))
264 '(use "hg commit" instead)'))
265
265
266 def fail(f, msg):
266 def fail(f, msg):
267 raise error.Abort('%s: %s' % (f, msg))
267 raise error.Abort('%s: %s' % (f, msg))
268
268
269 force = opts.get('force')
269 force = opts.get('force')
270 if not force:
270 if not force:
271 vdirs = []
271 vdirs = []
272 match.explicitdir = vdirs.append
272 match.explicitdir = vdirs.append
273 match.bad = fail
273 match.bad = fail
274
274
275 status = repo.status(match=match)
275 status = repo.status(match=match)
276 if not force:
276 if not force:
277 repo.checkcommitpatterns(wctx, vdirs, match, status, fail)
277 repo.checkcommitpatterns(wctx, vdirs, match, status, fail)
278 diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True)
278 diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True)
279 diffopts.nodates = True
279 diffopts.nodates = True
280 diffopts.git = True
280 diffopts.git = True
281 diffopts.showfunc = True
281 diffopts.showfunc = True
282 originaldiff = patch.diff(repo, changes=status, opts=diffopts)
282 originaldiff = patch.diff(repo, changes=status, opts=diffopts)
283 originalchunks = patch.parsepatch(originaldiff)
283 originalchunks = patch.parsepatch(originaldiff)
284
284
285 # 1. filter patch, since we are intending to apply subset of it
285 # 1. filter patch, since we are intending to apply subset of it
286 try:
286 try:
287 chunks, newopts = filterfn(ui, originalchunks)
287 chunks, newopts = filterfn(ui, originalchunks)
288 except error.PatchError as err:
288 except error.PatchError as err:
289 raise error.Abort(_('error parsing patch: %s') % err)
289 raise error.Abort(_('error parsing patch: %s') % err)
290 opts.update(newopts)
290 opts.update(newopts)
291
291
292 # We need to keep a backup of files that have been newly added and
292 # We need to keep a backup of files that have been newly added and
293 # modified during the recording process because there is a previous
293 # modified during the recording process because there is a previous
294 # version without the edit in the workdir
294 # version without the edit in the workdir
295 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
295 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
296 contenders = set()
296 contenders = set()
297 for h in chunks:
297 for h in chunks:
298 try:
298 try:
299 contenders.update(set(h.files()))
299 contenders.update(set(h.files()))
300 except AttributeError:
300 except AttributeError:
301 pass
301 pass
302
302
303 changed = status.modified + status.added + status.removed
303 changed = status.modified + status.added + status.removed
304 newfiles = [f for f in changed if f in contenders]
304 newfiles = [f for f in changed if f in contenders]
305 if not newfiles:
305 if not newfiles:
306 ui.status(_('no changes to record\n'))
306 ui.status(_('no changes to record\n'))
307 return 0
307 return 0
308
308
309 modified = set(status.modified)
309 modified = set(status.modified)
310
310
311 # 2. backup changed files, so we can restore them in the end
311 # 2. backup changed files, so we can restore them in the end
312
312
313 if backupall:
313 if backupall:
314 tobackup = changed
314 tobackup = changed
315 else:
315 else:
316 tobackup = [f for f in newfiles if f in modified or f in \
316 tobackup = [f for f in newfiles if f in modified or f in \
317 newlyaddedandmodifiedfiles]
317 newlyaddedandmodifiedfiles]
318 backups = {}
318 backups = {}
319 if tobackup:
319 if tobackup:
320 backupdir = repo.vfs.join('record-backups')
320 backupdir = repo.vfs.join('record-backups')
321 try:
321 try:
322 os.mkdir(backupdir)
322 os.mkdir(backupdir)
323 except OSError as err:
323 except OSError as err:
324 if err.errno != errno.EEXIST:
324 if err.errno != errno.EEXIST:
325 raise
325 raise
326 try:
326 try:
327 # backup continues
327 # backup continues
328 for f in tobackup:
328 for f in tobackup:
329 fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
329 fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
330 dir=backupdir)
330 dir=backupdir)
331 os.close(fd)
331 os.close(fd)
332 ui.debug('backup %r as %r\n' % (f, tmpname))
332 ui.debug('backup %r as %r\n' % (f, tmpname))
333 util.copyfile(repo.wjoin(f), tmpname, copystat=True)
333 util.copyfile(repo.wjoin(f), tmpname, copystat=True)
334 backups[f] = tmpname
334 backups[f] = tmpname
335
335
336 fp = stringio()
336 fp = stringio()
337 for c in chunks:
337 for c in chunks:
338 fname = c.filename()
338 fname = c.filename()
339 if fname in backups:
339 if fname in backups:
340 c.write(fp)
340 c.write(fp)
341 dopatch = fp.tell()
341 dopatch = fp.tell()
342 fp.seek(0)
342 fp.seek(0)
343
343
344 # 2.5 optionally review / modify patch in text editor
344 # 2.5 optionally review / modify patch in text editor
345 if opts.get('review', False):
345 if opts.get('review', False):
346 patchtext = (crecordmod.diffhelptext
346 patchtext = (crecordmod.diffhelptext
347 + crecordmod.patchhelptext
347 + crecordmod.patchhelptext
348 + fp.read())
348 + fp.read())
349 reviewedpatch = ui.edit(patchtext, "",
349 reviewedpatch = ui.edit(patchtext, "",
350 action="diff",
350 action="diff",
351 repopath=repo.path)
351 repopath=repo.path)
352 fp.truncate(0)
352 fp.truncate(0)
353 fp.write(reviewedpatch)
353 fp.write(reviewedpatch)
354 fp.seek(0)
354 fp.seek(0)
355
355
356 [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
356 [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
357 # 3a. apply filtered patch to clean repo (clean)
357 # 3a. apply filtered patch to clean repo (clean)
358 if backups:
358 if backups:
359 # Equivalent to hg.revert
359 # Equivalent to hg.revert
360 m = scmutil.matchfiles(repo, backups.keys())
360 m = scmutil.matchfiles(repo, backups.keys())
361 mergemod.update(repo, repo.dirstate.p1(),
361 mergemod.update(repo, repo.dirstate.p1(),
362 False, True, matcher=m)
362 False, True, matcher=m)
363
363
364 # 3b. (apply)
364 # 3b. (apply)
365 if dopatch:
365 if dopatch:
366 try:
366 try:
367 ui.debug('applying patch\n')
367 ui.debug('applying patch\n')
368 ui.debug(fp.getvalue())
368 ui.debug(fp.getvalue())
369 patch.internalpatch(ui, repo, fp, 1, eolmode=None)
369 patch.internalpatch(ui, repo, fp, 1, eolmode=None)
370 except error.PatchError as err:
370 except error.PatchError as err:
371 raise error.Abort(pycompat.bytestr(err))
371 raise error.Abort(pycompat.bytestr(err))
372 del fp
372 del fp
373
373
374 # 4. We prepared working directory according to filtered
374 # 4. We prepared working directory according to filtered
375 # patch. Now is the time to delegate the job to
375 # patch. Now is the time to delegate the job to
376 # commit/qrefresh or the like!
376 # commit/qrefresh or the like!
377
377
378 # Make all of the pathnames absolute.
378 # Make all of the pathnames absolute.
379 newfiles = [repo.wjoin(nf) for nf in newfiles]
379 newfiles = [repo.wjoin(nf) for nf in newfiles]
380 return commitfunc(ui, repo, *newfiles, **pycompat.strkwargs(opts))
380 return commitfunc(ui, repo, *newfiles, **pycompat.strkwargs(opts))
381 finally:
381 finally:
382 # 5. finally restore backed-up files
382 # 5. finally restore backed-up files
383 try:
383 try:
384 dirstate = repo.dirstate
384 dirstate = repo.dirstate
385 for realname, tmpname in backups.iteritems():
385 for realname, tmpname in backups.iteritems():
386 ui.debug('restoring %r to %r\n' % (tmpname, realname))
386 ui.debug('restoring %r to %r\n' % (tmpname, realname))
387
387
388 if dirstate[realname] == 'n':
388 if dirstate[realname] == 'n':
389 # without normallookup, restoring timestamp
389 # without normallookup, restoring timestamp
390 # may cause partially committed files
390 # may cause partially committed files
391 # to be treated as unmodified
391 # to be treated as unmodified
392 dirstate.normallookup(realname)
392 dirstate.normallookup(realname)
393
393
394 # copystat=True here and above are a hack to trick any
394 # copystat=True here and above are a hack to trick any
395 # editors that have f open that we haven't modified them.
395 # editors that have f open that we haven't modified them.
396 #
396 #
397 # Also note that this racy as an editor could notice the
397 # Also note that this racy as an editor could notice the
398 # file's mtime before we've finished writing it.
398 # file's mtime before we've finished writing it.
399 util.copyfile(tmpname, repo.wjoin(realname), copystat=True)
399 util.copyfile(tmpname, repo.wjoin(realname), copystat=True)
400 os.unlink(tmpname)
400 os.unlink(tmpname)
401 if tobackup:
401 if tobackup:
402 os.rmdir(backupdir)
402 os.rmdir(backupdir)
403 except OSError:
403 except OSError:
404 pass
404 pass
405
405
406 def recordinwlock(ui, repo, message, match, opts):
406 def recordinwlock(ui, repo, message, match, opts):
407 with repo.wlock():
407 with repo.wlock():
408 return recordfunc(ui, repo, message, match, opts)
408 return recordfunc(ui, repo, message, match, opts)
409
409
410 return commit(ui, repo, recordinwlock, pats, opts)
410 return commit(ui, repo, recordinwlock, pats, opts)
411
411
class dirnode(object):
    """A directory in the user's working copy, tracked for status tersing.

    Attributes:
      path     -- path of this directory relative to the repository root
      statuses -- set of status characters seen for every file anywhere
                  under this directory (subdirectories included)
      files    -- list of (filename, status) pairs for direct children only
      subdirs  -- mapping of subdirectory name -> dirnode for that subdir
    """

    def __init__(self, dirpath):
        self.path = dirpath
        self.statuses = set()
        self.files = []
        self.subdirs = {}

    def _addfileindir(self, filename, status):
        """Add a file in this directory as a direct child."""
        self.files.append((filename, status))

    def addfile(self, filename, status):
        """Record *filename* (path relative to this dir) with *status*.

        If the file lives in a subdirectory, recurse into (creating if
        needed) the matching child dirnode; either way the status char is
        folded into this node's aggregate ``statuses`` set.
        """
        if '/' in filename:
            # not a direct child: peel off the first path component
            childname, remainder = filename.split('/', 1)

            child = self.subdirs.get(childname)
            if child is None:
                child = dirnode(os.path.join(self.path, childname))
                self.subdirs[childname] = child

            child.addfile(remainder, status)
        else:
            self._addfileindir(filename, status)

        self.statuses.add(status)

    def iterfilepaths(self):
        """Yield (status, path) for files directly under this directory."""
        for name, st in self.files:
            yield st, os.path.join(self.path, name)

    def tersewalk(self, terseargs):
        """Yield (status, path) pairs, tersing where possible.

        terseargs is the string of status characters the user passed with
        the `--terse` flag.

        If every file under this directory (recursively) shares one status
        and that status is in terseargs, a single (status, dirpath + sep)
        entry is yielded for the whole tree.  Otherwise the direct files
        are yielded individually and each subdirectory is walked in turn.
        """
        if len(self.statuses) == 1:
            onlyst = self.statuses.pop()

            # terse only when the status abbreviation was requested
            if onlyst in terseargs:
                yield onlyst, self.path + pycompat.ossep
                return

        # emit this directory's own files untersed
        for entry in self.iterfilepaths():
            yield entry

        # recurse into each subdirectory
        for child in self.subdirs.values():
            for entry in child.tersewalk(terseargs):
                yield entry
511
511
def tersedir(statuslist, terseargs):
    """
    Terse the status if all the files in a directory shares the same status.

    statuslist is scmutil.status() object which contains a list of files for
    each status.
    terseargs is string which is passed by the user as the argument to `--terse`
    flag.

    Builds a tree of dirnode objects recording, per directory, whether the
    whole subtree can be collapsed to a single entry, then walks that tree.
    """
    # the order matters here as that is used to produce final list
    allst = ('m', 'a', 'r', 'd', 'u', 'i', 'c')

    # reject any unknown status abbreviation in the user's argument
    for flag in pycompat.bytestr(terseargs):
        if flag not in allst:
            raise error.Abort(_("'%s' not recognized") % flag)

    # dirnode for the repository root
    root = dirnode('')
    attrnames = ('modified', 'added', 'deleted', 'clean', 'unknown',
                 'ignored', 'removed')

    collected = {}
    for attrname in attrnames:
        statuschar = attrname[0:1]
        for fname in getattr(statuslist, attrname):
            root.addfile(fname, statuschar)
        collected[statuschar] = []

    # the root dir itself is never tersed, so take its files as-is
    for st, fpath in root.iterfilepaths():
        collected[st].append(fpath)

    # walk every subdirectory, tersing where the tree allows it
    for child in root.subdirs.values():
        for st, fpath in child.tersewalk(terseargs):
            collected[st].append(fpath)

    # emit one sorted list per status, in canonical status order
    return [sorted(collected[st]) for st in allst]
560
560
561 def _commentlines(raw):
561 def _commentlines(raw):
562 '''Surround lineswith a comment char and a new line'''
562 '''Surround lineswith a comment char and a new line'''
563 lines = raw.splitlines()
563 lines = raw.splitlines()
564 commentedlines = ['# %s' % line for line in lines]
564 commentedlines = ['# %s' % line for line in lines]
565 return '\n'.join(commentedlines) + '\n'
565 return '\n'.join(commentedlines) + '\n'
566
566
def _conflictsmsg(repo):
    """Return commented text describing unresolved merge conflicts.

    Returns None when there is no active merge state.
    """
    mergestate = mergemod.mergestate.read(repo)
    if not mergestate.active():
        return

    matcher = scmutil.match(repo[None])
    unresolved = [f for f in mergestate.unresolved() if matcher(f)]
    if not unresolved:
        msg = _('No unresolved merge conflicts.')
    else:
        # one indented, cwd-relative path per conflicted file
        pathlines = [' %s' % util.pathto(repo.root, pycompat.getcwd(), path)
                     for path in unresolved]
        msg = _('''Unresolved merge conflicts:

%s

To mark files as resolved: hg resolve --mark FILE''') % '\n'.join(pathlines)

    return _commentlines(msg)
587
587
def _helpmessage(continuecmd, abortcmd):
    """Return a commented continue/abort instruction block."""
    text = _('To continue: %s\n'
             'To abort: %s') % (continuecmd, abortcmd)
    return _commentlines(text)
592
592
def _rebasemsg():
    """Help text shown for an interrupted rebase."""
    return _helpmessage('hg rebase --continue', 'hg rebase --abort')
595
595
def _histeditmsg():
    """Help text shown for an interrupted histedit."""
    return _helpmessage('hg histedit --continue', 'hg histedit --abort')
598
598
def _unshelvemsg():
    """Help text shown for an interrupted unshelve."""
    return _helpmessage('hg unshelve --continue', 'hg unshelve --abort')
601
601
def _updatecleanmsg(dest=None):
    """Return an 'hg update --clean' suggestion with a data-loss warning.

    *dest* defaults to '.' when not given.
    """
    warning = _('warning: this will discard uncommitted changes')
    return 'hg update --clean %s (%s)' % (dest or '.', warning)
605
605
def _graftmsg():
    """Help text shown for an interrupted graft."""
    # tweakdefaults requires `update` to have a rev hence the `.`
    return _helpmessage('hg graft --continue', _updatecleanmsg())
609
609
def _mergemsg():
    """Help text shown for an uncommitted merge."""
    # tweakdefaults requires `update` to have a rev hence the `.`
    return _helpmessage('hg commit', _updatecleanmsg())
613
613
def _bisectmsg():
    """Help text shown while a bisection is in progress."""
    msg = _('To mark the changeset good: hg bisect --good\n'
            'To mark the changeset bad: hg bisect --bad\n'
            'To abort: hg bisect --reset\n')
    return _commentlines(msg)
619
619
def fileexistspredicate(filename):
    """Return a predicate: does *filename* exist in a repo's .hg vfs?"""
    def predicate(repo):
        return repo.vfs.exists(filename)
    return predicate
622
622
623 def _mergepredicate(repo):
623 def _mergepredicate(repo):
624 return len(repo[None].parents()) > 1
624 return len(repo[None].parents()) > 1
625
625
# Table of unfinished repository states checked by `hg status`.
STATES = (
    # (state, predicate to detect states, helpful message function)
    ('histedit', fileexistspredicate('histedit-state'), _histeditmsg),
    ('bisect', fileexistspredicate('bisect.state'), _bisectmsg),
    ('graft', fileexistspredicate('graftstate'), _graftmsg),
    ('unshelve', fileexistspredicate('unshelverebasestate'), _unshelvemsg),
    ('rebase', fileexistspredicate('rebasestate'), _rebasemsg),
    # The merge state is part of a list that will be iterated over.
    # They need to be last because some of the other unfinished states may also
    # be in a merge or update state (eg. rebase, histedit, graft, etc).
    # We want those to have priority.
    ('merge', _mergepredicate, _mergemsg),
)
639
639
def _getrepostate(repo):
    """Return the first matching (state, predicate, msgfn) from STATES.

    States listed in the experimental commands.status.skipstates config
    are skipped.  Returns None when no unfinished state is detected.
    """
    # experimental config: commands.status.skipstates
    skipped = set(repo.ui.configlist('commands', 'status.skipstates'))
    for state, statedetectionpredicate, msgfn in STATES:
        if state not in skipped and statedetectionpredicate(repo):
            return (state, statedetectionpredicate, msgfn)
648
648
def morestatus(repo, fm):
    """Write commented extra-status info about unfinished states to *fm*.

    Emits nothing when the repository is not in an unfinished state.
    """
    statetuple = _getrepostate(repo)
    if not statetuple:
        return

    label = 'status.morestatus'
    fm.startitem()
    state, statedetectionpredicate, helpfulmsg = statetuple
    statemsg = _('The repository is in an unfinished *%s* state.') % state
    fm.write('statemsg', '%s\n', _commentlines(statemsg), label=label)

    conmsg = _conflictsmsg(repo)
    if conmsg:
        fm.write('conflictsmsg', '%s\n', conmsg, label=label)
    if helpfulmsg:
        fm.write('helpmsg', '%s\n', helpfulmsg(), label=label)
663
663
def findpossible(cmd, table, strict=False):
    """
    Return cmd -> (aliases, command table entry)
    for each matching command.
    Return debug commands (or their aliases) only if no normal command matches.
    """
    choice = {}
    debugchoice = {}

    if cmd in table:
        # short-circuit exact matches, "log" alias beats "^log|history"
        keys = [cmd]
    else:
        keys = table.keys()

    allcmds = []
    for entry in keys:
        aliases = parsealiases(entry)
        allcmds.extend(aliases)

        # exact alias match wins; otherwise (non-strict) accept a prefix
        if cmd in aliases:
            found = cmd
        elif not strict:
            found = next((a for a in aliases if a.startswith(cmd)), None)
        else:
            found = None

        if found is None:
            continue
        if aliases[0].startswith("debug") or found.startswith("debug"):
            debugchoice[found] = (aliases, table[entry])
        else:
            choice[found] = (aliases, table[entry])

    # fall back to debug commands only when nothing else matched
    if not choice and debugchoice:
        choice = debugchoice

    return choice, allcmds
701
701
def findcmd(cmd, table, strict=True):
    """Return (aliases, command table entry) for command string.

    Raises AmbiguousCommand when several commands match and UnknownCommand
    when none do.
    """
    choice, allcmds = findpossible(cmd, table, strict)

    # exact name match beats any prefix matches
    if cmd in choice:
        return choice[cmd]

    if len(choice) > 1:
        raise error.AmbiguousCommand(cmd, sorted(choice))

    if choice:
        return list(choice.values())[0]

    raise error.UnknownCommand(cmd, allcmds)
717
717
def changebranch(ui, repo, revs, label):
    """ Change the branch name of given revs to label

    Rewrites each changeset in *revs* (a revset spec) with its branch set
    to *label*, creating obsolescence markers for the old nodes and moving
    bookmarks and the working copy onto the rewritten changesets.
    Raises error.Abort for dirty wdir, non-linear/merge/obsolete revisions,
    mid-stack rewrites, or a pre-existing branch of the same name.
    """

    with repo.wlock(), repo.lock(), repo.transaction('branches'):
        # abort in case of uncommitted merge or dirty wdir
        bailifchanged(repo)
        revs = scmutil.revrange(repo, revs)
        if not revs:
            raise error.Abort("empty revision set")
        roots = repo.revs('roots(%ld)', revs)
        if len(roots) > 1:
            raise error.Abort(_("cannot change branch of non-linear revisions"))
        rewriteutil.precheck(repo, revs, 'change branch of')

        root = repo[roots.first()]
        # reusing an existing branch name is only allowed when extending
        # that same branch (i.e. the parent already carries the label)
        if not root.p1().branch() == label and label in repo.branchmap():
            raise error.Abort(_("a branch of the same name already exists"))

        if repo.revs('merge() and %ld', revs):
            raise error.Abort(_("cannot change branch of a merge commit"))
        if repo.revs('obsolete() and %ld', revs):
            raise error.Abort(_("cannot change branch of a obsolete changeset"))

        # make sure only topological heads
        if repo.revs('heads(%ld) - head()', revs):
            raise error.Abort(_("cannot change branch in middle of a stack"))

        # old node -> (new node,) for every rewritten changeset
        replacements = {}
        # avoid import cycle mercurial.cmdutil -> mercurial.context ->
        # mercurial.subrepo -> mercurial.cmdutil
        from . import context
        for rev in revs:
            ctx = repo[rev]
            oldbranch = ctx.branch()
            # check if ctx has same branch
            if oldbranch == label:
                continue

            # file contents are taken unchanged from the original changeset
            def filectxfn(repo, newctx, path):
                try:
                    return ctx[path]
                except error.ManifestLookupError:
                    return None

            ui.debug("changing branch of '%s' from '%s' to '%s'\n"
                     % (hex(ctx.node()), oldbranch, label))
            extra = ctx.extra()
            # record the predecessor node in extra for traceability
            extra['branch_change'] = hex(ctx.node())
            # While changing branch of set of linear commits, make sure that
            # we base our commits on new parent rather than old parent which
            # was obsoleted while changing the branch
            p1 = ctx.p1().node()
            p2 = ctx.p2().node()
            if p1 in replacements:
                p1 = replacements[p1][0]
            if p2 in replacements:
                p2 = replacements[p2][0]

            mc = context.memctx(repo, (p1, p2),
                                ctx.description(),
                                ctx.files(),
                                filectxfn,
                                user=ctx.user(),
                                date=ctx.date(),
                                extra=extra,
                                branch=label)

            # keep the original phase on the rewritten commit
            commitphase = ctx.phase()
            overrides = {('phases', 'new-commit'): commitphase}
            with repo.ui.configoverride(overrides, 'branch-change'):
                newnode = repo.commitctx(mc)

            replacements[ctx.node()] = (newnode,)
            ui.debug('new node id is %s\n' % hex(newnode))

        # create obsmarkers and move bookmarks
        scmutil.cleanupnodes(repo, replacements, 'branch-change')

        # move the working copy too
        wctx = repo[None]
        # in-progress merge is a bit too complex for now.
        if len(wctx.parents()) == 1:
            newid = replacements.get(wctx.p1().node())
            if newid is not None:
                # avoid import cycle mercurial.cmdutil -> mercurial.hg ->
                # mercurial.cmdutil
                from . import hg
                hg.update(repo, newid[0], quietempty=True)

        ui.status(_("changed branch on %d changesets\n") % len(replacements))
808
808
def findrepo(p):
    """Walk upwards from *p* looking for a directory containing '.hg'.

    Returns the repository root path, or None when the filesystem root is
    reached without finding one.
    """
    while not os.path.isdir(os.path.join(p, ".hg")):
        parent = os.path.dirname(p)
        if parent == p:
            # dirname() is a fixed point only at the filesystem root
            return None
        p = parent

    return p
816
816
def bailifchanged(repo, merge=True, hint=None):
    """Enforce the precondition that the working directory is clean.

    'merge' can be set to False if a pending uncommitted merge should be
    ignored (such as when 'update --check' runs).

    'hint' is the usual hint given to the Abort exception.
    """
    if merge and repo.dirstate.p2() != nullid:
        raise error.Abort(_('outstanding uncommitted merge'), hint=hint)
    # first four status fields: modified, added, removed, deleted
    if any(repo.status()[:4]):
        raise error.Abort(_('uncommitted changes'), hint=hint)
    # recurse into subrepositories in deterministic order
    wctx = repo[None]
    for subpath in sorted(wctx.substate):
        wctx.sub(subpath).bailifchanged(hint=hint)
834
834
def logmessage(ui, opts):
    """Get the log message according to the -m and -l options."""
    message = opts.get('message')
    logfile = opts.get('logfile')

    if message and logfile:
        raise error.Abort(_('options --message and --logfile are mutually '
                            'exclusive'))
    if message or not logfile:
        # either -m was given, or neither option was: return it as-is
        return message
    try:
        if isstdiofilename(logfile):
            return ui.fin.read()
        # normalize line endings while reading from the file
        return '\n'.join(util.readfile(logfile).splitlines())
    except IOError as inst:
        raise error.Abort(_("can't read commit message '%s': %s") %
                          (logfile, encoding.strtolocal(inst.strerror)))
853
853
def mergeeditform(ctxorbool, baseformname):
    """Return the appropriate editform name (referencing a committemplate).

    'ctxorbool' is either a ctx to be committed, or a bool indicating
    whether a merge is being committed.

    Returns baseformname with '.merge' appended if it is a merge,
    otherwise with '.normal' appended.
    """
    if isinstance(ctxorbool, bool):
        ismerge = ctxorbool
    else:
        ismerge = len(ctxorbool.parents()) > 1
    return baseformname + (".merge" if ismerge else ".normal")
870
870
def getcommiteditor(edit=False, finishdesc=None, extramsg=None,
                    editform='', **opts):
    """Get the appropriate commit message editor per the '--edit' option.

    'finishdesc' is a function called with the edited commit message
    (= 'description' of the new changeset) just after editing, but
    before the empty-ness check; it returns the actual text to be stored
    into history, which allows changing the description before storing.

    'extramsg' is an extra message shown in the editor instead of the
    'Leave message empty to abort commit' line ('HG: ' prefix and EOL
    are added automatically).

    'editform' is a dot-separated list of names, distinguishing the
    purpose of the commit text editing.

    Returns 'commitforceeditor' regardless of 'edit' when either
    'finishdesc' or 'extramsg' is specified, because those are specific
    to usage in MQ.
    """
    if edit or finishdesc or extramsg:
        def forceeditor(r, c, s):
            return commitforceeditor(r, c, s,
                                     finishdesc=finishdesc,
                                     extramsg=extramsg,
                                     editform=editform)
        return forceeditor
    if editform:
        def formeditor(r, c, s):
            return commiteditor(r, c, s, editform=editform)
        return formeditor
    return commiteditor
901
901
def rendertemplate(ctx, tmpl, props=None):
    """Expand a literal template 'tmpl' byte-string against one changeset.

    Each props item must be a stringify-able value or a callable returning
    such a value, i.e. no bare list nor dict should be passed.
    """
    repo = ctx.repo()
    resources = formatter.templateresources(repo.ui, repo)
    templ = formatter.maketemplater(repo.ui, tmpl,
                                    defaults=templatekw.keywords,
                                    resources=resources)
    mapping = {'ctx': ctx}
    mapping.update(props or {})
    return templ.renderdefault(mapping)
916
916
def _buildfntemplate(pat, total=None, seqno=None, revwidth=None, pathname=None):
    r"""Convert old-style filename format string to template string

    >>> _buildfntemplate(b'foo-%b-%n.patch', seqno=0)
    'foo-{reporoot|basename}-{seqno}.patch'
    >>> _buildfntemplate(b'%R{tags % "{tag}"}%H')
    '{rev}{tags % "{tag}"}{node}'

    '\' in outermost strings has to be escaped because it is a directory
    separator on Windows:

    >>> _buildfntemplate(b'c:\\tmp\\%R\\%n.patch', seqno=0)
    'c:\\\\tmp\\\\{rev}\\\\{seqno}.patch'
    >>> _buildfntemplate(b'\\\\foo\\bar.patch')
    '\\\\\\\\foo\\\\bar.patch'
    >>> _buildfntemplate(b'\\{tags % "{tag}"}')
    '\\\\{tags % "{tag}"}'

    but inner strings follow the template rules (i.e. '\' is taken as an
    escape character):

    >>> _buildfntemplate(br'{"c:\tmp"}', seqno=0)
    '{"c:\\tmp"}'
    """
    # mapping of %-specifiers to their template equivalents
    expander = {
        b'H': b'{node}',
        b'R': b'{rev}',
        b'h': b'{node|short}',
        b'm': br'{sub(r"[^\w]", "_", desc|firstline)}',
        b'r': b'{if(revwidth, pad(rev, revwidth, "0", left=True), rev)}',
        b'%': b'%',
        b'b': b'{reporoot|basename}',
    }
    if total is not None:
        expander[b'N'] = b'{total}'
    if seqno is not None:
        expander[b'n'] = b'{seqno}'
    if total is not None and seqno is not None:
        # with both available, zero-pad seqno to the width of the total
        expander[b'n'] = b'{pad(seqno, total|stringify|count, "0", left=True)}'
    if pathname is not None:
        expander[b's'] = b'{pathname|basename}'
        expander[b'd'] = b'{if(pathname|dirname, pathname|dirname, ".")}'
        expander[b'p'] = b'{pathname}'

    parts = []
    for typ, start, end in templater.scantemplate(pat, raw=True):
        if typ != b'string':
            # already template syntax: pass through untouched
            parts.append(pat[start:end])
            continue
        pos = start
        while pos < end:
            pct = pat.find(b'%', pos, end)
            if pct < 0:
                # no more %-specs: escape the rest of this string segment
                parts.append(stringutil.escapestr(pat[pos:end]))
                break
            parts.append(stringutil.escapestr(pat[pos:pct]))
            if pct + 2 > end:
                raise error.Abort(_("incomplete format spec in output "
                                    "filename"))
            spec = pat[pct + 1:pct + 2]
            pos = pct + 2
            try:
                parts.append(expander[spec])
            except KeyError:
                raise error.Abort(_("invalid format spec '%%%s' in output "
                                    "filename") % spec)
    return ''.join(parts)
984
984
def makefilename(ctx, pat, **props):
    """Expand a %-format output filename pattern for the given changeset."""
    if not pat:
        # empty pattern (stdin/stdout) needs no expansion
        return pat
    tmpl = _buildfntemplate(pat, **props)
    # BUG: alias expansion shouldn't be made against template fragments
    # rewritten from %-format strings, but we have no easy way to partially
    # disable the expansion.
    return rendertemplate(ctx, tmpl, pycompat.byteskwargs(props))
993
993
def isstdiofilename(pat):
    """True if the given pat looks like a filename denoting stdin/stdout"""
    if not pat:
        # empty/None pattern conventionally means the standard streams
        return True
    return pat == '-'
997
997
998 class _unclosablefile(object):
998 class _unclosablefile(object):
999 def __init__(self, fp):
999 def __init__(self, fp):
1000 self._fp = fp
1000 self._fp = fp
1001
1001
1002 def close(self):
1002 def close(self):
1003 pass
1003 pass
1004
1004
1005 def __iter__(self):
1005 def __iter__(self):
1006 return iter(self._fp)
1006 return iter(self._fp)
1007
1007
1008 def __getattr__(self, attr):
1008 def __getattr__(self, attr):
1009 return getattr(self._fp, attr)
1009 return getattr(self._fp, attr)
1010
1010
1011 def __enter__(self):
1011 def __enter__(self):
1012 return self
1012 return self
1013
1013
1014 def __exit__(self, exc_type, exc_value, exc_tb):
1014 def __exit__(self, exc_type, exc_value, exc_tb):
1015 pass
1015 pass
1016
1016
def makefileobj(ctx, pat, mode='wb', modemap=None, **props):
    """Open the output file described by 'pat' for the given changeset.

    A stdio-like pattern ('-' or empty) maps onto the ui's input/output
    stream, wrapped so that close() is a no-op.  'modemap' tracks the
    per-filename open mode so repeated writes to the same expanded name
    append rather than truncate.
    """
    writable = mode not in ('r', 'rb')

    if isstdiofilename(pat):
        ui = ctx.repo().ui
        stream = ui.fout if writable else ui.fin
        return _unclosablefile(stream)
    fn = makefilename(ctx, pat, **props)
    if modemap is not None:
        mode = modemap.get(fn, mode)
        if mode == 'wb':
            # first open truncates; subsequent opens of the same name append
            modemap[fn] = 'ab'
    return open(fn, mode)
1033
1033
def openrevlog(repo, cmd, file_, opts):
    """Open the changelog, manifest, a filelog or a given revlog.

    Exactly one source is selected by opts: 'changelog', 'manifest',
    'dir' (a treemanifest directory), or a filename.  Raises
    error.Abort on conflicting/insufficient options and
    error.CommandError when no revlog can be determined.
    """
    cl = opts['changelog']
    mf = opts['manifest']
    dir = opts['dir']
    msg = None
    # validate mutually-exclusive option combinations up front
    if cl and mf:
        msg = _('cannot specify --changelog and --manifest at the same time')
    elif cl and dir:
        msg = _('cannot specify --changelog and --dir at the same time')
    elif cl or mf or dir:
        if file_:
            msg = _('cannot specify filename with --changelog or --manifest')
        elif not repo:
            msg = _('cannot specify --changelog or --manifest or --dir '
                    'without a repository')
    if msg:
        raise error.Abort(msg)

    r = None
    if repo:
        if cl:
            r = repo.unfiltered().changelog
        elif dir:
            if 'treemanifest' not in repo.requirements:
                raise error.Abort(_("--dir can only be used on repos with "
                                    "treemanifest enabled"))
            # dirlog() keys directories by their trailing-slash form;
            # normalize here so callers may pass either spelling
            if not dir.endswith('/'):
                dir = dir + '/'
            dirlog = repo.manifestlog._revlog.dirlog(dir)
            if len(dirlog):
                r = dirlog
        elif mf:
            r = repo.manifestlog._revlog
        elif file_:
            filelog = repo.file(file_)
            if len(filelog):
                r = filelog
    if not r:
        if not file_:
            raise error.CommandError(cmd, _('invalid arguments'))
        if not os.path.isfile(file_):
            raise error.Abort(_("revlog '%s' not found") % file_)
        # fall back to opening a raw revlog from the filesystem; strip the
        # data-file suffix ('.d') and open the corresponding index ('.i')
        r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
                          file_[:-2] + ".i")
    return r
1078
1080
def copy(ui, repo, pats, opts, rename=False):
    """Copy (or, with rename=True, move) files matching 'pats' to a
    destination, recording the operation in the dirstate.

    Called with the repo lock held.  Returns True if any copy failed.

    Path vocabulary used below:
      hgsep => pathname that uses "/" to separate directories
      ossep => pathname that uses os.sep to separate directories
    """
    cwd = repo.getcwd()
    targets = {}
    after = opts.get("after")
    dryrun = opts.get("dry_run")
    wctx = repo[None]

    def walkpat(pat):
        # collect (abs hgsep, rel ossep, exact-match) source triples for pat
        srcs = []
        badstates = '?' if after else '?r'
        m = scmutil.match(wctx, [pat], opts, globbed=True)
        for abs in wctx.walk(m):
            state = repo.dirstate[abs]
            rel = m.rel(abs)
            exact = m.exact(abs)
            if state in badstates:
                # warn only for names the user spelled out explicitly
                if exact and state == '?':
                    ui.warn(_('%s: not copying - file is not managed\n') % rel)
                if exact and state == 'r':
                    ui.warn(_('%s: not copying - file has been marked for'
                              ' remove\n') % rel)
                continue
            srcs.append((abs, rel, exact))
        return srcs

    # abssrc: hgsep
    # relsrc: ossep
    # otarget: ossep
    def copyfile(abssrc, relsrc, otarget, exact):
        # perform one copy/move; returns True to report a failure
        abstarget = pathutil.canonpath(repo.root, cwd, otarget)
        if '/' in abstarget:
            # We cannot normalize abstarget itself, this would prevent
            # case only renames, like a => A.
            abspath, absname = abstarget.rsplit('/', 1)
            abstarget = repo.dirstate.normalize(abspath) + '/' + absname
        reltarget = repo.pathto(abstarget, cwd)
        target = repo.wjoin(abstarget)
        src = repo.wjoin(abssrc)
        state = repo.dirstate[abstarget]

        scmutil.checkportable(ui, abstarget)

        # check for collisions between two sources mapping to one target
        prevsrc = targets.get(abstarget)
        if prevsrc is not None:
            ui.warn(_('%s: not overwriting - %s collides with %s\n') %
                    (reltarget, repo.pathto(abssrc, cwd),
                     repo.pathto(prevsrc, cwd)))
            return

        # check for overwrites of an existing target
        exists = os.path.lexists(target)
        samefile = False
        if exists and abssrc != abstarget:
            if (repo.dirstate.normalize(abssrc) ==
                repo.dirstate.normalize(abstarget)):
                # case-insensitive filesystem: same file under two spellings
                if not rename:
                    ui.warn(_("%s: can't copy - same file\n") % reltarget)
                    return
                exists = False
                samefile = True

        if not after and exists or after and state in 'mn':
            if not opts['force']:
                if state in 'mn':
                    msg = _('%s: not overwriting - file already committed\n')
                    flags = '--after --force' if after else '--force'
                    if rename:
                        hint = _('(hg rename %s to replace the file by '
                                 'recording a rename)\n') % flags
                    else:
                        hint = _('(hg copy %s to replace the file by '
                                 'recording a copy)\n') % flags
                else:
                    msg = _('%s: not overwriting - file exists\n')
                    if rename:
                        hint = _('(hg rename --after to record the rename)\n')
                    else:
                        hint = _('(hg copy --after to record the copy)\n')
                ui.warn(msg % reltarget)
                ui.warn(hint)
                return

        if after:
            # --after only records operations already done in the filesystem
            if not exists:
                if rename:
                    ui.warn(_('%s: not recording move - %s does not exist\n') %
                            (relsrc, reltarget))
                else:
                    ui.warn(_('%s: not recording copy - %s does not exist\n') %
                            (relsrc, reltarget))
                return
        elif not dryrun:
            try:
                if exists:
                    os.unlink(target)
                targetdir = os.path.dirname(target) or '.'
                if not os.path.isdir(targetdir):
                    os.makedirs(targetdir)
                if samefile:
                    # two-step rename through a temp name for case-only moves
                    tmp = target + "~hgrename"
                    os.rename(src, tmp)
                    os.rename(tmp, target)
                else:
                    # Preserve stat info on renames, not on copies; this matches
                    # Linux CLI behavior.
                    util.copyfile(src, target, copystat=rename)
                srcexists = True
            except IOError as inst:
                if inst.errno == errno.ENOENT:
                    ui.warn(_('%s: deleted in working directory\n') % relsrc)
                    srcexists = False
                else:
                    ui.warn(_('%s: cannot copy - %s\n') %
                            (relsrc, encoding.strtolocal(inst.strerror)))
                    return True # report a failure

        if ui.verbose or not exact:
            if rename:
                ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
            else:
                ui.status(_('copying %s to %s\n') % (relsrc, reltarget))

        targets[abstarget] = abssrc

        # fix up dirstate
        scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
                             dryrun=dryrun, cwd=cwd)
        if rename and not dryrun:
            if not after and srcexists and not samefile:
                repo.wvfs.unlinkpath(abssrc)
            wctx.forget([abssrc])

    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathfn(pat, dest, srcs):
        if os.path.isdir(pat):
            abspfx = pathutil.canonpath(repo.root, cwd, pat)
            abspfx = util.localpath(abspfx)
            if destdirexists:
                striplen = len(os.path.split(abspfx)[0])
            else:
                striplen = len(abspfx)
            if striplen:
                striplen += len(pycompat.ossep)
            res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
        elif destdirexists:
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            res = lambda p: dest
        return res

    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathafterfn(pat, dest, srcs):
        if matchmod.patkind(pat):
            # a mercurial pattern
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            abspfx = pathutil.canonpath(repo.root, cwd, pat)
            if len(abspfx) < len(srcs[0][0]):
                # A directory. Either the target path contains the last
                # component of the source path or it does not.
                def evalpath(striplen):
                    # count how many already-existing targets this strip
                    # length would produce; higher score wins
                    score = 0
                    for s in srcs:
                        t = os.path.join(dest, util.localpath(s[0])[striplen:])
                        if os.path.lexists(t):
                            score += 1
                    return score

                abspfx = util.localpath(abspfx)
                striplen = len(abspfx)
                if striplen:
                    striplen += len(pycompat.ossep)
                if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
                    score = evalpath(striplen)
                    striplen1 = len(os.path.split(abspfx)[0])
                    if striplen1:
                        striplen1 += len(pycompat.ossep)
                    if evalpath(striplen1) > score:
                        striplen = striplen1
                res = lambda p: os.path.join(dest,
                                             util.localpath(p)[striplen:])
            else:
                # a file
                if destdirexists:
                    res = lambda p: os.path.join(dest,
                                        os.path.basename(util.localpath(p)))
                else:
                    res = lambda p: dest
        return res

    pats = scmutil.expandpats(pats)
    if not pats:
        raise error.Abort(_('no source or destination specified'))
    if len(pats) == 1:
        raise error.Abort(_('no destination specified'))
    dest = pats.pop()
    destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
    if not destdirexists:
        if len(pats) > 1 or matchmod.patkind(pats[0]):
            raise error.Abort(_('with multiple sources, destination must be an '
                                'existing directory'))
        if util.endswithsep(dest):
            raise error.Abort(_('destination %s is not a directory') % dest)

    tfn = targetpathafterfn if after else targetpathfn
    copylist = []
    for pat in pats:
        srcs = walkpat(pat)
        if not srcs:
            continue
        copylist.append((tfn(pat, dest, srcs), srcs))
    if not copylist:
        raise error.Abort(_('no files to copy'))

    errors = 0
    for targetpath, srcs in copylist:
        for abssrc, relsrc, exact in srcs:
            if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
                errors += 1

    if errors:
        ui.warn(_('(consider using --after)\n'))

    return errors != 0
1326
1328
1327 ## facility to let extension process additional data into an import patch
1329 ## facility to let extension process additional data into an import patch
1328 # list of identifier to be executed in order
1330 # list of identifier to be executed in order
1329 extrapreimport = [] # run before commit
1331 extrapreimport = [] # run before commit
1330 extrapostimport = [] # run after commit
1332 extrapostimport = [] # run after commit
1331 # mapping from identifier to actual import function
1333 # mapping from identifier to actual import function
1332 #
1334 #
1333 # 'preimport' are run before the commit is made and are provided the following
1335 # 'preimport' are run before the commit is made and are provided the following
1334 # arguments:
1336 # arguments:
1335 # - repo: the localrepository instance,
1337 # - repo: the localrepository instance,
1336 # - patchdata: data extracted from patch header (cf m.patch.patchheadermap),
1338 # - patchdata: data extracted from patch header (cf m.patch.patchheadermap),
1337 # - extra: the future extra dictionary of the changeset, please mutate it,
1339 # - extra: the future extra dictionary of the changeset, please mutate it,
1338 # - opts: the import options.
1340 # - opts: the import options.
1339 # XXX ideally, we would just pass an ctx ready to be computed, that would allow
1341 # XXX ideally, we would just pass an ctx ready to be computed, that would allow
1340 # mutation of in memory commit and more. Feel free to rework the code to get
1342 # mutation of in memory commit and more. Feel free to rework the code to get
1341 # there.
1343 # there.
1342 extrapreimportmap = {}
1344 extrapreimportmap = {}
1343 # 'postimport' are run after the commit is made and are provided the following
1345 # 'postimport' are run after the commit is made and are provided the following
1344 # argument:
1346 # argument:
1345 # - ctx: the changectx created by import.
1347 # - ctx: the changectx created by import.
1346 extrapostimportmap = {}
1348 extrapostimportmap = {}
1347
1349
1348 def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
1350 def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
1349 """Utility function used by commands.import to import a single patch
1351 """Utility function used by commands.import to import a single patch
1350
1352
1351 This function is explicitly defined here to help the evolve extension to
1353 This function is explicitly defined here to help the evolve extension to
1352 wrap this part of the import logic.
1354 wrap this part of the import logic.
1353
1355
1354 The API is currently a bit ugly because it a simple code translation from
1356 The API is currently a bit ugly because it a simple code translation from
1355 the import command. Feel free to make it better.
1357 the import command. Feel free to make it better.
1356
1358
1357 :hunk: a patch (as a binary string)
1359 :hunk: a patch (as a binary string)
1358 :parents: nodes that will be parent of the created commit
1360 :parents: nodes that will be parent of the created commit
1359 :opts: the full dict of option passed to the import command
1361 :opts: the full dict of option passed to the import command
1360 :msgs: list to save commit message to.
1362 :msgs: list to save commit message to.
1361 (used in case we need to save it when failing)
1363 (used in case we need to save it when failing)
1362 :updatefunc: a function that update a repo to a given node
1364 :updatefunc: a function that update a repo to a given node
1363 updatefunc(<repo>, <node>)
1365 updatefunc(<repo>, <node>)
1364 """
1366 """
1365 # avoid cycle context -> subrepo -> cmdutil
1367 # avoid cycle context -> subrepo -> cmdutil
1366 from . import context
1368 from . import context
1367 extractdata = patch.extract(ui, hunk)
1369 extractdata = patch.extract(ui, hunk)
1368 tmpname = extractdata.get('filename')
1370 tmpname = extractdata.get('filename')
1369 message = extractdata.get('message')
1371 message = extractdata.get('message')
1370 user = opts.get('user') or extractdata.get('user')
1372 user = opts.get('user') or extractdata.get('user')
1371 date = opts.get('date') or extractdata.get('date')
1373 date = opts.get('date') or extractdata.get('date')
1372 branch = extractdata.get('branch')
1374 branch = extractdata.get('branch')
1373 nodeid = extractdata.get('nodeid')
1375 nodeid = extractdata.get('nodeid')
1374 p1 = extractdata.get('p1')
1376 p1 = extractdata.get('p1')
1375 p2 = extractdata.get('p2')
1377 p2 = extractdata.get('p2')
1376
1378
1377 nocommit = opts.get('no_commit')
1379 nocommit = opts.get('no_commit')
1378 importbranch = opts.get('import_branch')
1380 importbranch = opts.get('import_branch')
1379 update = not opts.get('bypass')
1381 update = not opts.get('bypass')
1380 strip = opts["strip"]
1382 strip = opts["strip"]
1381 prefix = opts["prefix"]
1383 prefix = opts["prefix"]
1382 sim = float(opts.get('similarity') or 0)
1384 sim = float(opts.get('similarity') or 0)
1383 if not tmpname:
1385 if not tmpname:
1384 return (None, None, False)
1386 return (None, None, False)
1385
1387
1386 rejects = False
1388 rejects = False
1387
1389
1388 try:
1390 try:
1389 cmdline_message = logmessage(ui, opts)
1391 cmdline_message = logmessage(ui, opts)
1390 if cmdline_message:
1392 if cmdline_message:
1391 # pickup the cmdline msg
1393 # pickup the cmdline msg
1392 message = cmdline_message
1394 message = cmdline_message
1393 elif message:
1395 elif message:
1394 # pickup the patch msg
1396 # pickup the patch msg
1395 message = message.strip()
1397 message = message.strip()
1396 else:
1398 else:
1397 # launch the editor
1399 # launch the editor
1398 message = None
1400 message = None
1399 ui.debug('message:\n%s\n' % message)
1401 ui.debug('message:\n%s\n' % message)
1400
1402
1401 if len(parents) == 1:
1403 if len(parents) == 1:
1402 parents.append(repo[nullid])
1404 parents.append(repo[nullid])
1403 if opts.get('exact'):
1405 if opts.get('exact'):
1404 if not nodeid or not p1:
1406 if not nodeid or not p1:
1405 raise error.Abort(_('not a Mercurial patch'))
1407 raise error.Abort(_('not a Mercurial patch'))
1406 p1 = repo[p1]
1408 p1 = repo[p1]
1407 p2 = repo[p2 or nullid]
1409 p2 = repo[p2 or nullid]
1408 elif p2:
1410 elif p2:
1409 try:
1411 try:
1410 p1 = repo[p1]
1412 p1 = repo[p1]
1411 p2 = repo[p2]
1413 p2 = repo[p2]
1412 # Without any options, consider p2 only if the
1414 # Without any options, consider p2 only if the
1413 # patch is being applied on top of the recorded
1415 # patch is being applied on top of the recorded
1414 # first parent.
1416 # first parent.
1415 if p1 != parents[0]:
1417 if p1 != parents[0]:
1416 p1 = parents[0]
1418 p1 = parents[0]
1417 p2 = repo[nullid]
1419 p2 = repo[nullid]
1418 except error.RepoError:
1420 except error.RepoError:
1419 p1, p2 = parents
1421 p1, p2 = parents
1420 if p2.node() == nullid:
1422 if p2.node() == nullid:
1421 ui.warn(_("warning: import the patch as a normal revision\n"
1423 ui.warn(_("warning: import the patch as a normal revision\n"
1422 "(use --exact to import the patch as a merge)\n"))
1424 "(use --exact to import the patch as a merge)\n"))
1423 else:
1425 else:
1424 p1, p2 = parents
1426 p1, p2 = parents
1425
1427
1426 n = None
1428 n = None
1427 if update:
1429 if update:
1428 if p1 != parents[0]:
1430 if p1 != parents[0]:
1429 updatefunc(repo, p1.node())
1431 updatefunc(repo, p1.node())
1430 if p2 != parents[1]:
1432 if p2 != parents[1]:
1431 repo.setparents(p1.node(), p2.node())
1433 repo.setparents(p1.node(), p2.node())
1432
1434
1433 if opts.get('exact') or importbranch:
1435 if opts.get('exact') or importbranch:
1434 repo.dirstate.setbranch(branch or 'default')
1436 repo.dirstate.setbranch(branch or 'default')
1435
1437
1436 partial = opts.get('partial', False)
1438 partial = opts.get('partial', False)
1437 files = set()
1439 files = set()
1438 try:
1440 try:
1439 patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
1441 patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
1440 files=files, eolmode=None, similarity=sim / 100.0)
1442 files=files, eolmode=None, similarity=sim / 100.0)
1441 except error.PatchError as e:
1443 except error.PatchError as e:
1442 if not partial:
1444 if not partial:
1443 raise error.Abort(pycompat.bytestr(e))
1445 raise error.Abort(pycompat.bytestr(e))
1444 if partial:
1446 if partial:
1445 rejects = True
1447 rejects = True
1446
1448
1447 files = list(files)
1449 files = list(files)
1448 if nocommit:
1450 if nocommit:
1449 if message:
1451 if message:
1450 msgs.append(message)
1452 msgs.append(message)
1451 else:
1453 else:
1452 if opts.get('exact') or p2:
1454 if opts.get('exact') or p2:
1453 # If you got here, you either use --force and know what
1455 # If you got here, you either use --force and know what
1454 # you are doing or used --exact or a merge patch while
1456 # you are doing or used --exact or a merge patch while
1455 # being updated to its first parent.
1457 # being updated to its first parent.
1456 m = None
1458 m = None
1457 else:
1459 else:
1458 m = scmutil.matchfiles(repo, files or [])
1460 m = scmutil.matchfiles(repo, files or [])
1459 editform = mergeeditform(repo[None], 'import.normal')
1461 editform = mergeeditform(repo[None], 'import.normal')
1460 if opts.get('exact'):
1462 if opts.get('exact'):
1461 editor = None
1463 editor = None
1462 else:
1464 else:
1463 editor = getcommiteditor(editform=editform,
1465 editor = getcommiteditor(editform=editform,
1464 **pycompat.strkwargs(opts))
1466 **pycompat.strkwargs(opts))
1465 extra = {}
1467 extra = {}
1466 for idfunc in extrapreimport:
1468 for idfunc in extrapreimport:
1467 extrapreimportmap[idfunc](repo, extractdata, extra, opts)
1469 extrapreimportmap[idfunc](repo, extractdata, extra, opts)
1468 overrides = {}
1470 overrides = {}
1469 if partial:
1471 if partial:
1470 overrides[('ui', 'allowemptycommit')] = True
1472 overrides[('ui', 'allowemptycommit')] = True
1471 with repo.ui.configoverride(overrides, 'import'):
1473 with repo.ui.configoverride(overrides, 'import'):
1472 n = repo.commit(message, user,
1474 n = repo.commit(message, user,
1473 date, match=m,
1475 date, match=m,
1474 editor=editor, extra=extra)
1476 editor=editor, extra=extra)
1475 for idfunc in extrapostimport:
1477 for idfunc in extrapostimport:
1476 extrapostimportmap[idfunc](repo[n])
1478 extrapostimportmap[idfunc](repo[n])
1477 else:
1479 else:
1478 if opts.get('exact') or importbranch:
1480 if opts.get('exact') or importbranch:
1479 branch = branch or 'default'
1481 branch = branch or 'default'
1480 else:
1482 else:
1481 branch = p1.branch()
1483 branch = p1.branch()
1482 store = patch.filestore()
1484 store = patch.filestore()
1483 try:
1485 try:
1484 files = set()
1486 files = set()
1485 try:
1487 try:
1486 patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
1488 patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
1487 files, eolmode=None)
1489 files, eolmode=None)
1488 except error.PatchError as e:
1490 except error.PatchError as e:
1489 raise error.Abort(stringutil.forcebytestr(e))
1491 raise error.Abort(stringutil.forcebytestr(e))
1490 if opts.get('exact'):
1492 if opts.get('exact'):
1491 editor = None
1493 editor = None
1492 else:
1494 else:
1493 editor = getcommiteditor(editform='import.bypass')
1495 editor = getcommiteditor(editform='import.bypass')
1494 memctx = context.memctx(repo, (p1.node(), p2.node()),
1496 memctx = context.memctx(repo, (p1.node(), p2.node()),
1495 message,
1497 message,
1496 files=files,
1498 files=files,
1497 filectxfn=store,
1499 filectxfn=store,
1498 user=user,
1500 user=user,
1499 date=date,
1501 date=date,
1500 branch=branch,
1502 branch=branch,
1501 editor=editor)
1503 editor=editor)
1502 n = memctx.commit()
1504 n = memctx.commit()
1503 finally:
1505 finally:
1504 store.close()
1506 store.close()
1505 if opts.get('exact') and nocommit:
1507 if opts.get('exact') and nocommit:
1506 # --exact with --no-commit is still useful in that it does merge
1508 # --exact with --no-commit is still useful in that it does merge
1507 # and branch bits
1509 # and branch bits
1508 ui.warn(_("warning: can't check exact import with --no-commit\n"))
1510 ui.warn(_("warning: can't check exact import with --no-commit\n"))
1509 elif opts.get('exact') and hex(n) != nodeid:
1511 elif opts.get('exact') and hex(n) != nodeid:
1510 raise error.Abort(_('patch is damaged or loses information'))
1512 raise error.Abort(_('patch is damaged or loses information'))
1511 msg = _('applied to working directory')
1513 msg = _('applied to working directory')
1512 if n:
1514 if n:
1513 # i18n: refers to a short changeset id
1515 # i18n: refers to a short changeset id
1514 msg = _('created %s') % short(n)
1516 msg = _('created %s') % short(n)
1515 return (msg, n, rejects)
1517 return (msg, n, rejects)
1516 finally:
1518 finally:
1517 os.unlink(tmpname)
1519 os.unlink(tmpname)
1518
1520
1519 # facility to let extensions include additional data in an exported patch
1521 # facility to let extensions include additional data in an exported patch
1520 # list of identifiers to be executed in order
1522 # list of identifiers to be executed in order
1521 extraexport = []
1523 extraexport = []
1522 # mapping from identifier to actual export function
1524 # mapping from identifier to actual export function
1523 # function as to return a string to be added to the header or None
1525 # function as to return a string to be added to the header or None
1524 # it is given two arguments (sequencenumber, changectx)
1526 # it is given two arguments (sequencenumber, changectx)
1525 extraexportmap = {}
1527 extraexportmap = {}
1526
1528
1527 def _exportsingle(repo, ctx, match, switch_parent, rev, seqno, write, diffopts):
1529 def _exportsingle(repo, ctx, match, switch_parent, rev, seqno, write, diffopts):
1528 node = scmutil.binnode(ctx)
1530 node = scmutil.binnode(ctx)
1529 parents = [p.node() for p in ctx.parents() if p]
1531 parents = [p.node() for p in ctx.parents() if p]
1530 branch = ctx.branch()
1532 branch = ctx.branch()
1531 if switch_parent:
1533 if switch_parent:
1532 parents.reverse()
1534 parents.reverse()
1533
1535
1534 if parents:
1536 if parents:
1535 prev = parents[0]
1537 prev = parents[0]
1536 else:
1538 else:
1537 prev = nullid
1539 prev = nullid
1538
1540
1539 write("# HG changeset patch\n")
1541 write("# HG changeset patch\n")
1540 write("# User %s\n" % ctx.user())
1542 write("# User %s\n" % ctx.user())
1541 write("# Date %d %d\n" % ctx.date())
1543 write("# Date %d %d\n" % ctx.date())
1542 write("# %s\n" % dateutil.datestr(ctx.date()))
1544 write("# %s\n" % dateutil.datestr(ctx.date()))
1543 if branch and branch != 'default':
1545 if branch and branch != 'default':
1544 write("# Branch %s\n" % branch)
1546 write("# Branch %s\n" % branch)
1545 write("# Node ID %s\n" % hex(node))
1547 write("# Node ID %s\n" % hex(node))
1546 write("# Parent %s\n" % hex(prev))
1548 write("# Parent %s\n" % hex(prev))
1547 if len(parents) > 1:
1549 if len(parents) > 1:
1548 write("# Parent %s\n" % hex(parents[1]))
1550 write("# Parent %s\n" % hex(parents[1]))
1549
1551
1550 for headerid in extraexport:
1552 for headerid in extraexport:
1551 header = extraexportmap[headerid](seqno, ctx)
1553 header = extraexportmap[headerid](seqno, ctx)
1552 if header is not None:
1554 if header is not None:
1553 write('# %s\n' % header)
1555 write('# %s\n' % header)
1554 write(ctx.description().rstrip())
1556 write(ctx.description().rstrip())
1555 write("\n\n")
1557 write("\n\n")
1556
1558
1557 for chunk, label in patch.diffui(repo, prev, node, match, opts=diffopts):
1559 for chunk, label in patch.diffui(repo, prev, node, match, opts=diffopts):
1558 write(chunk, label=label)
1560 write(chunk, label=label)
1559
1561
1560 def export(repo, revs, fntemplate='hg-%h.patch', fp=None, switch_parent=False,
1562 def export(repo, revs, fntemplate='hg-%h.patch', fp=None, switch_parent=False,
1561 opts=None, match=None):
1563 opts=None, match=None):
1562 '''export changesets as hg patches
1564 '''export changesets as hg patches
1563
1565
1564 Args:
1566 Args:
1565 repo: The repository from which we're exporting revisions.
1567 repo: The repository from which we're exporting revisions.
1566 revs: A list of revisions to export as revision numbers.
1568 revs: A list of revisions to export as revision numbers.
1567 fntemplate: An optional string to use for generating patch file names.
1569 fntemplate: An optional string to use for generating patch file names.
1568 fp: An optional file-like object to which patches should be written.
1570 fp: An optional file-like object to which patches should be written.
1569 switch_parent: If True, show diffs against second parent when not nullid.
1571 switch_parent: If True, show diffs against second parent when not nullid.
1570 Default is false, which always shows diff against p1.
1572 Default is false, which always shows diff against p1.
1571 opts: diff options to use for generating the patch.
1573 opts: diff options to use for generating the patch.
1572 match: If specified, only export changes to files matching this matcher.
1574 match: If specified, only export changes to files matching this matcher.
1573
1575
1574 Returns:
1576 Returns:
1575 Nothing.
1577 Nothing.
1576
1578
1577 Side Effect:
1579 Side Effect:
1578 "HG Changeset Patch" data is emitted to one of the following
1580 "HG Changeset Patch" data is emitted to one of the following
1579 destinations:
1581 destinations:
1580 fp is specified: All revs are written to the specified
1582 fp is specified: All revs are written to the specified
1581 file-like object.
1583 file-like object.
1582 fntemplate specified: Each rev is written to a unique file named using
1584 fntemplate specified: Each rev is written to a unique file named using
1583 the given template.
1585 the given template.
1584 Neither fp nor template specified: All revs written to repo.ui.write()
1586 Neither fp nor template specified: All revs written to repo.ui.write()
1585 '''
1587 '''
1586
1588
1587 total = len(revs)
1589 total = len(revs)
1588 revwidth = max(len(str(rev)) for rev in revs)
1590 revwidth = max(len(str(rev)) for rev in revs)
1589 filemode = {}
1591 filemode = {}
1590
1592
1591 write = None
1593 write = None
1592 dest = '<unnamed>'
1594 dest = '<unnamed>'
1593 if fp:
1595 if fp:
1594 dest = getattr(fp, 'name', dest)
1596 dest = getattr(fp, 'name', dest)
1595 def write(s, **kw):
1597 def write(s, **kw):
1596 fp.write(s)
1598 fp.write(s)
1597 elif not fntemplate:
1599 elif not fntemplate:
1598 write = repo.ui.write
1600 write = repo.ui.write
1599
1601
1600 for seqno, rev in enumerate(revs, 1):
1602 for seqno, rev in enumerate(revs, 1):
1601 ctx = repo[rev]
1603 ctx = repo[rev]
1602 fo = None
1604 fo = None
1603 if not fp and fntemplate:
1605 if not fp and fntemplate:
1604 fo = makefileobj(ctx, fntemplate, mode='wb', modemap=filemode,
1606 fo = makefileobj(ctx, fntemplate, mode='wb', modemap=filemode,
1605 total=total, seqno=seqno, revwidth=revwidth)
1607 total=total, seqno=seqno, revwidth=revwidth)
1606 dest = fo.name
1608 dest = fo.name
1607 def write(s, **kw):
1609 def write(s, **kw):
1608 fo.write(s)
1610 fo.write(s)
1609 if not dest.startswith('<'):
1611 if not dest.startswith('<'):
1610 repo.ui.note("%s\n" % dest)
1612 repo.ui.note("%s\n" % dest)
1611 _exportsingle(
1613 _exportsingle(
1612 repo, ctx, match, switch_parent, rev, seqno, write, opts)
1614 repo, ctx, match, switch_parent, rev, seqno, write, opts)
1613 if fo is not None:
1615 if fo is not None:
1614 fo.close()
1616 fo.close()
1615
1617
1616 def showmarker(fm, marker, index=None):
1618 def showmarker(fm, marker, index=None):
1617 """utility function to display obsolescence marker in a readable way
1619 """utility function to display obsolescence marker in a readable way
1618
1620
1619 To be used by debug function."""
1621 To be used by debug function."""
1620 if index is not None:
1622 if index is not None:
1621 fm.write('index', '%i ', index)
1623 fm.write('index', '%i ', index)
1622 fm.write('prednode', '%s ', hex(marker.prednode()))
1624 fm.write('prednode', '%s ', hex(marker.prednode()))
1623 succs = marker.succnodes()
1625 succs = marker.succnodes()
1624 fm.condwrite(succs, 'succnodes', '%s ',
1626 fm.condwrite(succs, 'succnodes', '%s ',
1625 fm.formatlist(map(hex, succs), name='node'))
1627 fm.formatlist(map(hex, succs), name='node'))
1626 fm.write('flag', '%X ', marker.flags())
1628 fm.write('flag', '%X ', marker.flags())
1627 parents = marker.parentnodes()
1629 parents = marker.parentnodes()
1628 if parents is not None:
1630 if parents is not None:
1629 fm.write('parentnodes', '{%s} ',
1631 fm.write('parentnodes', '{%s} ',
1630 fm.formatlist(map(hex, parents), name='node', sep=', '))
1632 fm.formatlist(map(hex, parents), name='node', sep=', '))
1631 fm.write('date', '(%s) ', fm.formatdate(marker.date()))
1633 fm.write('date', '(%s) ', fm.formatdate(marker.date()))
1632 meta = marker.metadata().copy()
1634 meta = marker.metadata().copy()
1633 meta.pop('date', None)
1635 meta.pop('date', None)
1634 smeta = util.rapply(pycompat.maybebytestr, meta)
1636 smeta = util.rapply(pycompat.maybebytestr, meta)
1635 fm.write('metadata', '{%s}', fm.formatdict(smeta, fmt='%r: %r', sep=', '))
1637 fm.write('metadata', '{%s}', fm.formatdict(smeta, fmt='%r: %r', sep=', '))
1636 fm.plain('\n')
1638 fm.plain('\n')
1637
1639
1638 def finddate(ui, repo, date):
1640 def finddate(ui, repo, date):
1639 """Find the tipmost changeset that matches the given date spec"""
1641 """Find the tipmost changeset that matches the given date spec"""
1640
1642
1641 df = dateutil.matchdate(date)
1643 df = dateutil.matchdate(date)
1642 m = scmutil.matchall(repo)
1644 m = scmutil.matchall(repo)
1643 results = {}
1645 results = {}
1644
1646
1645 def prep(ctx, fns):
1647 def prep(ctx, fns):
1646 d = ctx.date()
1648 d = ctx.date()
1647 if df(d[0]):
1649 if df(d[0]):
1648 results[ctx.rev()] = d
1650 results[ctx.rev()] = d
1649
1651
1650 for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
1652 for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
1651 rev = ctx.rev()
1653 rev = ctx.rev()
1652 if rev in results:
1654 if rev in results:
1653 ui.status(_("found revision %s from %s\n") %
1655 ui.status(_("found revision %s from %s\n") %
1654 (rev, dateutil.datestr(results[rev])))
1656 (rev, dateutil.datestr(results[rev])))
1655 return '%d' % rev
1657 return '%d' % rev
1656
1658
1657 raise error.Abort(_("revision matching date not found"))
1659 raise error.Abort(_("revision matching date not found"))
1658
1660
1659 def increasingwindows(windowsize=8, sizelimit=512):
1661 def increasingwindows(windowsize=8, sizelimit=512):
1660 while True:
1662 while True:
1661 yield windowsize
1663 yield windowsize
1662 if windowsize < sizelimit:
1664 if windowsize < sizelimit:
1663 windowsize *= 2
1665 windowsize *= 2
1664
1666
1665 def _walkrevs(repo, opts):
1667 def _walkrevs(repo, opts):
1666 # Default --rev value depends on --follow but --follow behavior
1668 # Default --rev value depends on --follow but --follow behavior
1667 # depends on revisions resolved from --rev...
1669 # depends on revisions resolved from --rev...
1668 follow = opts.get('follow') or opts.get('follow_first')
1670 follow = opts.get('follow') or opts.get('follow_first')
1669 if opts.get('rev'):
1671 if opts.get('rev'):
1670 revs = scmutil.revrange(repo, opts['rev'])
1672 revs = scmutil.revrange(repo, opts['rev'])
1671 elif follow and repo.dirstate.p1() == nullid:
1673 elif follow and repo.dirstate.p1() == nullid:
1672 revs = smartset.baseset()
1674 revs = smartset.baseset()
1673 elif follow:
1675 elif follow:
1674 revs = repo.revs('reverse(:.)')
1676 revs = repo.revs('reverse(:.)')
1675 else:
1677 else:
1676 revs = smartset.spanset(repo)
1678 revs = smartset.spanset(repo)
1677 revs.reverse()
1679 revs.reverse()
1678 return revs
1680 return revs
1679
1681
1680 class FileWalkError(Exception):
1682 class FileWalkError(Exception):
1681 pass
1683 pass
1682
1684
1683 def walkfilerevs(repo, match, follow, revs, fncache):
1685 def walkfilerevs(repo, match, follow, revs, fncache):
1684 '''Walks the file history for the matched files.
1686 '''Walks the file history for the matched files.
1685
1687
1686 Returns the changeset revs that are involved in the file history.
1688 Returns the changeset revs that are involved in the file history.
1687
1689
1688 Throws FileWalkError if the file history can't be walked using
1690 Throws FileWalkError if the file history can't be walked using
1689 filelogs alone.
1691 filelogs alone.
1690 '''
1692 '''
1691 wanted = set()
1693 wanted = set()
1692 copies = []
1694 copies = []
1693 minrev, maxrev = min(revs), max(revs)
1695 minrev, maxrev = min(revs), max(revs)
1694 def filerevgen(filelog, last):
1696 def filerevgen(filelog, last):
1695 """
1697 """
1696 Only files, no patterns. Check the history of each file.
1698 Only files, no patterns. Check the history of each file.
1697
1699
1698 Examines filelog entries within minrev, maxrev linkrev range
1700 Examines filelog entries within minrev, maxrev linkrev range
1699 Returns an iterator yielding (linkrev, parentlinkrevs, copied)
1701 Returns an iterator yielding (linkrev, parentlinkrevs, copied)
1700 tuples in backwards order
1702 tuples in backwards order
1701 """
1703 """
1702 cl_count = len(repo)
1704 cl_count = len(repo)
1703 revs = []
1705 revs = []
1704 for j in xrange(0, last + 1):
1706 for j in xrange(0, last + 1):
1705 linkrev = filelog.linkrev(j)
1707 linkrev = filelog.linkrev(j)
1706 if linkrev < minrev:
1708 if linkrev < minrev:
1707 continue
1709 continue
1708 # only yield rev for which we have the changelog, it can
1710 # only yield rev for which we have the changelog, it can
1709 # happen while doing "hg log" during a pull or commit
1711 # happen while doing "hg log" during a pull or commit
1710 if linkrev >= cl_count:
1712 if linkrev >= cl_count:
1711 break
1713 break
1712
1714
1713 parentlinkrevs = []
1715 parentlinkrevs = []
1714 for p in filelog.parentrevs(j):
1716 for p in filelog.parentrevs(j):
1715 if p != nullrev:
1717 if p != nullrev:
1716 parentlinkrevs.append(filelog.linkrev(p))
1718 parentlinkrevs.append(filelog.linkrev(p))
1717 n = filelog.node(j)
1719 n = filelog.node(j)
1718 revs.append((linkrev, parentlinkrevs,
1720 revs.append((linkrev, parentlinkrevs,
1719 follow and filelog.renamed(n)))
1721 follow and filelog.renamed(n)))
1720
1722
1721 return reversed(revs)
1723 return reversed(revs)
1722 def iterfiles():
1724 def iterfiles():
1723 pctx = repo['.']
1725 pctx = repo['.']
1724 for filename in match.files():
1726 for filename in match.files():
1725 if follow:
1727 if follow:
1726 if filename not in pctx:
1728 if filename not in pctx:
1727 raise error.Abort(_('cannot follow file not in parent '
1729 raise error.Abort(_('cannot follow file not in parent '
1728 'revision: "%s"') % filename)
1730 'revision: "%s"') % filename)
1729 yield filename, pctx[filename].filenode()
1731 yield filename, pctx[filename].filenode()
1730 else:
1732 else:
1731 yield filename, None
1733 yield filename, None
1732 for filename_node in copies:
1734 for filename_node in copies:
1733 yield filename_node
1735 yield filename_node
1734
1736
1735 for file_, node in iterfiles():
1737 for file_, node in iterfiles():
1736 filelog = repo.file(file_)
1738 filelog = repo.file(file_)
1737 if not len(filelog):
1739 if not len(filelog):
1738 if node is None:
1740 if node is None:
1739 # A zero count may be a directory or deleted file, so
1741 # A zero count may be a directory or deleted file, so
1740 # try to find matching entries on the slow path.
1742 # try to find matching entries on the slow path.
1741 if follow:
1743 if follow:
1742 raise error.Abort(
1744 raise error.Abort(
1743 _('cannot follow nonexistent file: "%s"') % file_)
1745 _('cannot follow nonexistent file: "%s"') % file_)
1744 raise FileWalkError("Cannot walk via filelog")
1746 raise FileWalkError("Cannot walk via filelog")
1745 else:
1747 else:
1746 continue
1748 continue
1747
1749
1748 if node is None:
1750 if node is None:
1749 last = len(filelog) - 1
1751 last = len(filelog) - 1
1750 else:
1752 else:
1751 last = filelog.rev(node)
1753 last = filelog.rev(node)
1752
1754
1753 # keep track of all ancestors of the file
1755 # keep track of all ancestors of the file
1754 ancestors = {filelog.linkrev(last)}
1756 ancestors = {filelog.linkrev(last)}
1755
1757
1756 # iterate from latest to oldest revision
1758 # iterate from latest to oldest revision
1757 for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
1759 for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
1758 if not follow:
1760 if not follow:
1759 if rev > maxrev:
1761 if rev > maxrev:
1760 continue
1762 continue
1761 else:
1763 else:
1762 # Note that last might not be the first interesting
1764 # Note that last might not be the first interesting
1763 # rev to us:
1765 # rev to us:
1764 # if the file has been changed after maxrev, we'll
1766 # if the file has been changed after maxrev, we'll
1765 # have linkrev(last) > maxrev, and we still need
1767 # have linkrev(last) > maxrev, and we still need
1766 # to explore the file graph
1768 # to explore the file graph
1767 if rev not in ancestors:
1769 if rev not in ancestors:
1768 continue
1770 continue
1769 # XXX insert 1327 fix here
1771 # XXX insert 1327 fix here
1770 if flparentlinkrevs:
1772 if flparentlinkrevs:
1771 ancestors.update(flparentlinkrevs)
1773 ancestors.update(flparentlinkrevs)
1772
1774
1773 fncache.setdefault(rev, []).append(file_)
1775 fncache.setdefault(rev, []).append(file_)
1774 wanted.add(rev)
1776 wanted.add(rev)
1775 if copied:
1777 if copied:
1776 copies.append(copied)
1778 copies.append(copied)
1777
1779
1778 return wanted
1780 return wanted
1779
1781
1780 class _followfilter(object):
1782 class _followfilter(object):
1781 def __init__(self, repo, onlyfirst=False):
1783 def __init__(self, repo, onlyfirst=False):
1782 self.repo = repo
1784 self.repo = repo
1783 self.startrev = nullrev
1785 self.startrev = nullrev
1784 self.roots = set()
1786 self.roots = set()
1785 self.onlyfirst = onlyfirst
1787 self.onlyfirst = onlyfirst
1786
1788
1787 def match(self, rev):
1789 def match(self, rev):
1788 def realparents(rev):
1790 def realparents(rev):
1789 if self.onlyfirst:
1791 if self.onlyfirst:
1790 return self.repo.changelog.parentrevs(rev)[0:1]
1792 return self.repo.changelog.parentrevs(rev)[0:1]
1791 else:
1793 else:
1792 return filter(lambda x: x != nullrev,
1794 return filter(lambda x: x != nullrev,
1793 self.repo.changelog.parentrevs(rev))
1795 self.repo.changelog.parentrevs(rev))
1794
1796
1795 if self.startrev == nullrev:
1797 if self.startrev == nullrev:
1796 self.startrev = rev
1798 self.startrev = rev
1797 return True
1799 return True
1798
1800
1799 if rev > self.startrev:
1801 if rev > self.startrev:
1800 # forward: all descendants
1802 # forward: all descendants
1801 if not self.roots:
1803 if not self.roots:
1802 self.roots.add(self.startrev)
1804 self.roots.add(self.startrev)
1803 for parent in realparents(rev):
1805 for parent in realparents(rev):
1804 if parent in self.roots:
1806 if parent in self.roots:
1805 self.roots.add(rev)
1807 self.roots.add(rev)
1806 return True
1808 return True
1807 else:
1809 else:
1808 # backwards: all parents
1810 # backwards: all parents
1809 if not self.roots:
1811 if not self.roots:
1810 self.roots.update(realparents(self.startrev))
1812 self.roots.update(realparents(self.startrev))
1811 if rev in self.roots:
1813 if rev in self.roots:
1812 self.roots.remove(rev)
1814 self.roots.remove(rev)
1813 self.roots.update(realparents(rev))
1815 self.roots.update(realparents(rev))
1814 return True
1816 return True
1815
1817
1816 return False
1818 return False
1817
1819
1818 def walkchangerevs(repo, match, opts, prepare):
1820 def walkchangerevs(repo, match, opts, prepare):
1819 '''Iterate over files and the revs in which they changed.
1821 '''Iterate over files and the revs in which they changed.
1820
1822
1821 Callers most commonly need to iterate backwards over the history
1823 Callers most commonly need to iterate backwards over the history
1822 in which they are interested. Doing so has awful (quadratic-looking)
1824 in which they are interested. Doing so has awful (quadratic-looking)
1823 performance, so we use iterators in a "windowed" way.
1825 performance, so we use iterators in a "windowed" way.
1824
1826
1825 We walk a window of revisions in the desired order. Within the
1827 We walk a window of revisions in the desired order. Within the
1826 window, we first walk forwards to gather data, then in the desired
1828 window, we first walk forwards to gather data, then in the desired
1827 order (usually backwards) to display it.
1829 order (usually backwards) to display it.
1828
1830
1829 This function returns an iterator yielding contexts. Before
1831 This function returns an iterator yielding contexts. Before
1830 yielding each context, the iterator will first call the prepare
1832 yielding each context, the iterator will first call the prepare
1831 function on each context in the window in forward order.'''
1833 function on each context in the window in forward order.'''
1832
1834
1833 follow = opts.get('follow') or opts.get('follow_first')
1835 follow = opts.get('follow') or opts.get('follow_first')
1834 revs = _walkrevs(repo, opts)
1836 revs = _walkrevs(repo, opts)
1835 if not revs:
1837 if not revs:
1836 return []
1838 return []
1837 wanted = set()
1839 wanted = set()
1838 slowpath = match.anypats() or (not match.always() and opts.get('removed'))
1840 slowpath = match.anypats() or (not match.always() and opts.get('removed'))
1839 fncache = {}
1841 fncache = {}
1840 change = repo.changectx
1842 change = repo.changectx
1841
1843
1842 # First step is to fill wanted, the set of revisions that we want to yield.
1844 # First step is to fill wanted, the set of revisions that we want to yield.
1843 # When it does not induce extra cost, we also fill fncache for revisions in
1845 # When it does not induce extra cost, we also fill fncache for revisions in
1844 # wanted: a cache of filenames that were changed (ctx.files()) and that
1846 # wanted: a cache of filenames that were changed (ctx.files()) and that
1845 # match the file filtering conditions.
1847 # match the file filtering conditions.
1846
1848
1847 if match.always():
1849 if match.always():
1848 # No files, no patterns. Display all revs.
1850 # No files, no patterns. Display all revs.
1849 wanted = revs
1851 wanted = revs
1850 elif not slowpath:
1852 elif not slowpath:
1851 # We only have to read through the filelog to find wanted revisions
1853 # We only have to read through the filelog to find wanted revisions
1852
1854
1853 try:
1855 try:
1854 wanted = walkfilerevs(repo, match, follow, revs, fncache)
1856 wanted = walkfilerevs(repo, match, follow, revs, fncache)
1855 except FileWalkError:
1857 except FileWalkError:
1856 slowpath = True
1858 slowpath = True
1857
1859
1858 # We decided to fall back to the slowpath because at least one
1860 # We decided to fall back to the slowpath because at least one
1859 # of the paths was not a file. Check to see if at least one of them
1861 # of the paths was not a file. Check to see if at least one of them
1860 # existed in history, otherwise simply return
1862 # existed in history, otherwise simply return
1861 for path in match.files():
1863 for path in match.files():
1862 if path == '.' or path in repo.store:
1864 if path == '.' or path in repo.store:
1863 break
1865 break
1864 else:
1866 else:
1865 return []
1867 return []
1866
1868
1867 if slowpath:
1869 if slowpath:
1868 # We have to read the changelog to match filenames against
1870 # We have to read the changelog to match filenames against
1869 # changed files
1871 # changed files
1870
1872
1871 if follow:
1873 if follow:
1872 raise error.Abort(_('can only follow copies/renames for explicit '
1874 raise error.Abort(_('can only follow copies/renames for explicit '
1873 'filenames'))
1875 'filenames'))
1874
1876
1875 # The slow path checks files modified in every changeset.
1877 # The slow path checks files modified in every changeset.
1876 # This is really slow on large repos, so compute the set lazily.
1878 # This is really slow on large repos, so compute the set lazily.
1877 class lazywantedset(object):
1879 class lazywantedset(object):
1878 def __init__(self):
1880 def __init__(self):
1879 self.set = set()
1881 self.set = set()
1880 self.revs = set(revs)
1882 self.revs = set(revs)
1881
1883
1882 # No need to worry about locality here because it will be accessed
1884 # No need to worry about locality here because it will be accessed
1883 # in the same order as the increasing window below.
1885 # in the same order as the increasing window below.
1884 def __contains__(self, value):
1886 def __contains__(self, value):
1885 if value in self.set:
1887 if value in self.set:
1886 return True
1888 return True
1887 elif not value in self.revs:
1889 elif not value in self.revs:
1888 return False
1890 return False
1889 else:
1891 else:
1890 self.revs.discard(value)
1892 self.revs.discard(value)
1891 ctx = change(value)
1893 ctx = change(value)
1892 matches = [f for f in ctx.files() if match(f)]
1894 matches = [f for f in ctx.files() if match(f)]
1893 if matches:
1895 if matches:
1894 fncache[value] = matches
1896 fncache[value] = matches
1895 self.set.add(value)
1897 self.set.add(value)
1896 return True
1898 return True
1897 return False
1899 return False
1898
1900
1899 def discard(self, value):
1901 def discard(self, value):
1900 self.revs.discard(value)
1902 self.revs.discard(value)
1901 self.set.discard(value)
1903 self.set.discard(value)
1902
1904
1903 wanted = lazywantedset()
1905 wanted = lazywantedset()
1904
1906
1905 # it might be worthwhile to do this in the iterator if the rev range
1907 # it might be worthwhile to do this in the iterator if the rev range
1906 # is descending and the prune args are all within that range
1908 # is descending and the prune args are all within that range
1907 for rev in opts.get('prune', ()):
1909 for rev in opts.get('prune', ()):
1908 rev = repo[rev].rev()
1910 rev = repo[rev].rev()
1909 ff = _followfilter(repo)
1911 ff = _followfilter(repo)
1910 stop = min(revs[0], revs[-1])
1912 stop = min(revs[0], revs[-1])
1911 for x in xrange(rev, stop - 1, -1):
1913 for x in xrange(rev, stop - 1, -1):
1912 if ff.match(x):
1914 if ff.match(x):
1913 wanted = wanted - [x]
1915 wanted = wanted - [x]
1914
1916
1915 # Now that wanted is correctly initialized, we can iterate over the
1917 # Now that wanted is correctly initialized, we can iterate over the
1916 # revision range, yielding only revisions in wanted.
1918 # revision range, yielding only revisions in wanted.
1917 def iterate():
1919 def iterate():
1918 if follow and match.always():
1920 if follow and match.always():
1919 ff = _followfilter(repo, onlyfirst=opts.get('follow_first'))
1921 ff = _followfilter(repo, onlyfirst=opts.get('follow_first'))
1920 def want(rev):
1922 def want(rev):
1921 return ff.match(rev) and rev in wanted
1923 return ff.match(rev) and rev in wanted
1922 else:
1924 else:
1923 def want(rev):
1925 def want(rev):
1924 return rev in wanted
1926 return rev in wanted
1925
1927
1926 it = iter(revs)
1928 it = iter(revs)
1927 stopiteration = False
1929 stopiteration = False
1928 for windowsize in increasingwindows():
1930 for windowsize in increasingwindows():
1929 nrevs = []
1931 nrevs = []
1930 for i in xrange(windowsize):
1932 for i in xrange(windowsize):
1931 rev = next(it, None)
1933 rev = next(it, None)
1932 if rev is None:
1934 if rev is None:
1933 stopiteration = True
1935 stopiteration = True
1934 break
1936 break
1935 elif want(rev):
1937 elif want(rev):
1936 nrevs.append(rev)
1938 nrevs.append(rev)
1937 for rev in sorted(nrevs):
1939 for rev in sorted(nrevs):
1938 fns = fncache.get(rev)
1940 fns = fncache.get(rev)
1939 ctx = change(rev)
1941 ctx = change(rev)
1940 if not fns:
1942 if not fns:
1941 def fns_generator():
1943 def fns_generator():
1942 for f in ctx.files():
1944 for f in ctx.files():
1943 if match(f):
1945 if match(f):
1944 yield f
1946 yield f
1945 fns = fns_generator()
1947 fns = fns_generator()
1946 prepare(ctx, fns)
1948 prepare(ctx, fns)
1947 for rev in nrevs:
1949 for rev in nrevs:
1948 yield change(rev)
1950 yield change(rev)
1949
1951
1950 if stopiteration:
1952 if stopiteration:
1951 break
1953 break
1952
1954
1953 return iterate()
1955 return iterate()
1954
1956
1955 def add(ui, repo, match, prefix, explicitonly, **opts):
1957 def add(ui, repo, match, prefix, explicitonly, **opts):
1956 join = lambda f: os.path.join(prefix, f)
1958 join = lambda f: os.path.join(prefix, f)
1957 bad = []
1959 bad = []
1958
1960
1959 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
1961 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
1960 names = []
1962 names = []
1961 wctx = repo[None]
1963 wctx = repo[None]
1962 cca = None
1964 cca = None
1963 abort, warn = scmutil.checkportabilityalert(ui)
1965 abort, warn = scmutil.checkportabilityalert(ui)
1964 if abort or warn:
1966 if abort or warn:
1965 cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)
1967 cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)
1966
1968
1967 badmatch = matchmod.badmatch(match, badfn)
1969 badmatch = matchmod.badmatch(match, badfn)
1968 dirstate = repo.dirstate
1970 dirstate = repo.dirstate
1969 # We don't want to just call wctx.walk here, since it would return a lot of
1971 # We don't want to just call wctx.walk here, since it would return a lot of
1970 # clean files, which we aren't interested in and takes time.
1972 # clean files, which we aren't interested in and takes time.
1971 for f in sorted(dirstate.walk(badmatch, subrepos=sorted(wctx.substate),
1973 for f in sorted(dirstate.walk(badmatch, subrepos=sorted(wctx.substate),
1972 unknown=True, ignored=False, full=False)):
1974 unknown=True, ignored=False, full=False)):
1973 exact = match.exact(f)
1975 exact = match.exact(f)
1974 if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
1976 if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
1975 if cca:
1977 if cca:
1976 cca(f)
1978 cca(f)
1977 names.append(f)
1979 names.append(f)
1978 if ui.verbose or not exact:
1980 if ui.verbose or not exact:
1979 ui.status(_('adding %s\n') % match.rel(f))
1981 ui.status(_('adding %s\n') % match.rel(f))
1980
1982
1981 for subpath in sorted(wctx.substate):
1983 for subpath in sorted(wctx.substate):
1982 sub = wctx.sub(subpath)
1984 sub = wctx.sub(subpath)
1983 try:
1985 try:
1984 submatch = matchmod.subdirmatcher(subpath, match)
1986 submatch = matchmod.subdirmatcher(subpath, match)
1985 if opts.get(r'subrepos'):
1987 if opts.get(r'subrepos'):
1986 bad.extend(sub.add(ui, submatch, prefix, False, **opts))
1988 bad.extend(sub.add(ui, submatch, prefix, False, **opts))
1987 else:
1989 else:
1988 bad.extend(sub.add(ui, submatch, prefix, True, **opts))
1990 bad.extend(sub.add(ui, submatch, prefix, True, **opts))
1989 except error.LookupError:
1991 except error.LookupError:
1990 ui.status(_("skipping missing subrepository: %s\n")
1992 ui.status(_("skipping missing subrepository: %s\n")
1991 % join(subpath))
1993 % join(subpath))
1992
1994
1993 if not opts.get(r'dry_run'):
1995 if not opts.get(r'dry_run'):
1994 rejected = wctx.add(names, prefix)
1996 rejected = wctx.add(names, prefix)
1995 bad.extend(f for f in rejected if f in match.files())
1997 bad.extend(f for f in rejected if f in match.files())
1996 return bad
1998 return bad
1997
1999
1998 def addwebdirpath(repo, serverpath, webconf):
2000 def addwebdirpath(repo, serverpath, webconf):
1999 webconf[serverpath] = repo.root
2001 webconf[serverpath] = repo.root
2000 repo.ui.debug('adding %s = %s\n' % (serverpath, repo.root))
2002 repo.ui.debug('adding %s = %s\n' % (serverpath, repo.root))
2001
2003
2002 for r in repo.revs('filelog("path:.hgsub")'):
2004 for r in repo.revs('filelog("path:.hgsub")'):
2003 ctx = repo[r]
2005 ctx = repo[r]
2004 for subpath in ctx.substate:
2006 for subpath in ctx.substate:
2005 ctx.sub(subpath).addwebdirpath(serverpath, webconf)
2007 ctx.sub(subpath).addwebdirpath(serverpath, webconf)
2006
2008
2007 def forget(ui, repo, match, prefix, explicitonly, dryrun):
2009 def forget(ui, repo, match, prefix, explicitonly, dryrun):
2008 join = lambda f: os.path.join(prefix, f)
2010 join = lambda f: os.path.join(prefix, f)
2009 bad = []
2011 bad = []
2010 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2012 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2011 wctx = repo[None]
2013 wctx = repo[None]
2012 forgot = []
2014 forgot = []
2013
2015
2014 s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
2016 s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
2015 forget = sorted(s.modified + s.added + s.deleted + s.clean)
2017 forget = sorted(s.modified + s.added + s.deleted + s.clean)
2016 if explicitonly:
2018 if explicitonly:
2017 forget = [f for f in forget if match.exact(f)]
2019 forget = [f for f in forget if match.exact(f)]
2018
2020
2019 for subpath in sorted(wctx.substate):
2021 for subpath in sorted(wctx.substate):
2020 sub = wctx.sub(subpath)
2022 sub = wctx.sub(subpath)
2021 try:
2023 try:
2022 submatch = matchmod.subdirmatcher(subpath, match)
2024 submatch = matchmod.subdirmatcher(subpath, match)
2023 subbad, subforgot = sub.forget(submatch, prefix, dryrun=dryrun)
2025 subbad, subforgot = sub.forget(submatch, prefix, dryrun=dryrun)
2024 bad.extend([subpath + '/' + f for f in subbad])
2026 bad.extend([subpath + '/' + f for f in subbad])
2025 forgot.extend([subpath + '/' + f for f in subforgot])
2027 forgot.extend([subpath + '/' + f for f in subforgot])
2026 except error.LookupError:
2028 except error.LookupError:
2027 ui.status(_("skipping missing subrepository: %s\n")
2029 ui.status(_("skipping missing subrepository: %s\n")
2028 % join(subpath))
2030 % join(subpath))
2029
2031
2030 if not explicitonly:
2032 if not explicitonly:
2031 for f in match.files():
2033 for f in match.files():
2032 if f not in repo.dirstate and not repo.wvfs.isdir(f):
2034 if f not in repo.dirstate and not repo.wvfs.isdir(f):
2033 if f not in forgot:
2035 if f not in forgot:
2034 if repo.wvfs.exists(f):
2036 if repo.wvfs.exists(f):
2035 # Don't complain if the exact case match wasn't given.
2037 # Don't complain if the exact case match wasn't given.
2036 # But don't do this until after checking 'forgot', so
2038 # But don't do this until after checking 'forgot', so
2037 # that subrepo files aren't normalized, and this op is
2039 # that subrepo files aren't normalized, and this op is
2038 # purely from data cached by the status walk above.
2040 # purely from data cached by the status walk above.
2039 if repo.dirstate.normalize(f) in repo.dirstate:
2041 if repo.dirstate.normalize(f) in repo.dirstate:
2040 continue
2042 continue
2041 ui.warn(_('not removing %s: '
2043 ui.warn(_('not removing %s: '
2042 'file is already untracked\n')
2044 'file is already untracked\n')
2043 % match.rel(f))
2045 % match.rel(f))
2044 bad.append(f)
2046 bad.append(f)
2045
2047
2046 for f in forget:
2048 for f in forget:
2047 if ui.verbose or not match.exact(f):
2049 if ui.verbose or not match.exact(f):
2048 ui.status(_('removing %s\n') % match.rel(f))
2050 ui.status(_('removing %s\n') % match.rel(f))
2049
2051
2050 if not dryrun:
2052 if not dryrun:
2051 rejected = wctx.forget(forget, prefix)
2053 rejected = wctx.forget(forget, prefix)
2052 bad.extend(f for f in rejected if f in match.files())
2054 bad.extend(f for f in rejected if f in match.files())
2053 forgot.extend(f for f in forget if f not in rejected)
2055 forgot.extend(f for f in forget if f not in rejected)
2054 return bad, forgot
2056 return bad, forgot
2055
2057
2056 def files(ui, ctx, m, fm, fmt, subrepos):
2058 def files(ui, ctx, m, fm, fmt, subrepos):
2057 rev = ctx.rev()
2059 rev = ctx.rev()
2058 ret = 1
2060 ret = 1
2059 ds = ctx.repo().dirstate
2061 ds = ctx.repo().dirstate
2060
2062
2061 for f in ctx.matches(m):
2063 for f in ctx.matches(m):
2062 if rev is None and ds[f] == 'r':
2064 if rev is None and ds[f] == 'r':
2063 continue
2065 continue
2064 fm.startitem()
2066 fm.startitem()
2065 if ui.verbose:
2067 if ui.verbose:
2066 fc = ctx[f]
2068 fc = ctx[f]
2067 fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
2069 fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
2068 fm.data(abspath=f)
2070 fm.data(abspath=f)
2069 fm.write('path', fmt, m.rel(f))
2071 fm.write('path', fmt, m.rel(f))
2070 ret = 0
2072 ret = 0
2071
2073
2072 for subpath in sorted(ctx.substate):
2074 for subpath in sorted(ctx.substate):
2073 submatch = matchmod.subdirmatcher(subpath, m)
2075 submatch = matchmod.subdirmatcher(subpath, m)
2074 if (subrepos or m.exact(subpath) or any(submatch.files())):
2076 if (subrepos or m.exact(subpath) or any(submatch.files())):
2075 sub = ctx.sub(subpath)
2077 sub = ctx.sub(subpath)
2076 try:
2078 try:
2077 recurse = m.exact(subpath) or subrepos
2079 recurse = m.exact(subpath) or subrepos
2078 if sub.printfiles(ui, submatch, fm, fmt, recurse) == 0:
2080 if sub.printfiles(ui, submatch, fm, fmt, recurse) == 0:
2079 ret = 0
2081 ret = 0
2080 except error.LookupError:
2082 except error.LookupError:
2081 ui.status(_("skipping missing subrepository: %s\n")
2083 ui.status(_("skipping missing subrepository: %s\n")
2082 % m.abs(subpath))
2084 % m.abs(subpath))
2083
2085
2084 return ret
2086 return ret
2085
2087
2086 def remove(ui, repo, m, prefix, after, force, subrepos, dryrun, warnings=None):
2088 def remove(ui, repo, m, prefix, after, force, subrepos, dryrun, warnings=None):
2087 join = lambda f: os.path.join(prefix, f)
2089 join = lambda f: os.path.join(prefix, f)
2088 ret = 0
2090 ret = 0
2089 s = repo.status(match=m, clean=True)
2091 s = repo.status(match=m, clean=True)
2090 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2092 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2091
2093
2092 wctx = repo[None]
2094 wctx = repo[None]
2093
2095
2094 if warnings is None:
2096 if warnings is None:
2095 warnings = []
2097 warnings = []
2096 warn = True
2098 warn = True
2097 else:
2099 else:
2098 warn = False
2100 warn = False
2099
2101
2100 subs = sorted(wctx.substate)
2102 subs = sorted(wctx.substate)
2101 total = len(subs)
2103 total = len(subs)
2102 count = 0
2104 count = 0
2103 for subpath in subs:
2105 for subpath in subs:
2104 count += 1
2106 count += 1
2105 submatch = matchmod.subdirmatcher(subpath, m)
2107 submatch = matchmod.subdirmatcher(subpath, m)
2106 if subrepos or m.exact(subpath) or any(submatch.files()):
2108 if subrepos or m.exact(subpath) or any(submatch.files()):
2107 ui.progress(_('searching'), count, total=total, unit=_('subrepos'))
2109 ui.progress(_('searching'), count, total=total, unit=_('subrepos'))
2108 sub = wctx.sub(subpath)
2110 sub = wctx.sub(subpath)
2109 try:
2111 try:
2110 if sub.removefiles(submatch, prefix, after, force, subrepos,
2112 if sub.removefiles(submatch, prefix, after, force, subrepos,
2111 dryrun, warnings):
2113 dryrun, warnings):
2112 ret = 1
2114 ret = 1
2113 except error.LookupError:
2115 except error.LookupError:
2114 warnings.append(_("skipping missing subrepository: %s\n")
2116 warnings.append(_("skipping missing subrepository: %s\n")
2115 % join(subpath))
2117 % join(subpath))
2116 ui.progress(_('searching'), None)
2118 ui.progress(_('searching'), None)
2117
2119
2118 # warn about failure to delete explicit files/dirs
2120 # warn about failure to delete explicit files/dirs
2119 deleteddirs = util.dirs(deleted)
2121 deleteddirs = util.dirs(deleted)
2120 files = m.files()
2122 files = m.files()
2121 total = len(files)
2123 total = len(files)
2122 count = 0
2124 count = 0
2123 for f in files:
2125 for f in files:
2124 def insubrepo():
2126 def insubrepo():
2125 for subpath in wctx.substate:
2127 for subpath in wctx.substate:
2126 if f.startswith(subpath + '/'):
2128 if f.startswith(subpath + '/'):
2127 return True
2129 return True
2128 return False
2130 return False
2129
2131
2130 count += 1
2132 count += 1
2131 ui.progress(_('deleting'), count, total=total, unit=_('files'))
2133 ui.progress(_('deleting'), count, total=total, unit=_('files'))
2132 isdir = f in deleteddirs or wctx.hasdir(f)
2134 isdir = f in deleteddirs or wctx.hasdir(f)
2133 if (f in repo.dirstate or isdir or f == '.'
2135 if (f in repo.dirstate or isdir or f == '.'
2134 or insubrepo() or f in subs):
2136 or insubrepo() or f in subs):
2135 continue
2137 continue
2136
2138
2137 if repo.wvfs.exists(f):
2139 if repo.wvfs.exists(f):
2138 if repo.wvfs.isdir(f):
2140 if repo.wvfs.isdir(f):
2139 warnings.append(_('not removing %s: no tracked files\n')
2141 warnings.append(_('not removing %s: no tracked files\n')
2140 % m.rel(f))
2142 % m.rel(f))
2141 else:
2143 else:
2142 warnings.append(_('not removing %s: file is untracked\n')
2144 warnings.append(_('not removing %s: file is untracked\n')
2143 % m.rel(f))
2145 % m.rel(f))
2144 # missing files will generate a warning elsewhere
2146 # missing files will generate a warning elsewhere
2145 ret = 1
2147 ret = 1
2146 ui.progress(_('deleting'), None)
2148 ui.progress(_('deleting'), None)
2147
2149
2148 if force:
2150 if force:
2149 list = modified + deleted + clean + added
2151 list = modified + deleted + clean + added
2150 elif after:
2152 elif after:
2151 list = deleted
2153 list = deleted
2152 remaining = modified + added + clean
2154 remaining = modified + added + clean
2153 total = len(remaining)
2155 total = len(remaining)
2154 count = 0
2156 count = 0
2155 for f in remaining:
2157 for f in remaining:
2156 count += 1
2158 count += 1
2157 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2159 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2158 if ui.verbose or (f in files):
2160 if ui.verbose or (f in files):
2159 warnings.append(_('not removing %s: file still exists\n')
2161 warnings.append(_('not removing %s: file still exists\n')
2160 % m.rel(f))
2162 % m.rel(f))
2161 ret = 1
2163 ret = 1
2162 ui.progress(_('skipping'), None)
2164 ui.progress(_('skipping'), None)
2163 else:
2165 else:
2164 list = deleted + clean
2166 list = deleted + clean
2165 total = len(modified) + len(added)
2167 total = len(modified) + len(added)
2166 count = 0
2168 count = 0
2167 for f in modified:
2169 for f in modified:
2168 count += 1
2170 count += 1
2169 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2171 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2170 warnings.append(_('not removing %s: file is modified (use -f'
2172 warnings.append(_('not removing %s: file is modified (use -f'
2171 ' to force removal)\n') % m.rel(f))
2173 ' to force removal)\n') % m.rel(f))
2172 ret = 1
2174 ret = 1
2173 for f in added:
2175 for f in added:
2174 count += 1
2176 count += 1
2175 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2177 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2176 warnings.append(_("not removing %s: file has been marked for add"
2178 warnings.append(_("not removing %s: file has been marked for add"
2177 " (use 'hg forget' to undo add)\n") % m.rel(f))
2179 " (use 'hg forget' to undo add)\n") % m.rel(f))
2178 ret = 1
2180 ret = 1
2179 ui.progress(_('skipping'), None)
2181 ui.progress(_('skipping'), None)
2180
2182
2181 list = sorted(list)
2183 list = sorted(list)
2182 total = len(list)
2184 total = len(list)
2183 count = 0
2185 count = 0
2184 for f in list:
2186 for f in list:
2185 count += 1
2187 count += 1
2186 if ui.verbose or not m.exact(f):
2188 if ui.verbose or not m.exact(f):
2187 ui.progress(_('deleting'), count, total=total, unit=_('files'))
2189 ui.progress(_('deleting'), count, total=total, unit=_('files'))
2188 ui.status(_('removing %s\n') % m.rel(f))
2190 ui.status(_('removing %s\n') % m.rel(f))
2189 ui.progress(_('deleting'), None)
2191 ui.progress(_('deleting'), None)
2190
2192
2191 if not dryrun:
2193 if not dryrun:
2192 with repo.wlock():
2194 with repo.wlock():
2193 if not after:
2195 if not after:
2194 for f in list:
2196 for f in list:
2195 if f in added:
2197 if f in added:
2196 continue # we never unlink added files on remove
2198 continue # we never unlink added files on remove
2197 repo.wvfs.unlinkpath(f, ignoremissing=True)
2199 repo.wvfs.unlinkpath(f, ignoremissing=True)
2198 repo[None].forget(list)
2200 repo[None].forget(list)
2199
2201
2200 if warn:
2202 if warn:
2201 for warning in warnings:
2203 for warning in warnings:
2202 ui.warn(warning)
2204 ui.warn(warning)
2203
2205
2204 return ret
2206 return ret
2205
2207
2206 def _updatecatformatter(fm, ctx, matcher, path, decode):
2208 def _updatecatformatter(fm, ctx, matcher, path, decode):
2207 """Hook for adding data to the formatter used by ``hg cat``.
2209 """Hook for adding data to the formatter used by ``hg cat``.
2208
2210
2209 Extensions (e.g., lfs) can wrap this to inject keywords/data, but must call
2211 Extensions (e.g., lfs) can wrap this to inject keywords/data, but must call
2210 this method first."""
2212 this method first."""
2211 data = ctx[path].data()
2213 data = ctx[path].data()
2212 if decode:
2214 if decode:
2213 data = ctx.repo().wwritedata(path, data)
2215 data = ctx.repo().wwritedata(path, data)
2214 fm.startitem()
2216 fm.startitem()
2215 fm.write('data', '%s', data)
2217 fm.write('data', '%s', data)
2216 fm.data(abspath=path, path=matcher.rel(path))
2218 fm.data(abspath=path, path=matcher.rel(path))
2217
2219
2218 def cat(ui, repo, ctx, matcher, basefm, fntemplate, prefix, **opts):
2220 def cat(ui, repo, ctx, matcher, basefm, fntemplate, prefix, **opts):
2219 err = 1
2221 err = 1
2220 opts = pycompat.byteskwargs(opts)
2222 opts = pycompat.byteskwargs(opts)
2221
2223
2222 def write(path):
2224 def write(path):
2223 filename = None
2225 filename = None
2224 if fntemplate:
2226 if fntemplate:
2225 filename = makefilename(ctx, fntemplate,
2227 filename = makefilename(ctx, fntemplate,
2226 pathname=os.path.join(prefix, path))
2228 pathname=os.path.join(prefix, path))
2227 # attempt to create the directory if it does not already exist
2229 # attempt to create the directory if it does not already exist
2228 try:
2230 try:
2229 os.makedirs(os.path.dirname(filename))
2231 os.makedirs(os.path.dirname(filename))
2230 except OSError:
2232 except OSError:
2231 pass
2233 pass
2232 with formatter.maybereopen(basefm, filename, opts) as fm:
2234 with formatter.maybereopen(basefm, filename, opts) as fm:
2233 _updatecatformatter(fm, ctx, matcher, path, opts.get('decode'))
2235 _updatecatformatter(fm, ctx, matcher, path, opts.get('decode'))
2234
2236
2235 # Automation often uses hg cat on single files, so special case it
2237 # Automation often uses hg cat on single files, so special case it
2236 # for performance to avoid the cost of parsing the manifest.
2238 # for performance to avoid the cost of parsing the manifest.
2237 if len(matcher.files()) == 1 and not matcher.anypats():
2239 if len(matcher.files()) == 1 and not matcher.anypats():
2238 file = matcher.files()[0]
2240 file = matcher.files()[0]
2239 mfl = repo.manifestlog
2241 mfl = repo.manifestlog
2240 mfnode = ctx.manifestnode()
2242 mfnode = ctx.manifestnode()
2241 try:
2243 try:
2242 if mfnode and mfl[mfnode].find(file)[0]:
2244 if mfnode and mfl[mfnode].find(file)[0]:
2243 scmutil.fileprefetchhooks(repo, ctx, [file])
2245 scmutil.fileprefetchhooks(repo, ctx, [file])
2244 write(file)
2246 write(file)
2245 return 0
2247 return 0
2246 except KeyError:
2248 except KeyError:
2247 pass
2249 pass
2248
2250
2249 files = [f for f in ctx.walk(matcher)]
2251 files = [f for f in ctx.walk(matcher)]
2250 scmutil.fileprefetchhooks(repo, ctx, files)
2252 scmutil.fileprefetchhooks(repo, ctx, files)
2251
2253
2252 for abs in files:
2254 for abs in files:
2253 write(abs)
2255 write(abs)
2254 err = 0
2256 err = 0
2255
2257
2256 for subpath in sorted(ctx.substate):
2258 for subpath in sorted(ctx.substate):
2257 sub = ctx.sub(subpath)
2259 sub = ctx.sub(subpath)
2258 try:
2260 try:
2259 submatch = matchmod.subdirmatcher(subpath, matcher)
2261 submatch = matchmod.subdirmatcher(subpath, matcher)
2260
2262
2261 if not sub.cat(submatch, basefm, fntemplate,
2263 if not sub.cat(submatch, basefm, fntemplate,
2262 os.path.join(prefix, sub._path),
2264 os.path.join(prefix, sub._path),
2263 **pycompat.strkwargs(opts)):
2265 **pycompat.strkwargs(opts)):
2264 err = 0
2266 err = 0
2265 except error.RepoLookupError:
2267 except error.RepoLookupError:
2266 ui.status(_("skipping missing subrepository: %s\n")
2268 ui.status(_("skipping missing subrepository: %s\n")
2267 % os.path.join(prefix, subpath))
2269 % os.path.join(prefix, subpath))
2268
2270
2269 return err
2271 return err
2270
2272
2271 def commit(ui, repo, commitfunc, pats, opts):
2273 def commit(ui, repo, commitfunc, pats, opts):
2272 '''commit the specified files or all outstanding changes'''
2274 '''commit the specified files or all outstanding changes'''
2273 date = opts.get('date')
2275 date = opts.get('date')
2274 if date:
2276 if date:
2275 opts['date'] = dateutil.parsedate(date)
2277 opts['date'] = dateutil.parsedate(date)
2276 message = logmessage(ui, opts)
2278 message = logmessage(ui, opts)
2277 matcher = scmutil.match(repo[None], pats, opts)
2279 matcher = scmutil.match(repo[None], pats, opts)
2278
2280
2279 dsguard = None
2281 dsguard = None
2280 # extract addremove carefully -- this function can be called from a command
2282 # extract addremove carefully -- this function can be called from a command
2281 # that doesn't support addremove
2283 # that doesn't support addremove
2282 if opts.get('addremove'):
2284 if opts.get('addremove'):
2283 dsguard = dirstateguard.dirstateguard(repo, 'commit')
2285 dsguard = dirstateguard.dirstateguard(repo, 'commit')
2284 with dsguard or util.nullcontextmanager():
2286 with dsguard or util.nullcontextmanager():
2285 if dsguard:
2287 if dsguard:
2286 if scmutil.addremove(repo, matcher, "", opts) != 0:
2288 if scmutil.addremove(repo, matcher, "", opts) != 0:
2287 raise error.Abort(
2289 raise error.Abort(
2288 _("failed to mark all new/missing files as added/removed"))
2290 _("failed to mark all new/missing files as added/removed"))
2289
2291
2290 return commitfunc(ui, repo, message, matcher, opts)
2292 return commitfunc(ui, repo, message, matcher, opts)
2291
2293
2292 def samefile(f, ctx1, ctx2):
2294 def samefile(f, ctx1, ctx2):
2293 if f in ctx1.manifest():
2295 if f in ctx1.manifest():
2294 a = ctx1.filectx(f)
2296 a = ctx1.filectx(f)
2295 if f in ctx2.manifest():
2297 if f in ctx2.manifest():
2296 b = ctx2.filectx(f)
2298 b = ctx2.filectx(f)
2297 return (not a.cmp(b)
2299 return (not a.cmp(b)
2298 and a.flags() == b.flags())
2300 and a.flags() == b.flags())
2299 else:
2301 else:
2300 return False
2302 return False
2301 else:
2303 else:
2302 return f not in ctx2.manifest()
2304 return f not in ctx2.manifest()
2303
2305
def amend(ui, repo, old, extra, pats, opts):
    """Amend changeset *old* with the working-copy changes selected by
    pats/opts, returning the node of the replacement changeset.

    Returns ``old.node()`` unchanged when nothing would differ (no file
    changes and same description/user/date/extra).  Runs entirely inside
    wlock/lock/transaction; obsolescence markers (or strip) for the old
    node are handled by scmutil.cleanupnodes, and the dirstate is patched
    up for the amended files at the end.
    """
    # avoid cycle context -> subrepo -> cmdutil
    from . import context

    # amend will reuse the existing user if not specified, but the obsolete
    # marker creation requires that the current user's name is specified.
    if obsolete.isenabled(repo, obsolete.createmarkersopt):
        ui.username() # raise exception if username not set

    ui.note(_('amending changeset %s\n') % old)
    base = old.p1()

    with repo.wlock(), repo.lock(), repo.transaction('amend'):
        # Participating changesets:
        #
        # wctx     o - workingctx that contains changes from working copy
        #          |   to go into amending commit
        #          |
        # old      o - changeset to amend
        #          |
        # base     o - first parent of the changeset to amend
        wctx = repo[None]

        # Copy to avoid mutating input
        extra = extra.copy()
        # Update extra dict from amended commit (e.g. to preserve graft
        # source)
        extra.update(old.extra())

        # Also update it from the wctx
        extra.update(wctx.extra())

        user = opts.get('user') or old.user()
        date = opts.get('date') or old.date()

        # Parse the date to allow comparison between date and old.date()
        date = dateutil.parsedate(date)

        if len(old.parents()) > 1:
            # ctx.files() isn't reliable for merges, so fall back to the
            # slower repo.status() method
            files = set([fn for st in repo.status(base, old)[:3]
                         for fn in st])
        else:
            files = set(old.files())

        # add/remove the files to the working copy if the "addremove" option
        # was specified.
        matcher = scmutil.match(wctx, pats, opts)
        if (opts.get('addremove')
            and scmutil.addremove(repo, matcher, "", opts)):
            raise error.Abort(
                _("failed to mark all new/missing files as added/removed"))

        # Check subrepos. This depends on in-place wctx._status update in
        # subrepo.precommit(). To minimize the risk of this hack, we do
        # nothing if .hgsub does not exist.
        if '.hgsub' in wctx or '.hgsub' in old:
            subs, commitsubs, newsubstate = subrepoutil.precommit(
                ui, wctx, wctx._status, matcher)
            # amend should abort if commitsubrepos is enabled
            assert not commitsubs
            if subs:
                subrepoutil.writestate(repo, newsubstate)

        # refuse to amend while a merge has unresolved conflicts
        ms = mergemod.mergestate.read(repo)
        mergeutil.checkunresolved(ms)

        # only working-copy files matched by the patterns are amended
        filestoamend = set(f for f in wctx.files() if matcher(f))

        changes = (len(filestoamend) > 0)
        if changes:
            # Recompute copies (avoid recording a -> b -> a)
            copied = copies.pathcopies(base, wctx, matcher)
            # NOTE(review): 'old.p2' is a bound method and therefore always
            # truthy, so this branch is taken unconditionally — this likely
            # meant 'old.p2()' (skip when there is no second parent); confirm.
            if old.p2:
                copied.update(copies.pathcopies(old.p2(), wctx, matcher))

            # Prune files which were reverted by the updates: if old
            # introduced file X and the file was renamed in the working
            # copy, then those two files are the same and
            # we can discard X from our list of files. Likewise if X
            # was removed, it's no longer relevant. If X is missing (aka
            # deleted), old X must be preserved.
            files.update(filestoamend)
            files = [f for f in files if (not samefile(f, wctx, base)
                                          or f in wctx.deleted())]

            def filectxfn(repo, ctx_, path):
                # memctx callback: produce the file content for *path* in
                # the amended commit (None means "file is removed").
                try:
                    # If the file being considered is not amongst the files
                    # to be amended, we should return the file context from the
                    # old changeset. This avoids issues when only some files in
                    # the working copy are being amended but there are also
                    # changes to other files from the old changeset.
                    if path not in filestoamend:
                        return old.filectx(path)

                    # Return None for removed files.
                    if path in wctx.removed():
                        return None

                    fctx = wctx[path]
                    flags = fctx.flags()
                    mctx = context.memfilectx(repo, ctx_,
                                              fctx.path(), fctx.data(),
                                              islink='l' in flags,
                                              isexec='x' in flags,
                                              copied=copied.get(path))
                    return mctx
                except KeyError:
                    return None
        else:
            ui.note(_('copying changeset %s to %s\n') % (old, base))

            # Use version of files as in the old cset
            def filectxfn(repo, ctx_, path):
                try:
                    return old.filectx(path)
                except KeyError:
                    return None

        # See if we got a message from -m or -l, if not, open the editor with
        # the message of the changeset to amend.
        message = logmessage(ui, opts)

        editform = mergeeditform(old, 'commit.amend')
        editor = getcommiteditor(editform=editform,
                                 **pycompat.strkwargs(opts))

        if not message:
            editor = getcommiteditor(edit=True, editform=editform)
            message = old.description()

        # keep a pre-'amend_source' copy of extra for the no-op comparison
        pureextra = extra.copy()
        extra['amend_source'] = old.hex()

        new = context.memctx(repo,
                             parents=[base.node(), old.p2().node()],
                             text=message,
                             files=files,
                             filectxfn=filectxfn,
                             user=user,
                             date=date,
                             extra=extra,
                             editor=editor)

        newdesc = changelog.stripdesc(new.description())
        if ((not changes)
            and newdesc == old.description()
            and user == old.user()
            and date == old.date()
            and pureextra == old.extra()):
            # nothing changed. continuing here would create a new node
            # anyway because of the amend_source noise.
            #
            # This is not what we expect from amend.
            return old.node()

        # commit the replacement under the old (or requested) phase
        if opts.get('secret'):
            commitphase = 'secret'
        else:
            commitphase = old.phase()
        overrides = {('phases', 'new-commit'): commitphase}
        with ui.configoverride(overrides, 'amend'):
            newid = repo.commitctx(new)

        # Reroute the working copy parent to the new changeset
        repo.setparents(newid, nullid)
        mapping = {old.node(): (newid,)}
        obsmetadata = None
        if opts.get('note'):
            obsmetadata = {'note': opts['note']}
        scmutil.cleanupnodes(repo, mapping, 'amend', metadata=obsmetadata)

        # Fixing the dirstate because localrepo.commitctx does not update
        # it. This is rather convenient because we did not need to update
        # the dirstate for all the files in the new commit which commitctx
        # could have done if it updated the dirstate. Now, we can
        # selectively update the dirstate only for the amended files.
        dirstate = repo.dirstate

        # Update the state of the files which were added and
        # modified in the amend to "normal" in the dirstate.
        normalfiles = set(wctx.modified() + wctx.added()) & filestoamend
        for f in normalfiles:
            dirstate.normal(f)

        # Update the state of files which were removed in the amend
        # to "removed" in the dirstate.
        removedfiles = set(wctx.removed()) & filestoamend
        for f in removedfiles:
            dirstate.drop(f)

        return newid
2498
2500
def commiteditor(repo, ctx, subs, editform=''):
    """Return ctx's existing description, or collect one via the editor.

    The editor run aborts if the user leaves the template unchanged.
    """
    desc = ctx.description()
    if desc:
        return desc
    return commitforceeditor(repo, ctx, subs, editform=editform,
                             unchangedmessagedetection=True)
2504
2506
def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
                      editform='', unchangedmessagedetection=False):
    """Always run the editor to collect a commit message for *ctx*.

    The editor buffer is seeded from the most specific matching
    [committemplate] entry for *editform* (falling back to the built-in
    plain skeleton), shown from the repository root, then stripped of the
    diff section and 'HG:' lines.  Raises error.Abort on an empty message
    or, when unchangedmessagedetection is set, on an unedited template.
    """
    if not extramsg:
        extramsg = _("Leave message empty to abort commit.")

    # try 'changeset.form.sub', then 'changeset.form', ... down to
    # 'changeset'; the while/else arm runs only when no template matched.
    forms = [e for e in editform.split('.') if e]
    forms.insert(0, 'changeset')
    templatetext = None
    while forms:
        ref = '.'.join(forms)
        if repo.ui.config('committemplate', ref):
            templatetext = committext = buildcommittemplate(
                repo, ctx, subs, extramsg, ref)
            break
        forms.pop()
    else:
        committext = buildcommittext(repo, ctx, subs, extramsg)

    # run editor in the repository root
    olddir = pycompat.getcwd()
    os.chdir(repo.root)

    # make in-memory changes visible to external process
    tr = repo.currenttransaction()
    repo.dirstate.write(tr)
    pending = tr and tr.writepending() and repo.root

    editortext = repo.ui.edit(committext, ctx.user(), ctx.extra(),
                              editform=editform, pending=pending,
                              repopath=repo.path, action='commit')
    text = editortext

    # strip away anything below this special string (used for editors that want
    # to display the diff)
    stripbelow = re.search(_linebelow, text, flags=re.MULTILINE)
    if stripbelow:
        text = text[:stripbelow.start()]

    # drop the 'HG:' helper lines before restoring the original cwd
    text = re.sub("(?m)^HG:.*(\n|$)", "", text)
    os.chdir(olddir)

    if finishdesc:
        text = finishdesc(text)
    if not text.strip():
        raise error.Abort(_("empty commit message"))
    if unchangedmessagedetection and editortext == templatetext:
        raise error.Abort(_("commit message unchanged"))

    return text
2554
2556
def buildcommittemplate(repo, ctx, subs, extramsg, ref):
    """Render the [committemplate] template *ref* for *ctx* and return it."""
    ui = repo.ui
    spec = formatter.templatespec(ref, None, None)
    ct = logcmdutil.changesettemplater(ui, repo, spec)
    # expose every [committemplate] item so templates can reference
    # each other by name
    entries = ((k, templater.unquotestring(v))
               for k, v in ui.configitems('committemplate'))
    ct.t.cache.update(entries)

    # ensure that extramsg is a string
    extramsg = extramsg or ''

    ui.pushbuffer()
    ct.show(ctx, extramsg=extramsg)
    return ui.popbuffer()
2568
2570
def hgprefix(msg):
    """Prefix every non-empty line of *msg* with "HG: ", dropping blanks."""
    nonblank = (line for line in msg.split("\n") if line)
    return "\n".join("HG: %s" % line for line in nonblank)
2571
2573
def buildcommittext(repo, ctx, subs, extramsg):
    """Build the plain-text skeleton shown in the commit-message editor.

    The existing description (if any) comes first, followed by 'HG:'
    helper lines describing user, branch/merge/bookmark state, subrepos
    and the per-file change summary.
    """
    lines = []
    add = lines.append
    modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
    if ctx.description():
        add(ctx.description())
    add("")
    add("") # Empty line between message and comments.
    add(hgprefix(_("Enter commit message."
                   " Lines beginning with 'HG:' are removed.")))
    add(hgprefix(extramsg))
    add("HG: --")
    add(hgprefix(_("user: %s") % ctx.user()))
    if ctx.p2():
        add(hgprefix(_("branch merge")))
    if ctx.branch():
        add(hgprefix(_("branch '%s'") % ctx.branch()))
    if bookmarks.isactivewdirparent(repo):
        add(hgprefix(_("bookmark '%s'") % repo._activebookmark))
    for s in subs:
        add(hgprefix(_("subrepo %s") % s))
    for f in added:
        add(hgprefix(_("added %s") % f))
    for f in modified:
        add(hgprefix(_("changed %s") % f))
    for f in removed:
        add(hgprefix(_("removed %s") % f))
    if not (added or modified or removed):
        add(hgprefix(_("no files changed")))
    add("")

    return "\n".join(lines)
2599
2601
def commitstatus(repo, node, branch, bheads=None, opts=None):
    """Print post-commit status messages for the new changeset *node*.

    Emits 'created new head' when the commit adds a branch head (see the
    decision table below), 'reopening closed branch head' when a parent
    had closed the branch, and the committed-changeset line under
    --debug/--verbose.  *bheads* are the branch heads before the commit.
    """
    if opts is None:
        opts = {}
    ctx = repo[node]
    parents = ctx.parents()

    # a new head was created unless this is an amend, or one of the
    # parents was already a head of this branch
    if (not opts.get('amend') and bheads and node not in bheads and not
        [x for x in parents if x.node() in bheads and x.branch() == branch]):
        repo.ui.status(_('created new head\n'))
        # The message is not printed for initial roots. For the other
        # changesets, it is printed in the following situations:
        #
        # Par column: for the 2 parents with ...
        #   N: null or no parent
        #   B: parent is on another named branch
        #   C: parent is a regular non head changeset
        #   H: parent was a branch head of the current branch
        # Msg column: whether we print "created new head" message
        # In the following, it is assumed that there already exists some
        # initial branch heads of the current branch, otherwise nothing is
        # printed anyway.
        #
        # Par Msg Comment
        # N N  y  additional topo root
        #
        # B N  y  additional branch root
        # C N  y  additional topo head
        # H N  n  usual case
        #
        # B B  y  weird additional branch root
        # C B  y  branch merge
        # H B  n  merge with named branch
        #
        # C C  y  additional head from merge
        # C H  n  merge with a head
        #
        # H H  n  head merge: head count decreases

    if not opts.get('close_branch'):
        for r in parents:
            if r.closesbranch() and r.branch() == branch:
                repo.ui.status(_('reopening closed branch head %d\n') % r.rev())

    # --debug prints the full hex hash, --verbose the short form (ctx's
    # string representation)
    if repo.ui.debugflag:
        repo.ui.write(_('committed changeset %d:%s\n') % (ctx.rev(), ctx.hex()))
    elif repo.ui.verbose:
        repo.ui.write(_('committed changeset %d:%s\n') % (ctx.rev(), ctx))
2647
2649
def postcommitstatus(repo, pats, opts):
    """Return the working-directory status for files matching pats/opts."""
    matcher = scmutil.match(repo[None], pats, opts)
    return repo.status(match=matcher)
2650
2652
2651 def revert(ui, repo, ctx, parents, *pats, **opts):
2653 def revert(ui, repo, ctx, parents, *pats, **opts):
2652 opts = pycompat.byteskwargs(opts)
2654 opts = pycompat.byteskwargs(opts)
2653 parent, p2 = parents
2655 parent, p2 = parents
2654 node = ctx.node()
2656 node = ctx.node()
2655
2657
2656 mf = ctx.manifest()
2658 mf = ctx.manifest()
2657 if node == p2:
2659 if node == p2:
2658 parent = p2
2660 parent = p2
2659
2661
2660 # need all matching names in dirstate and manifest of target rev,
2662 # need all matching names in dirstate and manifest of target rev,
2661 # so have to walk both. do not print errors if files exist in one
2663 # so have to walk both. do not print errors if files exist in one
2662 # but not other. in both cases, filesets should be evaluated against
2664 # but not other. in both cases, filesets should be evaluated against
2663 # workingctx to get consistent result (issue4497). this means 'set:**'
2665 # workingctx to get consistent result (issue4497). this means 'set:**'
2664 # cannot be used to select missing files from target rev.
2666 # cannot be used to select missing files from target rev.
2665
2667
2666 # `names` is a mapping for all elements in working copy and target revision
2668 # `names` is a mapping for all elements in working copy and target revision
2667 # The mapping is in the form:
2669 # The mapping is in the form:
2668 # <asb path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
2670 # <asb path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
2669 names = {}
2671 names = {}
2670
2672
2671 with repo.wlock():
2673 with repo.wlock():
2672 ## filling of the `names` mapping
2674 ## filling of the `names` mapping
2673 # walk dirstate to fill `names`
2675 # walk dirstate to fill `names`
2674
2676
2675 interactive = opts.get('interactive', False)
2677 interactive = opts.get('interactive', False)
2676 wctx = repo[None]
2678 wctx = repo[None]
2677 m = scmutil.match(wctx, pats, opts)
2679 m = scmutil.match(wctx, pats, opts)
2678
2680
2679 # we'll need this later
2681 # we'll need this later
2680 targetsubs = sorted(s for s in wctx.substate if m(s))
2682 targetsubs = sorted(s for s in wctx.substate if m(s))
2681
2683
2682 if not m.always():
2684 if not m.always():
2683 matcher = matchmod.badmatch(m, lambda x, y: False)
2685 matcher = matchmod.badmatch(m, lambda x, y: False)
2684 for abs in wctx.walk(matcher):
2686 for abs in wctx.walk(matcher):
2685 names[abs] = m.rel(abs), m.exact(abs)
2687 names[abs] = m.rel(abs), m.exact(abs)
2686
2688
2687 # walk target manifest to fill `names`
2689 # walk target manifest to fill `names`
2688
2690
2689 def badfn(path, msg):
2691 def badfn(path, msg):
2690 if path in names:
2692 if path in names:
2691 return
2693 return
2692 if path in ctx.substate:
2694 if path in ctx.substate:
2693 return
2695 return
2694 path_ = path + '/'
2696 path_ = path + '/'
2695 for f in names:
2697 for f in names:
2696 if f.startswith(path_):
2698 if f.startswith(path_):
2697 return
2699 return
2698 ui.warn("%s: %s\n" % (m.rel(path), msg))
2700 ui.warn("%s: %s\n" % (m.rel(path), msg))
2699
2701
2700 for abs in ctx.walk(matchmod.badmatch(m, badfn)):
2702 for abs in ctx.walk(matchmod.badmatch(m, badfn)):
2701 if abs not in names:
2703 if abs not in names:
2702 names[abs] = m.rel(abs), m.exact(abs)
2704 names[abs] = m.rel(abs), m.exact(abs)
2703
2705
2704 # Find status of all file in `names`.
2706 # Find status of all file in `names`.
2705 m = scmutil.matchfiles(repo, names)
2707 m = scmutil.matchfiles(repo, names)
2706
2708
2707 changes = repo.status(node1=node, match=m,
2709 changes = repo.status(node1=node, match=m,
2708 unknown=True, ignored=True, clean=True)
2710 unknown=True, ignored=True, clean=True)
2709 else:
2711 else:
2710 changes = repo.status(node1=node, match=m)
2712 changes = repo.status(node1=node, match=m)
2711 for kind in changes:
2713 for kind in changes:
2712 for abs in kind:
2714 for abs in kind:
2713 names[abs] = m.rel(abs), m.exact(abs)
2715 names[abs] = m.rel(abs), m.exact(abs)
2714
2716
2715 m = scmutil.matchfiles(repo, names)
2717 m = scmutil.matchfiles(repo, names)
2716
2718
2717 modified = set(changes.modified)
2719 modified = set(changes.modified)
2718 added = set(changes.added)
2720 added = set(changes.added)
2719 removed = set(changes.removed)
2721 removed = set(changes.removed)
2720 _deleted = set(changes.deleted)
2722 _deleted = set(changes.deleted)
2721 unknown = set(changes.unknown)
2723 unknown = set(changes.unknown)
2722 unknown.update(changes.ignored)
2724 unknown.update(changes.ignored)
2723 clean = set(changes.clean)
2725 clean = set(changes.clean)
2724 modadded = set()
2726 modadded = set()
2725
2727
2726 # We need to account for the state of the file in the dirstate,
2728 # We need to account for the state of the file in the dirstate,
2727 # even when we revert against something else than parent. This will
2729 # even when we revert against something else than parent. This will
2728 # slightly alter the behavior of revert (doing back up or not, delete
2730 # slightly alter the behavior of revert (doing back up or not, delete
2729 # or just forget etc).
2731 # or just forget etc).
2730 if parent == node:
2732 if parent == node:
2731 dsmodified = modified
2733 dsmodified = modified
2732 dsadded = added
2734 dsadded = added
2733 dsremoved = removed
2735 dsremoved = removed
2734 # store all local modifications, useful later for rename detection
2736 # store all local modifications, useful later for rename detection
2735 localchanges = dsmodified | dsadded
2737 localchanges = dsmodified | dsadded
2736 modified, added, removed = set(), set(), set()
2738 modified, added, removed = set(), set(), set()
2737 else:
2739 else:
2738 changes = repo.status(node1=parent, match=m)
2740 changes = repo.status(node1=parent, match=m)
2739 dsmodified = set(changes.modified)
2741 dsmodified = set(changes.modified)
2740 dsadded = set(changes.added)
2742 dsadded = set(changes.added)
2741 dsremoved = set(changes.removed)
2743 dsremoved = set(changes.removed)
2742 # store all local modifications, useful later for rename detection
2744 # store all local modifications, useful later for rename detection
2743 localchanges = dsmodified | dsadded
2745 localchanges = dsmodified | dsadded
2744
2746
2745 # only take into account for removes between wc and target
2747 # only take into account for removes between wc and target
2746 clean |= dsremoved - removed
2748 clean |= dsremoved - removed
2747 dsremoved &= removed
2749 dsremoved &= removed
2748 # distinct between dirstate remove and other
2750 # distinct between dirstate remove and other
2749 removed -= dsremoved
2751 removed -= dsremoved
2750
2752
2751 modadded = added & dsmodified
2753 modadded = added & dsmodified
2752 added -= modadded
2754 added -= modadded
2753
2755
2754 # tell newly modified apart.
2756 # tell newly modified apart.
2755 dsmodified &= modified
2757 dsmodified &= modified
2756 dsmodified |= modified & dsadded # dirstate added may need backup
2758 dsmodified |= modified & dsadded # dirstate added may need backup
2757 modified -= dsmodified
2759 modified -= dsmodified
2758
2760
2759 # We need to wait for some post-processing to update this set
2761 # We need to wait for some post-processing to update this set
2760 # before making the distinction. The dirstate will be used for
2762 # before making the distinction. The dirstate will be used for
2761 # that purpose.
2763 # that purpose.
2762 dsadded = added
2764 dsadded = added
2763
2765
2764 # in case of merge, files that are actually added can be reported as
2766 # in case of merge, files that are actually added can be reported as
2765 # modified, we need to post process the result
2767 # modified, we need to post process the result
2766 if p2 != nullid:
2768 if p2 != nullid:
2767 mergeadd = set(dsmodified)
2769 mergeadd = set(dsmodified)
2768 for path in dsmodified:
2770 for path in dsmodified:
2769 if path in mf:
2771 if path in mf:
2770 mergeadd.remove(path)
2772 mergeadd.remove(path)
2771 dsadded |= mergeadd
2773 dsadded |= mergeadd
2772 dsmodified -= mergeadd
2774 dsmodified -= mergeadd
2773
2775
2774 # if f is a rename, update `names` to also revert the source
2776 # if f is a rename, update `names` to also revert the source
2775 cwd = repo.getcwd()
2777 cwd = repo.getcwd()
2776 for f in localchanges:
2778 for f in localchanges:
2777 src = repo.dirstate.copied(f)
2779 src = repo.dirstate.copied(f)
2778 # XXX should we check for rename down to target node?
2780 # XXX should we check for rename down to target node?
2779 if src and src not in names and repo.dirstate[src] == 'r':
2781 if src and src not in names and repo.dirstate[src] == 'r':
2780 dsremoved.add(src)
2782 dsremoved.add(src)
2781 names[src] = (repo.pathto(src, cwd), True)
2783 names[src] = (repo.pathto(src, cwd), True)
2782
2784
2783 # determine the exact nature of the deleted changesets
2785 # determine the exact nature of the deleted changesets
2784 deladded = set(_deleted)
2786 deladded = set(_deleted)
2785 for path in _deleted:
2787 for path in _deleted:
2786 if path in mf:
2788 if path in mf:
2787 deladded.remove(path)
2789 deladded.remove(path)
2788 deleted = _deleted - deladded
2790 deleted = _deleted - deladded
2789
2791
2790 # distinguish between file to forget and the other
2792 # distinguish between file to forget and the other
2791 added = set()
2793 added = set()
2792 for abs in dsadded:
2794 for abs in dsadded:
2793 if repo.dirstate[abs] != 'a':
2795 if repo.dirstate[abs] != 'a':
2794 added.add(abs)
2796 added.add(abs)
2795 dsadded -= added
2797 dsadded -= added
2796
2798
2797 for abs in deladded:
2799 for abs in deladded:
2798 if repo.dirstate[abs] == 'a':
2800 if repo.dirstate[abs] == 'a':
2799 dsadded.add(abs)
2801 dsadded.add(abs)
2800 deladded -= dsadded
2802 deladded -= dsadded
2801
2803
2802 # For files marked as removed, we check if an unknown file is present at
2804 # For files marked as removed, we check if an unknown file is present at
2803 # the same path. If a such file exists it may need to be backed up.
2805 # the same path. If a such file exists it may need to be backed up.
2804 # Making the distinction at this stage helps have simpler backup
2806 # Making the distinction at this stage helps have simpler backup
2805 # logic.
2807 # logic.
2806 removunk = set()
2808 removunk = set()
2807 for abs in removed:
2809 for abs in removed:
2808 target = repo.wjoin(abs)
2810 target = repo.wjoin(abs)
2809 if os.path.lexists(target):
2811 if os.path.lexists(target):
2810 removunk.add(abs)
2812 removunk.add(abs)
2811 removed -= removunk
2813 removed -= removunk
2812
2814
2813 dsremovunk = set()
2815 dsremovunk = set()
2814 for abs in dsremoved:
2816 for abs in dsremoved:
2815 target = repo.wjoin(abs)
2817 target = repo.wjoin(abs)
2816 if os.path.lexists(target):
2818 if os.path.lexists(target):
2817 dsremovunk.add(abs)
2819 dsremovunk.add(abs)
2818 dsremoved -= dsremovunk
2820 dsremoved -= dsremovunk
2819
2821
2820 # action to be actually performed by revert
2822 # action to be actually performed by revert
2821 # (<list of file>, message>) tuple
2823 # (<list of file>, message>) tuple
2822 actions = {'revert': ([], _('reverting %s\n')),
2824 actions = {'revert': ([], _('reverting %s\n')),
2823 'add': ([], _('adding %s\n')),
2825 'add': ([], _('adding %s\n')),
2824 'remove': ([], _('removing %s\n')),
2826 'remove': ([], _('removing %s\n')),
2825 'drop': ([], _('removing %s\n')),
2827 'drop': ([], _('removing %s\n')),
2826 'forget': ([], _('forgetting %s\n')),
2828 'forget': ([], _('forgetting %s\n')),
2827 'undelete': ([], _('undeleting %s\n')),
2829 'undelete': ([], _('undeleting %s\n')),
2828 'noop': (None, _('no changes needed to %s\n')),
2830 'noop': (None, _('no changes needed to %s\n')),
2829 'unknown': (None, _('file not managed: %s\n')),
2831 'unknown': (None, _('file not managed: %s\n')),
2830 }
2832 }
2831
2833
2832 # "constant" that convey the backup strategy.
2834 # "constant" that convey the backup strategy.
2833 # All set to `discard` if `no-backup` is set do avoid checking
2835 # All set to `discard` if `no-backup` is set do avoid checking
2834 # no_backup lower in the code.
2836 # no_backup lower in the code.
2835 # These values are ordered for comparison purposes
2837 # These values are ordered for comparison purposes
2836 backupinteractive = 3 # do backup if interactively modified
2838 backupinteractive = 3 # do backup if interactively modified
2837 backup = 2 # unconditionally do backup
2839 backup = 2 # unconditionally do backup
2838 check = 1 # check if the existing file differs from target
2840 check = 1 # check if the existing file differs from target
2839 discard = 0 # never do backup
2841 discard = 0 # never do backup
2840 if opts.get('no_backup'):
2842 if opts.get('no_backup'):
2841 backupinteractive = backup = check = discard
2843 backupinteractive = backup = check = discard
2842 if interactive:
2844 if interactive:
2843 dsmodifiedbackup = backupinteractive
2845 dsmodifiedbackup = backupinteractive
2844 else:
2846 else:
2845 dsmodifiedbackup = backup
2847 dsmodifiedbackup = backup
2846 tobackup = set()
2848 tobackup = set()
2847
2849
2848 backupanddel = actions['remove']
2850 backupanddel = actions['remove']
2849 if not opts.get('no_backup'):
2851 if not opts.get('no_backup'):
2850 backupanddel = actions['drop']
2852 backupanddel = actions['drop']
2851
2853
2852 disptable = (
2854 disptable = (
2853 # dispatch table:
2855 # dispatch table:
2854 # file state
2856 # file state
2855 # action
2857 # action
2856 # make backup
2858 # make backup
2857
2859
2858 ## Sets that results that will change file on disk
2860 ## Sets that results that will change file on disk
2859 # Modified compared to target, no local change
2861 # Modified compared to target, no local change
2860 (modified, actions['revert'], discard),
2862 (modified, actions['revert'], discard),
2861 # Modified compared to target, but local file is deleted
2863 # Modified compared to target, but local file is deleted
2862 (deleted, actions['revert'], discard),
2864 (deleted, actions['revert'], discard),
2863 # Modified compared to target, local change
2865 # Modified compared to target, local change
2864 (dsmodified, actions['revert'], dsmodifiedbackup),
2866 (dsmodified, actions['revert'], dsmodifiedbackup),
2865 # Added since target
2867 # Added since target
2866 (added, actions['remove'], discard),
2868 (added, actions['remove'], discard),
2867 # Added in working directory
2869 # Added in working directory
2868 (dsadded, actions['forget'], discard),
2870 (dsadded, actions['forget'], discard),
2869 # Added since target, have local modification
2871 # Added since target, have local modification
2870 (modadded, backupanddel, backup),
2872 (modadded, backupanddel, backup),
2871 # Added since target but file is missing in working directory
2873 # Added since target but file is missing in working directory
2872 (deladded, actions['drop'], discard),
2874 (deladded, actions['drop'], discard),
2873 # Removed since target, before working copy parent
2875 # Removed since target, before working copy parent
2874 (removed, actions['add'], discard),
2876 (removed, actions['add'], discard),
2875 # Same as `removed` but an unknown file exists at the same path
2877 # Same as `removed` but an unknown file exists at the same path
2876 (removunk, actions['add'], check),
2878 (removunk, actions['add'], check),
2877 # Removed since targe, marked as such in working copy parent
2879 # Removed since targe, marked as such in working copy parent
2878 (dsremoved, actions['undelete'], discard),
2880 (dsremoved, actions['undelete'], discard),
2879 # Same as `dsremoved` but an unknown file exists at the same path
2881 # Same as `dsremoved` but an unknown file exists at the same path
2880 (dsremovunk, actions['undelete'], check),
2882 (dsremovunk, actions['undelete'], check),
2881 ## the following sets does not result in any file changes
2883 ## the following sets does not result in any file changes
2882 # File with no modification
2884 # File with no modification
2883 (clean, actions['noop'], discard),
2885 (clean, actions['noop'], discard),
2884 # Existing file, not tracked anywhere
2886 # Existing file, not tracked anywhere
2885 (unknown, actions['unknown'], discard),
2887 (unknown, actions['unknown'], discard),
2886 )
2888 )
2887
2889
2888 for abs, (rel, exact) in sorted(names.items()):
2890 for abs, (rel, exact) in sorted(names.items()):
2889 # target file to be touch on disk (relative to cwd)
2891 # target file to be touch on disk (relative to cwd)
2890 target = repo.wjoin(abs)
2892 target = repo.wjoin(abs)
2891 # search the entry in the dispatch table.
2893 # search the entry in the dispatch table.
2892 # if the file is in any of these sets, it was touched in the working
2894 # if the file is in any of these sets, it was touched in the working
2893 # directory parent and we are sure it needs to be reverted.
2895 # directory parent and we are sure it needs to be reverted.
2894 for table, (xlist, msg), dobackup in disptable:
2896 for table, (xlist, msg), dobackup in disptable:
2895 if abs not in table:
2897 if abs not in table:
2896 continue
2898 continue
2897 if xlist is not None:
2899 if xlist is not None:
2898 xlist.append(abs)
2900 xlist.append(abs)
2899 if dobackup:
2901 if dobackup:
2900 # If in interactive mode, don't automatically create
2902 # If in interactive mode, don't automatically create
2901 # .orig files (issue4793)
2903 # .orig files (issue4793)
2902 if dobackup == backupinteractive:
2904 if dobackup == backupinteractive:
2903 tobackup.add(abs)
2905 tobackup.add(abs)
2904 elif (backup <= dobackup or wctx[abs].cmp(ctx[abs])):
2906 elif (backup <= dobackup or wctx[abs].cmp(ctx[abs])):
2905 bakname = scmutil.origpath(ui, repo, rel)
2907 bakname = scmutil.origpath(ui, repo, rel)
2906 ui.note(_('saving current version of %s as %s\n') %
2908 ui.note(_('saving current version of %s as %s\n') %
2907 (rel, bakname))
2909 (rel, bakname))
2908 if not opts.get('dry_run'):
2910 if not opts.get('dry_run'):
2909 if interactive:
2911 if interactive:
2910 util.copyfile(target, bakname)
2912 util.copyfile(target, bakname)
2911 else:
2913 else:
2912 util.rename(target, bakname)
2914 util.rename(target, bakname)
2913 if ui.verbose or not exact:
2915 if ui.verbose or not exact:
2914 if not isinstance(msg, bytes):
2916 if not isinstance(msg, bytes):
2915 msg = msg(abs)
2917 msg = msg(abs)
2916 ui.status(msg % rel)
2918 ui.status(msg % rel)
2917 elif exact:
2919 elif exact:
2918 ui.warn(msg % rel)
2920 ui.warn(msg % rel)
2919 break
2921 break
2920
2922
2921 if not opts.get('dry_run'):
2923 if not opts.get('dry_run'):
2922 needdata = ('revert', 'add', 'undelete')
2924 needdata = ('revert', 'add', 'undelete')
2923 if _revertprefetch is not _revertprefetchstub:
2925 if _revertprefetch is not _revertprefetchstub:
2924 ui.deprecwarn("'cmdutil._revertprefetch' is deprecated, "
2926 ui.deprecwarn("'cmdutil._revertprefetch' is deprecated, "
2925 "add a callback to 'scmutil.fileprefetchhooks'",
2927 "add a callback to 'scmutil.fileprefetchhooks'",
2926 '4.6', stacklevel=1)
2928 '4.6', stacklevel=1)
2927 _revertprefetch(repo, ctx,
2929 _revertprefetch(repo, ctx,
2928 *[actions[name][0] for name in needdata])
2930 *[actions[name][0] for name in needdata])
2929 oplist = [actions[name][0] for name in needdata]
2931 oplist = [actions[name][0] for name in needdata]
2930 prefetch = scmutil.fileprefetchhooks
2932 prefetch = scmutil.fileprefetchhooks
2931 prefetch(repo, ctx, [f for sublist in oplist for f in sublist])
2933 prefetch(repo, ctx, [f for sublist in oplist for f in sublist])
2932 _performrevert(repo, parents, ctx, actions, interactive, tobackup)
2934 _performrevert(repo, parents, ctx, actions, interactive, tobackup)
2933
2935
2934 if targetsubs:
2936 if targetsubs:
2935 # Revert the subrepos on the revert list
2937 # Revert the subrepos on the revert list
2936 for sub in targetsubs:
2938 for sub in targetsubs:
2937 try:
2939 try:
2938 wctx.sub(sub).revert(ctx.substate[sub], *pats,
2940 wctx.sub(sub).revert(ctx.substate[sub], *pats,
2939 **pycompat.strkwargs(opts))
2941 **pycompat.strkwargs(opts))
2940 except KeyError:
2942 except KeyError:
2941 raise error.Abort("subrepository '%s' does not exist in %s!"
2943 raise error.Abort("subrepository '%s' does not exist in %s!"
2942 % (sub, short(ctx.node())))
2944 % (sub, short(ctx.node())))
2943
2945
2944 def _revertprefetchstub(repo, ctx, *files):
2946 def _revertprefetchstub(repo, ctx, *files):
2945 """Stub method for detecting extension wrapping of _revertprefetch(), to
2947 """Stub method for detecting extension wrapping of _revertprefetch(), to
2946 issue a deprecation warning."""
2948 issue a deprecation warning."""
2947
2949
2948 _revertprefetch = _revertprefetchstub
2950 _revertprefetch = _revertprefetchstub
2949
2951
2950 def _performrevert(repo, parents, ctx, actions, interactive=False,
2952 def _performrevert(repo, parents, ctx, actions, interactive=False,
2951 tobackup=None):
2953 tobackup=None):
2952 """function that actually perform all the actions computed for revert
2954 """function that actually perform all the actions computed for revert
2953
2955
2954 This is an independent function to let extension to plug in and react to
2956 This is an independent function to let extension to plug in and react to
2955 the imminent revert.
2957 the imminent revert.
2956
2958
2957 Make sure you have the working directory locked when calling this function.
2959 Make sure you have the working directory locked when calling this function.
2958 """
2960 """
2959 parent, p2 = parents
2961 parent, p2 = parents
2960 node = ctx.node()
2962 node = ctx.node()
2961 excluded_files = []
2963 excluded_files = []
2962
2964
2963 def checkout(f):
2965 def checkout(f):
2964 fc = ctx[f]
2966 fc = ctx[f]
2965 repo.wwrite(f, fc.data(), fc.flags())
2967 repo.wwrite(f, fc.data(), fc.flags())
2966
2968
2967 def doremove(f):
2969 def doremove(f):
2968 try:
2970 try:
2969 repo.wvfs.unlinkpath(f)
2971 repo.wvfs.unlinkpath(f)
2970 except OSError:
2972 except OSError:
2971 pass
2973 pass
2972 repo.dirstate.remove(f)
2974 repo.dirstate.remove(f)
2973
2975
2974 audit_path = pathutil.pathauditor(repo.root, cached=True)
2976 audit_path = pathutil.pathauditor(repo.root, cached=True)
2975 for f in actions['forget'][0]:
2977 for f in actions['forget'][0]:
2976 if interactive:
2978 if interactive:
2977 choice = repo.ui.promptchoice(
2979 choice = repo.ui.promptchoice(
2978 _("forget added file %s (Yn)?$$ &Yes $$ &No") % f)
2980 _("forget added file %s (Yn)?$$ &Yes $$ &No") % f)
2979 if choice == 0:
2981 if choice == 0:
2980 repo.dirstate.drop(f)
2982 repo.dirstate.drop(f)
2981 else:
2983 else:
2982 excluded_files.append(f)
2984 excluded_files.append(f)
2983 else:
2985 else:
2984 repo.dirstate.drop(f)
2986 repo.dirstate.drop(f)
2985 for f in actions['remove'][0]:
2987 for f in actions['remove'][0]:
2986 audit_path(f)
2988 audit_path(f)
2987 if interactive:
2989 if interactive:
2988 choice = repo.ui.promptchoice(
2990 choice = repo.ui.promptchoice(
2989 _("remove added file %s (Yn)?$$ &Yes $$ &No") % f)
2991 _("remove added file %s (Yn)?$$ &Yes $$ &No") % f)
2990 if choice == 0:
2992 if choice == 0:
2991 doremove(f)
2993 doremove(f)
2992 else:
2994 else:
2993 excluded_files.append(f)
2995 excluded_files.append(f)
2994 else:
2996 else:
2995 doremove(f)
2997 doremove(f)
2996 for f in actions['drop'][0]:
2998 for f in actions['drop'][0]:
2997 audit_path(f)
2999 audit_path(f)
2998 repo.dirstate.remove(f)
3000 repo.dirstate.remove(f)
2999
3001
3000 normal = None
3002 normal = None
3001 if node == parent:
3003 if node == parent:
3002 # We're reverting to our parent. If possible, we'd like status
3004 # We're reverting to our parent. If possible, we'd like status
3003 # to report the file as clean. We have to use normallookup for
3005 # to report the file as clean. We have to use normallookup for
3004 # merges to avoid losing information about merged/dirty files.
3006 # merges to avoid losing information about merged/dirty files.
3005 if p2 != nullid:
3007 if p2 != nullid:
3006 normal = repo.dirstate.normallookup
3008 normal = repo.dirstate.normallookup
3007 else:
3009 else:
3008 normal = repo.dirstate.normal
3010 normal = repo.dirstate.normal
3009
3011
3010 newlyaddedandmodifiedfiles = set()
3012 newlyaddedandmodifiedfiles = set()
3011 if interactive:
3013 if interactive:
3012 # Prompt the user for changes to revert
3014 # Prompt the user for changes to revert
3013 torevert = [f for f in actions['revert'][0] if f not in excluded_files]
3015 torevert = [f for f in actions['revert'][0] if f not in excluded_files]
3014 m = scmutil.matchfiles(repo, torevert)
3016 m = scmutil.matchfiles(repo, torevert)
3015 diffopts = patch.difffeatureopts(repo.ui, whitespace=True)
3017 diffopts = patch.difffeatureopts(repo.ui, whitespace=True)
3016 diffopts.nodates = True
3018 diffopts.nodates = True
3017 diffopts.git = True
3019 diffopts.git = True
3018 operation = 'discard'
3020 operation = 'discard'
3019 reversehunks = True
3021 reversehunks = True
3020 if node != parent:
3022 if node != parent:
3021 operation = 'apply'
3023 operation = 'apply'
3022 reversehunks = False
3024 reversehunks = False
3023 if reversehunks:
3025 if reversehunks:
3024 diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
3026 diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
3025 else:
3027 else:
3026 diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
3028 diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
3027 originalchunks = patch.parsepatch(diff)
3029 originalchunks = patch.parsepatch(diff)
3028
3030
3029 try:
3031 try:
3030
3032
3031 chunks, opts = recordfilter(repo.ui, originalchunks,
3033 chunks, opts = recordfilter(repo.ui, originalchunks,
3032 operation=operation)
3034 operation=operation)
3033 if reversehunks:
3035 if reversehunks:
3034 chunks = patch.reversehunks(chunks)
3036 chunks = patch.reversehunks(chunks)
3035
3037
3036 except error.PatchError as err:
3038 except error.PatchError as err:
3037 raise error.Abort(_('error parsing patch: %s') % err)
3039 raise error.Abort(_('error parsing patch: %s') % err)
3038
3040
3039 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
3041 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
3040 if tobackup is None:
3042 if tobackup is None:
3041 tobackup = set()
3043 tobackup = set()
3042 # Apply changes
3044 # Apply changes
3043 fp = stringio()
3045 fp = stringio()
3044 for c in chunks:
3046 for c in chunks:
3045 # Create a backup file only if this hunk should be backed up
3047 # Create a backup file only if this hunk should be backed up
3046 if ishunk(c) and c.header.filename() in tobackup:
3048 if ishunk(c) and c.header.filename() in tobackup:
3047 abs = c.header.filename()
3049 abs = c.header.filename()
3048 target = repo.wjoin(abs)
3050 target = repo.wjoin(abs)
3049 bakname = scmutil.origpath(repo.ui, repo, m.rel(abs))
3051 bakname = scmutil.origpath(repo.ui, repo, m.rel(abs))
3050 util.copyfile(target, bakname)
3052 util.copyfile(target, bakname)
3051 tobackup.remove(abs)
3053 tobackup.remove(abs)
3052 c.write(fp)
3054 c.write(fp)
3053 dopatch = fp.tell()
3055 dopatch = fp.tell()
3054 fp.seek(0)
3056 fp.seek(0)
3055 if dopatch:
3057 if dopatch:
3056 try:
3058 try:
3057 patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
3059 patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
3058 except error.PatchError as err:
3060 except error.PatchError as err:
3059 raise error.Abort(pycompat.bytestr(err))
3061 raise error.Abort(pycompat.bytestr(err))
3060 del fp
3062 del fp
3061 else:
3063 else:
3062 for f in actions['revert'][0]:
3064 for f in actions['revert'][0]:
3063 checkout(f)
3065 checkout(f)
3064 if normal:
3066 if normal:
3065 normal(f)
3067 normal(f)
3066
3068
3067 for f in actions['add'][0]:
3069 for f in actions['add'][0]:
3068 # Don't checkout modified files, they are already created by the diff
3070 # Don't checkout modified files, they are already created by the diff
3069 if f not in newlyaddedandmodifiedfiles:
3071 if f not in newlyaddedandmodifiedfiles:
3070 checkout(f)
3072 checkout(f)
3071 repo.dirstate.add(f)
3073 repo.dirstate.add(f)
3072
3074
3073 normal = repo.dirstate.normallookup
3075 normal = repo.dirstate.normallookup
3074 if node == parent and p2 == nullid:
3076 if node == parent and p2 == nullid:
3075 normal = repo.dirstate.normal
3077 normal = repo.dirstate.normal
3076 for f in actions['undelete'][0]:
3078 for f in actions['undelete'][0]:
3077 checkout(f)
3079 checkout(f)
3078 normal(f)
3080 normal(f)
3079
3081
3080 copied = copies.pathcopies(repo[parent], ctx)
3082 copied = copies.pathcopies(repo[parent], ctx)
3081
3083
3082 for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
3084 for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
3083 if f in copied:
3085 if f in copied:
3084 repo.dirstate.copy(copied[f], f)
3086 repo.dirstate.copy(copied[f], f)
3085
3087
3086 class command(registrar.command):
3088 class command(registrar.command):
3087 """deprecated: used registrar.command instead"""
3089 """deprecated: used registrar.command instead"""
3088 def _doregister(self, func, name, *args, **kwargs):
3090 def _doregister(self, func, name, *args, **kwargs):
3089 func._deprecatedregistrar = True # flag for deprecwarn in extensions.py
3091 func._deprecatedregistrar = True # flag for deprecwarn in extensions.py
3090 return super(command, self)._doregister(func, name, *args, **kwargs)
3092 return super(command, self)._doregister(func, name, *args, **kwargs)
3091
3093
3092 # a list of (ui, repo, otherpeer, opts, missing) functions called by
3094 # a list of (ui, repo, otherpeer, opts, missing) functions called by
3093 # commands.outgoing. "missing" is "missing" of the result of
3095 # commands.outgoing. "missing" is "missing" of the result of
3094 # "findcommonoutgoing()"
3096 # "findcommonoutgoing()"
3095 outgoinghooks = util.hooks()
3097 outgoinghooks = util.hooks()
3096
3098
3097 # a list of (ui, repo) functions called by commands.summary
3099 # a list of (ui, repo) functions called by commands.summary
3098 summaryhooks = util.hooks()
3100 summaryhooks = util.hooks()
3099
3101
3100 # a list of (ui, repo, opts, changes) functions called by commands.summary.
3102 # a list of (ui, repo, opts, changes) functions called by commands.summary.
3101 #
3103 #
3102 # functions should return tuple of booleans below, if 'changes' is None:
3104 # functions should return tuple of booleans below, if 'changes' is None:
3103 # (whether-incomings-are-needed, whether-outgoings-are-needed)
3105 # (whether-incomings-are-needed, whether-outgoings-are-needed)
3104 #
3106 #
3105 # otherwise, 'changes' is a tuple of tuples below:
3107 # otherwise, 'changes' is a tuple of tuples below:
3106 # - (sourceurl, sourcebranch, sourcepeer, incoming)
3108 # - (sourceurl, sourcebranch, sourcepeer, incoming)
3107 # - (desturl, destbranch, destpeer, outgoing)
3109 # - (desturl, destbranch, destpeer, outgoing)
3108 summaryremotehooks = util.hooks()
3110 summaryremotehooks = util.hooks()
3109
3111
3110 # A list of state files kept by multistep operations like graft.
3112 # A list of state files kept by multistep operations like graft.
3111 # Since graft cannot be aborted, it is considered 'clearable' by update.
3113 # Since graft cannot be aborted, it is considered 'clearable' by update.
3112 # note: bisect is intentionally excluded
3114 # note: bisect is intentionally excluded
3113 # (state file, clearable, allowcommit, error, hint)
3115 # (state file, clearable, allowcommit, error, hint)
3114 unfinishedstates = [
3116 unfinishedstates = [
3115 ('graftstate', True, False, _('graft in progress'),
3117 ('graftstate', True, False, _('graft in progress'),
3116 _("use 'hg graft --continue' or 'hg update' to abort")),
3118 _("use 'hg graft --continue' or 'hg update' to abort")),
3117 ('updatestate', True, False, _('last update was interrupted'),
3119 ('updatestate', True, False, _('last update was interrupted'),
3118 _("use 'hg update' to get a consistent checkout"))
3120 _("use 'hg update' to get a consistent checkout"))
3119 ]
3121 ]
3120
3122
3121 def checkunfinished(repo, commit=False):
3123 def checkunfinished(repo, commit=False):
3122 '''Look for an unfinished multistep operation, like graft, and abort
3124 '''Look for an unfinished multistep operation, like graft, and abort
3123 if found. It's probably good to check this right before
3125 if found. It's probably good to check this right before
3124 bailifchanged().
3126 bailifchanged().
3125 '''
3127 '''
3126 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3128 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3127 if commit and allowcommit:
3129 if commit and allowcommit:
3128 continue
3130 continue
3129 if repo.vfs.exists(f):
3131 if repo.vfs.exists(f):
3130 raise error.Abort(msg, hint=hint)
3132 raise error.Abort(msg, hint=hint)
3131
3133
3132 def clearunfinished(repo):
3134 def clearunfinished(repo):
3133 '''Check for unfinished operations (as above), and clear the ones
3135 '''Check for unfinished operations (as above), and clear the ones
3134 that are clearable.
3136 that are clearable.
3135 '''
3137 '''
3136 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3138 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3137 if not clearable and repo.vfs.exists(f):
3139 if not clearable and repo.vfs.exists(f):
3138 raise error.Abort(msg, hint=hint)
3140 raise error.Abort(msg, hint=hint)
3139 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3141 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3140 if clearable and repo.vfs.exists(f):
3142 if clearable and repo.vfs.exists(f):
3141 util.unlink(repo.vfs.join(f))
3143 util.unlink(repo.vfs.join(f))
3142
3144
3143 afterresolvedstates = [
3145 afterresolvedstates = [
3144 ('graftstate',
3146 ('graftstate',
3145 _('hg graft --continue')),
3147 _('hg graft --continue')),
3146 ]
3148 ]
3147
3149
3148 def howtocontinue(repo):
3150 def howtocontinue(repo):
3149 '''Check for an unfinished operation and return the command to finish
3151 '''Check for an unfinished operation and return the command to finish
3150 it.
3152 it.
3151
3153
3152 afterresolvedstates tuples define a .hg/{file} and the corresponding
3154 afterresolvedstates tuples define a .hg/{file} and the corresponding
3153 command needed to finish it.
3155 command needed to finish it.
3154
3156
3155 Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
3157 Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
3156 a boolean.
3158 a boolean.
3157 '''
3159 '''
3158 contmsg = _("continue: %s")
3160 contmsg = _("continue: %s")
3159 for f, msg in afterresolvedstates:
3161 for f, msg in afterresolvedstates:
3160 if repo.vfs.exists(f):
3162 if repo.vfs.exists(f):
3161 return contmsg % msg, True
3163 return contmsg % msg, True
3162 if repo[None].dirty(missing=True, merge=False, branch=False):
3164 if repo[None].dirty(missing=True, merge=False, branch=False):
3163 return contmsg % _("hg commit"), False
3165 return contmsg % _("hg commit"), False
3164 return None, None
3166 return None, None
3165
3167
3166 def checkafterresolved(repo):
3168 def checkafterresolved(repo):
3167 '''Inform the user about the next action after completing hg resolve
3169 '''Inform the user about the next action after completing hg resolve
3168
3170
3169 If there's a matching afterresolvedstates, howtocontinue will yield
3171 If there's a matching afterresolvedstates, howtocontinue will yield
3170 repo.ui.warn as the reporter.
3172 repo.ui.warn as the reporter.
3171
3173
3172 Otherwise, it will yield repo.ui.note.
3174 Otherwise, it will yield repo.ui.note.
3173 '''
3175 '''
3174 msg, warning = howtocontinue(repo)
3176 msg, warning = howtocontinue(repo)
3175 if msg is not None:
3177 if msg is not None:
3176 if warning:
3178 if warning:
3177 repo.ui.warn("%s\n" % msg)
3179 repo.ui.warn("%s\n" % msg)
3178 else:
3180 else:
3179 repo.ui.note("%s\n" % msg)
3181 repo.ui.note("%s\n" % msg)
3180
3182
3181 def wrongtooltocontinue(repo, task):
3183 def wrongtooltocontinue(repo, task):
3182 '''Raise an abort suggesting how to properly continue if there is an
3184 '''Raise an abort suggesting how to properly continue if there is an
3183 active task.
3185 active task.
3184
3186
3185 Uses howtocontinue() to find the active task.
3187 Uses howtocontinue() to find the active task.
3186
3188
3187 If there's no task (repo.ui.note for 'hg commit'), it does not offer
3189 If there's no task (repo.ui.note for 'hg commit'), it does not offer
3188 a hint.
3190 a hint.
3189 '''
3191 '''
3190 after = howtocontinue(repo)
3192 after = howtocontinue(repo)
3191 hint = None
3193 hint = None
3192 if after[1]:
3194 if after[1]:
3193 hint = after[0]
3195 hint = after[0]
3194 raise error.Abort(_('no %s in progress') % task, hint=hint)
3196 raise error.Abort(_('no %s in progress') % task, hint=hint)
3195
3197
3196 class changeset_printer(logcmdutil.changesetprinter):
3198 class changeset_printer(logcmdutil.changesetprinter):
3197
3199
3198 def __init__(self, ui, *args, **kwargs):
3200 def __init__(self, ui, *args, **kwargs):
3199 msg = ("'cmdutil.changeset_printer' is deprecated, "
3201 msg = ("'cmdutil.changeset_printer' is deprecated, "
3200 "use 'logcmdutil.logcmdutil'")
3202 "use 'logcmdutil.logcmdutil'")
3201 ui.deprecwarn(msg, "4.6")
3203 ui.deprecwarn(msg, "4.6")
3202 super(changeset_printer, self).__init__(ui, *args, **kwargs)
3204 super(changeset_printer, self).__init__(ui, *args, **kwargs)
3203
3205
3204 def displaygraph(ui, *args, **kwargs):
3206 def displaygraph(ui, *args, **kwargs):
3205 msg = ("'cmdutil.displaygraph' is deprecated, "
3207 msg = ("'cmdutil.displaygraph' is deprecated, "
3206 "use 'logcmdutil.displaygraph'")
3208 "use 'logcmdutil.displaygraph'")
3207 ui.deprecwarn(msg, "4.6")
3209 ui.deprecwarn(msg, "4.6")
3208 return logcmdutil.displaygraph(ui, *args, **kwargs)
3210 return logcmdutil.displaygraph(ui, *args, **kwargs)
3209
3211
3210 def show_changeset(ui, *args, **kwargs):
3212 def show_changeset(ui, *args, **kwargs):
3211 msg = ("'cmdutil.show_changeset' is deprecated, "
3213 msg = ("'cmdutil.show_changeset' is deprecated, "
3212 "use 'logcmdutil.changesetdisplayer'")
3214 "use 'logcmdutil.changesetdisplayer'")
3213 ui.deprecwarn(msg, "4.6")
3215 ui.deprecwarn(msg, "4.6")
3214 return logcmdutil.changesetdisplayer(ui, *args, **kwargs)
3216 return logcmdutil.changesetdisplayer(ui, *args, **kwargs)
@@ -1,1573 +1,1571 b''
1 # manifest.py - manifest revision class for mercurial
1 # manifest.py - manifest revision class for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import heapq
10 import heapq
11 import itertools
11 import itertools
12 import struct
12 import struct
13
13
14 from .i18n import _
14 from .i18n import _
15 from .node import (
15 from .node import (
16 bin,
16 bin,
17 hex,
17 hex,
18 )
18 )
19 from . import (
19 from . import (
20 error,
20 error,
21 mdiff,
21 mdiff,
22 policy,
22 policy,
23 revlog,
23 revlog,
24 util,
24 util,
25 )
25 )
26
26
27 parsers = policy.importmod(r'parsers')
27 parsers = policy.importmod(r'parsers')
28 propertycache = util.propertycache
28 propertycache = util.propertycache
29
29
30 def _parse(data):
30 def _parse(data):
31 # This method does a little bit of excessive-looking
31 # This method does a little bit of excessive-looking
32 # precondition checking. This is so that the behavior of this
32 # precondition checking. This is so that the behavior of this
33 # class exactly matches its C counterpart to try and help
33 # class exactly matches its C counterpart to try and help
34 # prevent surprise breakage for anyone that develops against
34 # prevent surprise breakage for anyone that develops against
35 # the pure version.
35 # the pure version.
36 if data and data[-1:] != '\n':
36 if data and data[-1:] != '\n':
37 raise ValueError('Manifest did not end in a newline.')
37 raise ValueError('Manifest did not end in a newline.')
38 prev = None
38 prev = None
39 for l in data.splitlines():
39 for l in data.splitlines():
40 if prev is not None and prev > l:
40 if prev is not None and prev > l:
41 raise ValueError('Manifest lines not in sorted order.')
41 raise ValueError('Manifest lines not in sorted order.')
42 prev = l
42 prev = l
43 f, n = l.split('\0')
43 f, n = l.split('\0')
44 if len(n) > 40:
44 if len(n) > 40:
45 yield f, bin(n[:40]), n[40:]
45 yield f, bin(n[:40]), n[40:]
46 else:
46 else:
47 yield f, bin(n), ''
47 yield f, bin(n), ''
48
48
49 def _text(it):
49 def _text(it):
50 files = []
50 files = []
51 lines = []
51 lines = []
52 _hex = revlog.hex
52 _hex = revlog.hex
53 for f, n, fl in it:
53 for f, n, fl in it:
54 files.append(f)
54 files.append(f)
55 # if this is changed to support newlines in filenames,
55 # if this is changed to support newlines in filenames,
56 # be sure to check the templates/ dir again (especially *-raw.tmpl)
56 # be sure to check the templates/ dir again (especially *-raw.tmpl)
57 lines.append("%s\0%s%s\n" % (f, _hex(n), fl))
57 lines.append("%s\0%s%s\n" % (f, _hex(n), fl))
58
58
59 _checkforbidden(files)
59 _checkforbidden(files)
60 return ''.join(lines)
60 return ''.join(lines)
61
61
62 class lazymanifestiter(object):
62 class lazymanifestiter(object):
63 def __init__(self, lm):
63 def __init__(self, lm):
64 self.pos = 0
64 self.pos = 0
65 self.lm = lm
65 self.lm = lm
66
66
67 def __iter__(self):
67 def __iter__(self):
68 return self
68 return self
69
69
70 def next(self):
70 def next(self):
71 try:
71 try:
72 data, pos = self.lm._get(self.pos)
72 data, pos = self.lm._get(self.pos)
73 except IndexError:
73 except IndexError:
74 raise StopIteration
74 raise StopIteration
75 if pos == -1:
75 if pos == -1:
76 self.pos += 1
76 self.pos += 1
77 return data[0]
77 return data[0]
78 self.pos += 1
78 self.pos += 1
79 zeropos = data.find('\x00', pos)
79 zeropos = data.find('\x00', pos)
80 return data[pos:zeropos]
80 return data[pos:zeropos]
81
81
82 __next__ = next
82 __next__ = next
83
83
84 class lazymanifestiterentries(object):
84 class lazymanifestiterentries(object):
85 def __init__(self, lm):
85 def __init__(self, lm):
86 self.lm = lm
86 self.lm = lm
87 self.pos = 0
87 self.pos = 0
88
88
89 def __iter__(self):
89 def __iter__(self):
90 return self
90 return self
91
91
92 def next(self):
92 def next(self):
93 try:
93 try:
94 data, pos = self.lm._get(self.pos)
94 data, pos = self.lm._get(self.pos)
95 except IndexError:
95 except IndexError:
96 raise StopIteration
96 raise StopIteration
97 if pos == -1:
97 if pos == -1:
98 self.pos += 1
98 self.pos += 1
99 return data
99 return data
100 zeropos = data.find('\x00', pos)
100 zeropos = data.find('\x00', pos)
101 hashval = unhexlify(data, self.lm.extrainfo[self.pos],
101 hashval = unhexlify(data, self.lm.extrainfo[self.pos],
102 zeropos + 1, 40)
102 zeropos + 1, 40)
103 flags = self.lm._getflags(data, self.pos, zeropos)
103 flags = self.lm._getflags(data, self.pos, zeropos)
104 self.pos += 1
104 self.pos += 1
105 return (data[pos:zeropos], hashval, flags)
105 return (data[pos:zeropos], hashval, flags)
106
106
107 __next__ = next
107 __next__ = next
108
108
109 def unhexlify(data, extra, pos, length):
109 def unhexlify(data, extra, pos, length):
110 s = bin(data[pos:pos + length])
110 s = bin(data[pos:pos + length])
111 if extra:
111 if extra:
112 s += chr(extra & 0xff)
112 s += chr(extra & 0xff)
113 return s
113 return s
114
114
115 def _cmp(a, b):
115 def _cmp(a, b):
116 return (a > b) - (a < b)
116 return (a > b) - (a < b)
117
117
118 class _lazymanifest(object):
118 class _lazymanifest(object):
119 def __init__(self, data, positions=None, extrainfo=None, extradata=None):
119 def __init__(self, data, positions=None, extrainfo=None, extradata=None):
120 if positions is None:
120 if positions is None:
121 self.positions = self.findlines(data)
121 self.positions = self.findlines(data)
122 self.extrainfo = [0] * len(self.positions)
122 self.extrainfo = [0] * len(self.positions)
123 self.data = data
123 self.data = data
124 self.extradata = []
124 self.extradata = []
125 else:
125 else:
126 self.positions = positions[:]
126 self.positions = positions[:]
127 self.extrainfo = extrainfo[:]
127 self.extrainfo = extrainfo[:]
128 self.extradata = extradata[:]
128 self.extradata = extradata[:]
129 self.data = data
129 self.data = data
130
130
131 def findlines(self, data):
131 def findlines(self, data):
132 if not data:
132 if not data:
133 return []
133 return []
134 pos = data.find("\n")
134 pos = data.find("\n")
135 if pos == -1 or data[-1:] != '\n':
135 if pos == -1 or data[-1:] != '\n':
136 raise ValueError("Manifest did not end in a newline.")
136 raise ValueError("Manifest did not end in a newline.")
137 positions = [0]
137 positions = [0]
138 prev = data[:data.find('\x00')]
138 prev = data[:data.find('\x00')]
139 while pos < len(data) - 1 and pos != -1:
139 while pos < len(data) - 1 and pos != -1:
140 positions.append(pos + 1)
140 positions.append(pos + 1)
141 nexts = data[pos + 1:data.find('\x00', pos + 1)]
141 nexts = data[pos + 1:data.find('\x00', pos + 1)]
142 if nexts < prev:
142 if nexts < prev:
143 raise ValueError("Manifest lines not in sorted order.")
143 raise ValueError("Manifest lines not in sorted order.")
144 prev = nexts
144 prev = nexts
145 pos = data.find("\n", pos + 1)
145 pos = data.find("\n", pos + 1)
146 return positions
146 return positions
147
147
148 def _get(self, index):
148 def _get(self, index):
149 # get the position encoded in pos:
149 # get the position encoded in pos:
150 # positive number is an index in 'data'
150 # positive number is an index in 'data'
151 # negative number is in extrapieces
151 # negative number is in extrapieces
152 pos = self.positions[index]
152 pos = self.positions[index]
153 if pos >= 0:
153 if pos >= 0:
154 return self.data, pos
154 return self.data, pos
155 return self.extradata[-pos - 1], -1
155 return self.extradata[-pos - 1], -1
156
156
157 def _getkey(self, pos):
157 def _getkey(self, pos):
158 if pos >= 0:
158 if pos >= 0:
159 return self.data[pos:self.data.find('\x00', pos + 1)]
159 return self.data[pos:self.data.find('\x00', pos + 1)]
160 return self.extradata[-pos - 1][0]
160 return self.extradata[-pos - 1][0]
161
161
162 def bsearch(self, key):
162 def bsearch(self, key):
163 first = 0
163 first = 0
164 last = len(self.positions) - 1
164 last = len(self.positions) - 1
165
165
166 while first <= last:
166 while first <= last:
167 midpoint = (first + last)//2
167 midpoint = (first + last)//2
168 nextpos = self.positions[midpoint]
168 nextpos = self.positions[midpoint]
169 candidate = self._getkey(nextpos)
169 candidate = self._getkey(nextpos)
170 r = _cmp(key, candidate)
170 r = _cmp(key, candidate)
171 if r == 0:
171 if r == 0:
172 return midpoint
172 return midpoint
173 else:
173 else:
174 if r < 0:
174 if r < 0:
175 last = midpoint - 1
175 last = midpoint - 1
176 else:
176 else:
177 first = midpoint + 1
177 first = midpoint + 1
178 return -1
178 return -1
179
179
180 def bsearch2(self, key):
180 def bsearch2(self, key):
181 # same as the above, but will always return the position
181 # same as the above, but will always return the position
182 # done for performance reasons
182 # done for performance reasons
183 first = 0
183 first = 0
184 last = len(self.positions) - 1
184 last = len(self.positions) - 1
185
185
186 while first <= last:
186 while first <= last:
187 midpoint = (first + last)//2
187 midpoint = (first + last)//2
188 nextpos = self.positions[midpoint]
188 nextpos = self.positions[midpoint]
189 candidate = self._getkey(nextpos)
189 candidate = self._getkey(nextpos)
190 r = _cmp(key, candidate)
190 r = _cmp(key, candidate)
191 if r == 0:
191 if r == 0:
192 return (midpoint, True)
192 return (midpoint, True)
193 else:
193 else:
194 if r < 0:
194 if r < 0:
195 last = midpoint - 1
195 last = midpoint - 1
196 else:
196 else:
197 first = midpoint + 1
197 first = midpoint + 1
198 return (first, False)
198 return (first, False)
199
199
200 def __contains__(self, key):
200 def __contains__(self, key):
201 return self.bsearch(key) != -1
201 return self.bsearch(key) != -1
202
202
203 def _getflags(self, data, needle, pos):
203 def _getflags(self, data, needle, pos):
204 start = pos + 41
204 start = pos + 41
205 end = data.find("\n", start)
205 end = data.find("\n", start)
206 if end == -1:
206 if end == -1:
207 end = len(data) - 1
207 end = len(data) - 1
208 if start == end:
208 if start == end:
209 return ''
209 return ''
210 return self.data[start:end]
210 return self.data[start:end]
211
211
212 def __getitem__(self, key):
212 def __getitem__(self, key):
213 if not isinstance(key, bytes):
213 if not isinstance(key, bytes):
214 raise TypeError("getitem: manifest keys must be a bytes.")
214 raise TypeError("getitem: manifest keys must be a bytes.")
215 needle = self.bsearch(key)
215 needle = self.bsearch(key)
216 if needle == -1:
216 if needle == -1:
217 raise KeyError
217 raise KeyError
218 data, pos = self._get(needle)
218 data, pos = self._get(needle)
219 if pos == -1:
219 if pos == -1:
220 return (data[1], data[2])
220 return (data[1], data[2])
221 zeropos = data.find('\x00', pos)
221 zeropos = data.find('\x00', pos)
222 assert 0 <= needle <= len(self.positions)
222 assert 0 <= needle <= len(self.positions)
223 assert len(self.extrainfo) == len(self.positions)
223 assert len(self.extrainfo) == len(self.positions)
224 hashval = unhexlify(data, self.extrainfo[needle], zeropos + 1, 40)
224 hashval = unhexlify(data, self.extrainfo[needle], zeropos + 1, 40)
225 flags = self._getflags(data, needle, zeropos)
225 flags = self._getflags(data, needle, zeropos)
226 return (hashval, flags)
226 return (hashval, flags)
227
227
228 def __delitem__(self, key):
228 def __delitem__(self, key):
229 needle, found = self.bsearch2(key)
229 needle, found = self.bsearch2(key)
230 if not found:
230 if not found:
231 raise KeyError
231 raise KeyError
232 cur = self.positions[needle]
232 cur = self.positions[needle]
233 self.positions = self.positions[:needle] + self.positions[needle + 1:]
233 self.positions = self.positions[:needle] + self.positions[needle + 1:]
234 self.extrainfo = self.extrainfo[:needle] + self.extrainfo[needle + 1:]
234 self.extrainfo = self.extrainfo[:needle] + self.extrainfo[needle + 1:]
235 if cur >= 0:
235 if cur >= 0:
236 self.data = self.data[:cur] + '\x00' + self.data[cur + 1:]
236 self.data = self.data[:cur] + '\x00' + self.data[cur + 1:]
237
237
238 def __setitem__(self, key, value):
238 def __setitem__(self, key, value):
239 if not isinstance(key, bytes):
239 if not isinstance(key, bytes):
240 raise TypeError("setitem: manifest keys must be a byte string.")
240 raise TypeError("setitem: manifest keys must be a byte string.")
241 if not isinstance(value, tuple) or len(value) != 2:
241 if not isinstance(value, tuple) or len(value) != 2:
242 raise TypeError("Manifest values must be a tuple of (node, flags).")
242 raise TypeError("Manifest values must be a tuple of (node, flags).")
243 hashval = value[0]
243 hashval = value[0]
244 if not isinstance(hashval, bytes) or not 20 <= len(hashval) <= 22:
244 if not isinstance(hashval, bytes) or not 20 <= len(hashval) <= 22:
245 raise TypeError("node must be a 20-byte byte string")
245 raise TypeError("node must be a 20-byte byte string")
246 flags = value[1]
246 flags = value[1]
247 if len(hashval) == 22:
247 if len(hashval) == 22:
248 hashval = hashval[:-1]
248 hashval = hashval[:-1]
249 if not isinstance(flags, bytes) or len(flags) > 1:
249 if not isinstance(flags, bytes) or len(flags) > 1:
250 raise TypeError("flags must a 0 or 1 byte string, got %r", flags)
250 raise TypeError("flags must a 0 or 1 byte string, got %r", flags)
251 needle, found = self.bsearch2(key)
251 needle, found = self.bsearch2(key)
252 if found:
252 if found:
253 # put the item
253 # put the item
254 pos = self.positions[needle]
254 pos = self.positions[needle]
255 if pos < 0:
255 if pos < 0:
256 self.extradata[-pos - 1] = (key, hashval, value[1])
256 self.extradata[-pos - 1] = (key, hashval, value[1])
257 else:
257 else:
258 # just don't bother
258 # just don't bother
259 self.extradata.append((key, hashval, value[1]))
259 self.extradata.append((key, hashval, value[1]))
260 self.positions[needle] = -len(self.extradata)
260 self.positions[needle] = -len(self.extradata)
261 else:
261 else:
262 # not found, put it in with extra positions
262 # not found, put it in with extra positions
263 self.extradata.append((key, hashval, value[1]))
263 self.extradata.append((key, hashval, value[1]))
264 self.positions = (self.positions[:needle] + [-len(self.extradata)]
264 self.positions = (self.positions[:needle] + [-len(self.extradata)]
265 + self.positions[needle:])
265 + self.positions[needle:])
266 self.extrainfo = (self.extrainfo[:needle] + [0] +
266 self.extrainfo = (self.extrainfo[:needle] + [0] +
267 self.extrainfo[needle:])
267 self.extrainfo[needle:])
268
268
269 def copy(self):
269 def copy(self):
270 # XXX call _compact like in C?
270 # XXX call _compact like in C?
271 return _lazymanifest(self.data, self.positions, self.extrainfo,
271 return _lazymanifest(self.data, self.positions, self.extrainfo,
272 self.extradata)
272 self.extradata)
273
273
274 def _compact(self):
274 def _compact(self):
275 # hopefully not called TOO often
275 # hopefully not called TOO often
276 if len(self.extradata) == 0:
276 if len(self.extradata) == 0:
277 return
277 return
278 l = []
278 l = []
279 last_cut = 0
279 last_cut = 0
280 i = 0
280 i = 0
281 offset = 0
281 offset = 0
282 self.extrainfo = [0] * len(self.positions)
282 self.extrainfo = [0] * len(self.positions)
283 while i < len(self.positions):
283 while i < len(self.positions):
284 if self.positions[i] >= 0:
284 if self.positions[i] >= 0:
285 cur = self.positions[i]
285 cur = self.positions[i]
286 last_cut = cur
286 last_cut = cur
287 while True:
287 while True:
288 self.positions[i] = offset
288 self.positions[i] = offset
289 i += 1
289 i += 1
290 if i == len(self.positions) or self.positions[i] < 0:
290 if i == len(self.positions) or self.positions[i] < 0:
291 break
291 break
292 offset += self.positions[i] - cur
292 offset += self.positions[i] - cur
293 cur = self.positions[i]
293 cur = self.positions[i]
294 end_cut = self.data.find('\n', cur)
294 end_cut = self.data.find('\n', cur)
295 if end_cut != -1:
295 if end_cut != -1:
296 end_cut += 1
296 end_cut += 1
297 offset += end_cut - cur
297 offset += end_cut - cur
298 l.append(self.data[last_cut:end_cut])
298 l.append(self.data[last_cut:end_cut])
299 else:
299 else:
300 while i < len(self.positions) and self.positions[i] < 0:
300 while i < len(self.positions) and self.positions[i] < 0:
301 cur = self.positions[i]
301 cur = self.positions[i]
302 t = self.extradata[-cur - 1]
302 t = self.extradata[-cur - 1]
303 l.append(self._pack(t))
303 l.append(self._pack(t))
304 self.positions[i] = offset
304 self.positions[i] = offset
305 if len(t[1]) > 20:
305 if len(t[1]) > 20:
306 self.extrainfo[i] = ord(t[1][21])
306 self.extrainfo[i] = ord(t[1][21])
307 offset += len(l[-1])
307 offset += len(l[-1])
308 i += 1
308 i += 1
309 self.data = ''.join(l)
309 self.data = ''.join(l)
310 self.extradata = []
310 self.extradata = []
311
311
312 def _pack(self, d):
312 def _pack(self, d):
313 return d[0] + '\x00' + hex(d[1][:20]) + d[2] + '\n'
313 return d[0] + '\x00' + hex(d[1][:20]) + d[2] + '\n'
314
314
315 def text(self):
315 def text(self):
316 self._compact()
316 self._compact()
317 return self.data
317 return self.data
318
318
319 def diff(self, m2, clean=False):
319 def diff(self, m2, clean=False):
320 '''Finds changes between the current manifest and m2.'''
320 '''Finds changes between the current manifest and m2.'''
321 # XXX think whether efficiency matters here
321 # XXX think whether efficiency matters here
322 diff = {}
322 diff = {}
323
323
324 for fn, e1, flags in self.iterentries():
324 for fn, e1, flags in self.iterentries():
325 if fn not in m2:
325 if fn not in m2:
326 diff[fn] = (e1, flags), (None, '')
326 diff[fn] = (e1, flags), (None, '')
327 else:
327 else:
328 e2 = m2[fn]
328 e2 = m2[fn]
329 if (e1, flags) != e2:
329 if (e1, flags) != e2:
330 diff[fn] = (e1, flags), e2
330 diff[fn] = (e1, flags), e2
331 elif clean:
331 elif clean:
332 diff[fn] = None
332 diff[fn] = None
333
333
334 for fn, e2, flags in m2.iterentries():
334 for fn, e2, flags in m2.iterentries():
335 if fn not in self:
335 if fn not in self:
336 diff[fn] = (None, ''), (e2, flags)
336 diff[fn] = (None, ''), (e2, flags)
337
337
338 return diff
338 return diff
339
339
340 def iterentries(self):
340 def iterentries(self):
341 return lazymanifestiterentries(self)
341 return lazymanifestiterentries(self)
342
342
343 def iterkeys(self):
343 def iterkeys(self):
344 return lazymanifestiter(self)
344 return lazymanifestiter(self)
345
345
346 def __iter__(self):
346 def __iter__(self):
347 return lazymanifestiter(self)
347 return lazymanifestiter(self)
348
348
349 def __len__(self):
349 def __len__(self):
350 return len(self.positions)
350 return len(self.positions)
351
351
352 def filtercopy(self, filterfn):
352 def filtercopy(self, filterfn):
353 # XXX should be optimized
353 # XXX should be optimized
354 c = _lazymanifest('')
354 c = _lazymanifest('')
355 for f, n, fl in self.iterentries():
355 for f, n, fl in self.iterentries():
356 if filterfn(f):
356 if filterfn(f):
357 c[f] = n, fl
357 c[f] = n, fl
358 return c
358 return c
359
359
360 try:
360 try:
361 _lazymanifest = parsers.lazymanifest
361 _lazymanifest = parsers.lazymanifest
362 except AttributeError:
362 except AttributeError:
363 pass
363 pass
364
364
365 class manifestdict(object):
365 class manifestdict(object):
366 def __init__(self, data=''):
366 def __init__(self, data=''):
367 self._lm = _lazymanifest(data)
367 self._lm = _lazymanifest(data)
368
368
369 def __getitem__(self, key):
369 def __getitem__(self, key):
370 return self._lm[key][0]
370 return self._lm[key][0]
371
371
372 def find(self, key):
372 def find(self, key):
373 return self._lm[key]
373 return self._lm[key]
374
374
375 def __len__(self):
375 def __len__(self):
376 return len(self._lm)
376 return len(self._lm)
377
377
378 def __nonzero__(self):
378 def __nonzero__(self):
379 # nonzero is covered by the __len__ function, but implementing it here
379 # nonzero is covered by the __len__ function, but implementing it here
380 # makes it easier for extensions to override.
380 # makes it easier for extensions to override.
381 return len(self._lm) != 0
381 return len(self._lm) != 0
382
382
383 __bool__ = __nonzero__
383 __bool__ = __nonzero__
384
384
385 def __setitem__(self, key, node):
385 def __setitem__(self, key, node):
386 self._lm[key] = node, self.flags(key, '')
386 self._lm[key] = node, self.flags(key, '')
387
387
388 def __contains__(self, key):
388 def __contains__(self, key):
389 if key is None:
389 if key is None:
390 return False
390 return False
391 return key in self._lm
391 return key in self._lm
392
392
393 def __delitem__(self, key):
393 def __delitem__(self, key):
394 del self._lm[key]
394 del self._lm[key]
395
395
396 def __iter__(self):
396 def __iter__(self):
397 return self._lm.__iter__()
397 return self._lm.__iter__()
398
398
399 def iterkeys(self):
399 def iterkeys(self):
400 return self._lm.iterkeys()
400 return self._lm.iterkeys()
401
401
402 def keys(self):
402 def keys(self):
403 return list(self.iterkeys())
403 return list(self.iterkeys())
404
404
405 def filesnotin(self, m2, match=None):
405 def filesnotin(self, m2, match=None):
406 '''Set of files in this manifest that are not in the other'''
406 '''Set of files in this manifest that are not in the other'''
407 if match:
407 if match:
408 m1 = self.matches(match)
408 m1 = self.matches(match)
409 m2 = m2.matches(match)
409 m2 = m2.matches(match)
410 return m1.filesnotin(m2)
410 return m1.filesnotin(m2)
411 diff = self.diff(m2)
411 diff = self.diff(m2)
412 files = set(filepath
412 files = set(filepath
413 for filepath, hashflags in diff.iteritems()
413 for filepath, hashflags in diff.iteritems()
414 if hashflags[1][0] is None)
414 if hashflags[1][0] is None)
415 return files
415 return files
416
416
417 @propertycache
417 @propertycache
418 def _dirs(self):
418 def _dirs(self):
419 return util.dirs(self)
419 return util.dirs(self)
420
420
421 def dirs(self):
421 def dirs(self):
422 return self._dirs
422 return self._dirs
423
423
424 def hasdir(self, dir):
424 def hasdir(self, dir):
425 return dir in self._dirs
425 return dir in self._dirs
426
426
427 def _filesfastpath(self, match):
427 def _filesfastpath(self, match):
428 '''Checks whether we can correctly and quickly iterate over matcher
428 '''Checks whether we can correctly and quickly iterate over matcher
429 files instead of over manifest files.'''
429 files instead of over manifest files.'''
430 files = match.files()
430 files = match.files()
431 return (len(files) < 100 and (match.isexact() or
431 return (len(files) < 100 and (match.isexact() or
432 (match.prefix() and all(fn in self for fn in files))))
432 (match.prefix() and all(fn in self for fn in files))))
433
433
434 def walk(self, match):
434 def walk(self, match):
435 '''Generates matching file names.
435 '''Generates matching file names.
436
436
437 Equivalent to manifest.matches(match).iterkeys(), but without creating
437 Equivalent to manifest.matches(match).iterkeys(), but without creating
438 an entirely new manifest.
438 an entirely new manifest.
439
439
440 It also reports nonexistent files by marking them bad with match.bad().
440 It also reports nonexistent files by marking them bad with match.bad().
441 '''
441 '''
442 if match.always():
442 if match.always():
443 for f in iter(self):
443 for f in iter(self):
444 yield f
444 yield f
445 return
445 return
446
446
447 fset = set(match.files())
447 fset = set(match.files())
448
448
449 # avoid the entire walk if we're only looking for specific files
449 # avoid the entire walk if we're only looking for specific files
450 if self._filesfastpath(match):
450 if self._filesfastpath(match):
451 for fn in sorted(fset):
451 for fn in sorted(fset):
452 yield fn
452 yield fn
453 return
453 return
454
454
455 for fn in self:
455 for fn in self:
456 if fn in fset:
456 if fn in fset:
457 # specified pattern is the exact name
457 # specified pattern is the exact name
458 fset.remove(fn)
458 fset.remove(fn)
459 if match(fn):
459 if match(fn):
460 yield fn
460 yield fn
461
461
462 # for dirstate.walk, files=['.'] means "walk the whole tree".
462 # for dirstate.walk, files=['.'] means "walk the whole tree".
463 # follow that here, too
463 # follow that here, too
464 fset.discard('.')
464 fset.discard('.')
465
465
466 for fn in sorted(fset):
466 for fn in sorted(fset):
467 if not self.hasdir(fn):
467 if not self.hasdir(fn):
468 match.bad(fn, None)
468 match.bad(fn, None)
469
469
470 def matches(self, match):
470 def matches(self, match):
471 '''generate a new manifest filtered by the match argument'''
471 '''generate a new manifest filtered by the match argument'''
472 if match.always():
472 if match.always():
473 return self.copy()
473 return self.copy()
474
474
475 if self._filesfastpath(match):
475 if self._filesfastpath(match):
476 m = manifestdict()
476 m = manifestdict()
477 lm = self._lm
477 lm = self._lm
478 for fn in match.files():
478 for fn in match.files():
479 if fn in lm:
479 if fn in lm:
480 m._lm[fn] = lm[fn]
480 m._lm[fn] = lm[fn]
481 return m
481 return m
482
482
483 m = manifestdict()
483 m = manifestdict()
484 m._lm = self._lm.filtercopy(match)
484 m._lm = self._lm.filtercopy(match)
485 return m
485 return m
486
486
487 def diff(self, m2, match=None, clean=False):
487 def diff(self, m2, match=None, clean=False):
488 '''Finds changes between the current manifest and m2.
488 '''Finds changes between the current manifest and m2.
489
489
490 Args:
490 Args:
491 m2: the manifest to which this manifest should be compared.
491 m2: the manifest to which this manifest should be compared.
492 clean: if true, include files unchanged between these manifests
492 clean: if true, include files unchanged between these manifests
493 with a None value in the returned dictionary.
493 with a None value in the returned dictionary.
494
494
495 The result is returned as a dict with filename as key and
495 The result is returned as a dict with filename as key and
496 values of the form ((n1,fl1),(n2,fl2)), where n1/n2 is the
496 values of the form ((n1,fl1),(n2,fl2)), where n1/n2 is the
497 nodeid in the current/other manifest and fl1/fl2 is the flag
497 nodeid in the current/other manifest and fl1/fl2 is the flag
498 in the current/other manifest. Where the file does not exist,
498 in the current/other manifest. Where the file does not exist,
499 the nodeid will be None and the flags will be the empty
499 the nodeid will be None and the flags will be the empty
500 string.
500 string.
501 '''
501 '''
502 if match:
502 if match:
503 m1 = self.matches(match)
503 m1 = self.matches(match)
504 m2 = m2.matches(match)
504 m2 = m2.matches(match)
505 return m1.diff(m2, clean=clean)
505 return m1.diff(m2, clean=clean)
506 return self._lm.diff(m2._lm, clean)
506 return self._lm.diff(m2._lm, clean)
507
507
508 def setflag(self, key, flag):
508 def setflag(self, key, flag):
509 self._lm[key] = self[key], flag
509 self._lm[key] = self[key], flag
510
510
511 def get(self, key, default=None):
511 def get(self, key, default=None):
512 try:
512 try:
513 return self._lm[key][0]
513 return self._lm[key][0]
514 except KeyError:
514 except KeyError:
515 return default
515 return default
516
516
517 def flags(self, key, default=''):
517 def flags(self, key, default=''):
518 try:
518 try:
519 return self._lm[key][1]
519 return self._lm[key][1]
520 except KeyError:
520 except KeyError:
521 return default
521 return default
522
522
523 def copy(self):
523 def copy(self):
524 c = manifestdict()
524 c = manifestdict()
525 c._lm = self._lm.copy()
525 c._lm = self._lm.copy()
526 return c
526 return c
527
527
528 def items(self):
528 def items(self):
529 return (x[:2] for x in self._lm.iterentries())
529 return (x[:2] for x in self._lm.iterentries())
530
530
531 iteritems = items
531 iteritems = items
532
532
533 def iterentries(self):
533 def iterentries(self):
534 return self._lm.iterentries()
534 return self._lm.iterentries()
535
535
536 def text(self):
536 def text(self):
537 # most likely uses native version
537 # most likely uses native version
538 return self._lm.text()
538 return self._lm.text()
539
539
540 def fastdelta(self, base, changes):
540 def fastdelta(self, base, changes):
541 """Given a base manifest text as a bytearray and a list of changes
541 """Given a base manifest text as a bytearray and a list of changes
542 relative to that text, compute a delta that can be used by revlog.
542 relative to that text, compute a delta that can be used by revlog.
543 """
543 """
544 delta = []
544 delta = []
545 dstart = None
545 dstart = None
546 dend = None
546 dend = None
547 dline = [""]
547 dline = [""]
548 start = 0
548 start = 0
549 # zero copy representation of base as a buffer
549 # zero copy representation of base as a buffer
550 addbuf = util.buffer(base)
550 addbuf = util.buffer(base)
551
551
552 changes = list(changes)
552 changes = list(changes)
553 if len(changes) < 1000:
553 if len(changes) < 1000:
554 # start with a readonly loop that finds the offset of
554 # start with a readonly loop that finds the offset of
555 # each line and creates the deltas
555 # each line and creates the deltas
556 for f, todelete in changes:
556 for f, todelete in changes:
557 # bs will either be the index of the item or the insert point
557 # bs will either be the index of the item or the insert point
558 start, end = _msearch(addbuf, f, start)
558 start, end = _msearch(addbuf, f, start)
559 if not todelete:
559 if not todelete:
560 h, fl = self._lm[f]
560 h, fl = self._lm[f]
561 l = "%s\0%s%s\n" % (f, revlog.hex(h), fl)
561 l = "%s\0%s%s\n" % (f, revlog.hex(h), fl)
562 else:
562 else:
563 if start == end:
563 if start == end:
564 # item we want to delete was not found, error out
564 # item we want to delete was not found, error out
565 raise AssertionError(
565 raise AssertionError(
566 _("failed to remove %s from manifest") % f)
566 _("failed to remove %s from manifest") % f)
567 l = ""
567 l = ""
568 if dstart is not None and dstart <= start and dend >= start:
568 if dstart is not None and dstart <= start and dend >= start:
569 if dend < end:
569 if dend < end:
570 dend = end
570 dend = end
571 if l:
571 if l:
572 dline.append(l)
572 dline.append(l)
573 else:
573 else:
574 if dstart is not None:
574 if dstart is not None:
575 delta.append([dstart, dend, "".join(dline)])
575 delta.append([dstart, dend, "".join(dline)])
576 dstart = start
576 dstart = start
577 dend = end
577 dend = end
578 dline = [l]
578 dline = [l]
579
579
580 if dstart is not None:
580 if dstart is not None:
581 delta.append([dstart, dend, "".join(dline)])
581 delta.append([dstart, dend, "".join(dline)])
582 # apply the delta to the base, and get a delta for addrevision
582 # apply the delta to the base, and get a delta for addrevision
583 deltatext, arraytext = _addlistdelta(base, delta)
583 deltatext, arraytext = _addlistdelta(base, delta)
584 else:
584 else:
585 # For large changes, it's much cheaper to just build the text and
585 # For large changes, it's much cheaper to just build the text and
586 # diff it.
586 # diff it.
587 arraytext = bytearray(self.text())
587 arraytext = bytearray(self.text())
588 deltatext = mdiff.textdiff(
588 deltatext = mdiff.textdiff(
589 util.buffer(base), util.buffer(arraytext))
589 util.buffer(base), util.buffer(arraytext))
590
590
591 return arraytext, deltatext
591 return arraytext, deltatext
592
592
def _msearch(m, s, lo=0, hi=None):
    '''return a tuple (start, end) that says where to find s within m.

    If the string is found m[start:end] are the line containing
    that string. If start == end the string was not found and
    they indicate the proper sorted insertion point.

    m should be a buffer, a memoryview or a byte string.
    s is a byte string'''
    # The manifest text is a sequence of records, one per line, of the
    # form "<filename>\0<40-hex-byte sha1>[flags]\n", sorted by
    # filename.  This is a hand-rolled bisect over those records.
    def advance(i, c):
        # Scan forward from index i to the next occurrence of the
        # single byte c (m[i:i + 1] yields a length-1 slice so the
        # comparison works on buffers/memoryviews too).
        while i < lenm and m[i:i + 1] != c:
            i += 1
        return i
    if not s:
        # Empty search key: by convention it "belongs" at lo.
        return (lo, lo)
    lenm = len(m)
    if not hi:
        hi = lenm
    while lo < hi:
        mid = (lo + hi) // 2
        # Back up from mid to the start of the line containing it, so
        # we always compare whole filenames.
        start = mid
        while start > 0 and m[start - 1:start] != '\n':
            start -= 1
        end = advance(start, '\0')
        if bytes(m[start:end]) < s:
            # we know that after the null there are 40 bytes of sha1
            # this translates to the bisect lo = mid + 1
            lo = advance(end + 40, '\n') + 1
        else:
            # this translates to the bisect hi = mid
            hi = start
    # lo is now at the start of the candidate line; check for an exact
    # filename match.
    end = advance(lo, '\0')
    found = m[lo:end]
    if s == found:
        # we know that after the null there are 40 bytes of sha1
        end = advance(end + 40, '\n')
        return (lo, end + 1)
    else:
        # Not present: (lo, lo) is the sorted insertion point.
        return (lo, lo)
632
632
def _checkforbidden(l):
    """Check filenames for illegal characters.

    Raises a RevlogError for any name containing a newline or a
    carriage return, since the manifest format is newline-delimited.
    """
    for name in l:
        if any(c in name for c in ('\n', '\r')):
            raise error.RevlogError(
                _("'\\n' and '\\r' disallowed in filenames: %r") % name)
639
639
640
640
641 # apply the changes collected during the bisect loop to our addlist
641 # apply the changes collected during the bisect loop to our addlist
642 # return a delta suitable for addrevision
642 # return a delta suitable for addrevision
643 def _addlistdelta(addlist, x):
643 def _addlistdelta(addlist, x):
644 # for large addlist arrays, building a new array is cheaper
644 # for large addlist arrays, building a new array is cheaper
645 # than repeatedly modifying the existing one
645 # than repeatedly modifying the existing one
646 currentposition = 0
646 currentposition = 0
647 newaddlist = bytearray()
647 newaddlist = bytearray()
648
648
649 for start, end, content in x:
649 for start, end, content in x:
650 newaddlist += addlist[currentposition:start]
650 newaddlist += addlist[currentposition:start]
651 if content:
651 if content:
652 newaddlist += bytearray(content)
652 newaddlist += bytearray(content)
653
653
654 currentposition = end
654 currentposition = end
655
655
656 newaddlist += addlist[currentposition:]
656 newaddlist += addlist[currentposition:]
657
657
658 deltatext = "".join(struct.pack(">lll", start, end, len(content))
658 deltatext = "".join(struct.pack(">lll", start, end, len(content))
659 + content for start, end, content in x)
659 + content for start, end, content in x)
660 return deltatext, newaddlist
660 return deltatext, newaddlist
661
661
662 def _splittopdir(f):
662 def _splittopdir(f):
663 if '/' in f:
663 if '/' in f:
664 dir, subpath = f.split('/', 1)
664 dir, subpath = f.split('/', 1)
665 return dir + '/', subpath
665 return dir + '/', subpath
666 else:
666 else:
667 return '', f
667 return '', f
668
668
# Sentinel callable meaning "no lazy load/copy pending" for
# treemanifest._loadfunc/_copyfunc; code compares against it with
# `is`, so this single shared instance must not be replaced.
_noop = lambda s: None
670
670
class treemanifest(object):
    """A manifest stored as a tree of per-directory manifests.

    Each instance covers one directory (self._dir, '' for the repo
    root, always with a trailing '/').  Files directly in the
    directory live in self._files (name -> node) and self._flags
    (name -> flags); each immediate subdirectory name (with trailing
    '/') maps to a child treemanifest in self._dirs.  Loading and
    copying are lazy, via the _loadfunc/_copyfunc hooks consumed by
    _load().
    """
    def __init__(self, dir='', text=''):
        # dir is expected to carry a trailing '/' (empty for the root).
        self._dir = dir
        self._node = revlog.nullid
        # _noop sentinels mean "nothing pending"; see _load().
        self._loadfunc = _noop
        self._copyfunc = _noop
        self._dirty = False
        self._dirs = {}
        # Using _lazymanifest here is a little slower than plain old dicts
        self._files = {}
        self._flags = {}
        if text:
            def readsubtree(subdir, subm):
                raise AssertionError('treemanifest constructor only accepts '
                                     'flat manifests')
            self.parse(text, readsubtree)
            self._dirty = True # Mark flat manifest dirty after parsing

    def _subpath(self, path):
        # Full repo-relative path for an entry in this directory.
        return self._dir + path

    def __len__(self):
        # Total number of files, recursing into submanifests.
        self._load()
        size = len(self._files)
        for m in self._dirs.values():
            size += m.__len__()
        return size

    def __nonzero__(self):
        # Faster than "__len() != 0" since it avoids loading sub-manifests
        return not self._isempty()

    __bool__ = __nonzero__

    def _isempty(self):
        self._load() # for consistency; already loaded by all callers
        return (not self._files and (not self._dirs or
                all(m._isempty() for m in self._dirs.values())))

    def __repr__(self):
        return ('<treemanifest dir=%s, node=%s, loaded=%s, dirty=%s at 0x%x>' %
                (self._dir, revlog.hex(self._node),
                 bool(self._loadfunc is _noop),
                 self._dirty, id(self)))

    def dir(self):
        '''The directory that this tree manifest represents, including a
        trailing '/'. Empty string for the repo root directory.'''
        return self._dir

    def node(self):
        '''This node of this instance. nullid for unsaved instances. Should
        be updated when the instance is read or written from a revlog.
        '''
        assert not self._dirty
        return self._node

    def setnode(self, node):
        self._node = node
        self._dirty = False

    def iterentries(self):
        # Yield (path, node, flags) for every file, in sorted order,
        # recursing into subdirectories.
        self._load()
        for p, n in sorted(itertools.chain(self._dirs.items(),
                                           self._files.items())):
            if p in self._files:
                yield self._subpath(p), n, self._flags.get(p, '')
            else:
                for x in n.iterentries():
                    yield x

    def items(self):
        # Yield (path, node) for every file, in sorted order.
        self._load()
        for p, n in sorted(itertools.chain(self._dirs.items(),
                                           self._files.items())):
            if p in self._files:
                yield self._subpath(p), n
            else:
                for f, sn in n.iteritems():
                    yield f, sn

    iteritems = items

    def iterkeys(self):
        # Yield every full file path, in sorted order.
        self._load()
        for p in sorted(itertools.chain(self._dirs, self._files)):
            if p in self._files:
                yield self._subpath(p)
            else:
                for f in self._dirs[p]:
                    yield f

    def keys(self):
        return list(self.iterkeys())

    def __iter__(self):
        return self.iterkeys()

    def __contains__(self, f):
        if f is None:
            return False
        self._load()
        # Route the lookup into the right submanifest, if any.
        dir, subpath = _splittopdir(f)
        if dir:
            if dir not in self._dirs:
                return False
            return self._dirs[dir].__contains__(subpath)
        else:
            return f in self._files

    def get(self, f, default=None):
        self._load()
        dir, subpath = _splittopdir(f)
        if dir:
            if dir not in self._dirs:
                return default
            return self._dirs[dir].get(subpath, default)
        else:
            return self._files.get(f, default)

    def __getitem__(self, f):
        # Raises KeyError (from dict lookup) for unknown paths.
        self._load()
        dir, subpath = _splittopdir(f)
        if dir:
            return self._dirs[dir].__getitem__(subpath)
        else:
            return self._files[f]

    def flags(self, f):
        # Returns '' for unknown paths and for directories.
        self._load()
        dir, subpath = _splittopdir(f)
        if dir:
            if dir not in self._dirs:
                return ''
            return self._dirs[dir].flags(subpath)
        else:
            if f in self._dirs:
                return ''
            return self._flags.get(f, '')

    def find(self, f):
        # Returns (node, flags); raises KeyError for unknown paths.
        self._load()
        dir, subpath = _splittopdir(f)
        if dir:
            return self._dirs[dir].find(subpath)
        else:
            return self._files[f], self._flags.get(f, '')

    def __delitem__(self, f):
        self._load()
        dir, subpath = _splittopdir(f)
        if dir:
            self._dirs[dir].__delitem__(subpath)
            # If the directory is now empty, remove it
            if self._dirs[dir]._isempty():
                del self._dirs[dir]
        else:
            del self._files[f]
            if f in self._flags:
                del self._flags[f]
        self._dirty = True

    def __setitem__(self, f, n):
        assert n is not None
        self._load()
        dir, subpath = _splittopdir(f)
        if dir:
            # Create intermediate submanifests on demand.
            if dir not in self._dirs:
                self._dirs[dir] = treemanifest(self._subpath(dir))
            self._dirs[dir].__setitem__(subpath, n)
        else:
            self._files[f] = n[:21] # to match manifestdict's behavior
        self._dirty = True

    def _load(self):
        # Run at most one pending lazy action (load takes precedence
        # over copy), resetting the hook to _noop before calling it so
        # reentrant calls are no-ops.
        if self._loadfunc is not _noop:
            lf, self._loadfunc = self._loadfunc, _noop
            lf(self)
        elif self._copyfunc is not _noop:
            cf, self._copyfunc = self._copyfunc, _noop
            cf(self)

    def setflag(self, f, flags):
        """Set the flags (symlink, executable) for path f."""
        self._load()
        dir, subpath = _splittopdir(f)
        if dir:
            if dir not in self._dirs:
                self._dirs[dir] = treemanifest(self._subpath(dir))
            self._dirs[dir].setflag(subpath, flags)
        else:
            self._flags[f] = flags
        self._dirty = True

    def copy(self):
        # The copy is lazy: if we are not ourselves pending a copy, we
        # install a _copyfunc on the new instance (or run it now when
        # this instance is fully loaded); otherwise the pending copy
        # hook is shared.
        copy = treemanifest(self._dir)
        copy._node = self._node
        copy._dirty = self._dirty
        if self._copyfunc is _noop:
            def _copyfunc(s):
                self._load()
                for d in self._dirs:
                    s._dirs[d] = self._dirs[d].copy()
                s._files = dict.copy(self._files)
                s._flags = dict.copy(self._flags)
            if self._loadfunc is _noop:
                _copyfunc(copy)
            else:
                copy._copyfunc = _copyfunc
        else:
            copy._copyfunc = self._copyfunc
        return copy

    def filesnotin(self, m2, match=None):
        '''Set of files in this manifest that are not in the other'''
        if match:
            m1 = self.matches(match)
            m2 = m2.matches(match)
            return m1.filesnotin(m2)

        files = set()
        def _filesnotin(t1, t2):
            # Identical, clean subtrees can be skipped wholesale.
            if t1._node == t2._node and not t1._dirty and not t2._dirty:
                return
            t1._load()
            t2._load()
            for d, m1 in t1._dirs.iteritems():
                if d in t2._dirs:
                    m2 = t2._dirs[d]
                    _filesnotin(m1, m2)
                else:
                    files.update(m1.iterkeys())

            for fn in t1._files:
                if fn not in t2._files:
                    files.add(t1._subpath(fn))

        _filesnotin(self, m2)
        return files

    @propertycache
    def _alldirs(self):
        return util.dirs(self)

    def dirs(self):
        return self._alldirs

    def hasdir(self, dir):
        self._load()
        topdir, subdir = _splittopdir(dir)
        if topdir:
            if topdir in self._dirs:
                return self._dirs[topdir].hasdir(subdir)
            return False
        # _dirs keys carry a trailing '/'; the argument does not.
        return (dir + '/') in self._dirs

    def walk(self, match):
        '''Generates matching file names.

        Equivalent to manifest.matches(match).iterkeys(), but without creating
        an entirely new manifest.

        It also reports nonexistent files by marking them bad with match.bad().
        '''
        if match.always():
            for f in iter(self):
                yield f
            return

        fset = set(match.files())

        for fn in self._walk(match):
            if fn in fset:
                # specified pattern is the exact name
                fset.remove(fn)
            yield fn

        # for dirstate.walk, files=['.'] means "walk the whole tree".
        # follow that here, too
        fset.discard('.')

        # Anything left in fset was never matched; report it as bad
        # unless it is a directory prefix we do have.
        for fn in sorted(fset):
            if not self.hasdir(fn):
                match.bad(fn, None)

    def _walk(self, match):
        '''Recursively generates matching file names for walk().'''
        # self._dir[:-1] strips the trailing '/'; '.' stands for root.
        if not match.visitdir(self._dir[:-1] or '.'):
            return

        # yield this dir's files and walk its submanifests
        self._load()
        for p in sorted(list(self._dirs) + list(self._files)):
            if p in self._files:
                fullp = self._subpath(p)
                if match(fullp):
                    yield fullp
            else:
                for f in self._dirs[p]._walk(match):
                    yield f

    def matches(self, match):
        '''generate a new manifest filtered by the match argument'''
        if match.always():
            return self.copy()

        return self._matches(match)

    def _matches(self, match):
        '''recursively generate a new manifest filtered by the match argument.
        '''

        visit = match.visitdir(self._dir[:-1] or '.')
        # 'all' means everything below this directory matches: just copy.
        if visit == 'all':
            return self.copy()
        ret = treemanifest(self._dir)
        if not visit:
            return ret

        self._load()
        for fn in self._files:
            fullp = self._subpath(fn)
            if not match(fullp):
                continue
            ret._files[fn] = self._files[fn]
            if fn in self._flags:
                ret._flags[fn] = self._flags[fn]

        for dir, subm in self._dirs.iteritems():
            m = subm._matches(match)
            if not m._isempty():
                ret._dirs[dir] = m

        if not ret._isempty():
            ret._dirty = True
        return ret

    def diff(self, m2, match=None, clean=False):
        '''Finds changes between the current manifest and m2.

        Args:
          m2: the manifest to which this manifest should be compared.
          clean: if true, include files unchanged between these manifests
                 with a None value in the returned dictionary.

        The result is returned as a dict with filename as key and
        values of the form ((n1,fl1),(n2,fl2)), where n1/n2 is the
        nodeid in the current/other manifest and fl1/fl2 is the flag
        in the current/other manifest. Where the file does not exist,
        the nodeid will be None and the flags will be the empty
        string.
        '''
        if match:
            m1 = self.matches(match)
            m2 = m2.matches(match)
            return m1.diff(m2, clean=clean)
        result = {}
        emptytree = treemanifest()
        def _diff(t1, t2):
            # Identical, clean subtrees cannot contribute differences.
            if t1._node == t2._node and not t1._dirty and not t2._dirty:
                return
            t1._load()
            t2._load()
            # Recurse into directories present on either side, using
            # an empty tree as the stand-in for a missing one.
            for d, m1 in t1._dirs.iteritems():
                m2 = t2._dirs.get(d, emptytree)
                _diff(m1, m2)

            for d, m2 in t2._dirs.iteritems():
                if d not in t1._dirs:
                    _diff(emptytree, m2)

            for fn, n1 in t1._files.iteritems():
                fl1 = t1._flags.get(fn, '')
                n2 = t2._files.get(fn, None)
                fl2 = t2._flags.get(fn, '')
                if n1 != n2 or fl1 != fl2:
                    result[t1._subpath(fn)] = ((n1, fl1), (n2, fl2))
                elif clean:
                    result[t1._subpath(fn)] = None

            for fn, n2 in t2._files.iteritems():
                if fn not in t1._files:
                    fl2 = t2._flags.get(fn, '')
                    result[t2._subpath(fn)] = ((None, ''), (n2, fl2))

        _diff(self, m2)
        return result

    def unmodifiedsince(self, m2):
        return not self._dirty and not m2._dirty and self._node == m2._node

    def parse(self, text, readsubtree):
        # Entries flagged 't' are subtree pointers; everything else is
        # a regular file entry.
        for f, n, fl in _parse(text):
            if fl == 't':
                f = f + '/'
                self._dirs[f] = readsubtree(self._subpath(f), n)
            elif '/' in f:
                # This is a flat manifest, so use __setitem__ and setflag rather
                # than assigning directly to _files and _flags, so we can
                # assign a path in a subdirectory, and to mark dirty (compared
                # to nullid).
                self[f] = n
                if fl:
                    self.setflag(f, fl)
            else:
                # Assigning to _files and _flags avoids marking as dirty,
                # and should be a little faster.
                self._files[f] = n
                if fl:
                    self._flags[f] = fl

    def text(self):
        """Get the full data of this manifest as a bytestring."""
        self._load()
        return _text(self.iterentries())

    def dirtext(self):
        """Get the full data of this directory as a bytestring. Make sure that
        any submanifests have been written first, so their nodeids are correct.
        """
        self._load()
        flags = self.flags
        # Directory entries drop the trailing '/' and carry flag 't'.
        dirs = [(d[:-1], self._dirs[d]._node, 't') for d in self._dirs]
        files = [(f, self._files[f], flags(f)) for f in self._files]
        return _text(sorted(dirs + files))

    def read(self, gettext, readsubtree):
        # Defer parsing until the first access that calls _load().
        def _load_for_read(s):
            s.parse(gettext(), readsubtree)
            s._dirty = False
        self._loadfunc = _load_for_read

    def writesubtrees(self, m1, m2, writesubtree):
        self._load() # for consistency; should never have any effect here
        m1._load()
        m2._load()
        emptytree = treemanifest()
        for d, subm in self._dirs.iteritems():
            subp1 = m1._dirs.get(d, emptytree)._node
            subp2 = m2._dirs.get(d, emptytree)._node
            # Prefer a non-null first parent.
            if subp1 == revlog.nullid:
                subp1, subp2 = subp2, subp1
            writesubtree(subm, subp1, subp2)

    def walksubtrees(self, matcher=None):
        """Returns an iterator of the subtrees of this manifest, including this
        manifest itself.

        If `matcher` is provided, it only returns subtrees that match.
        """
        if matcher and not matcher.visitdir(self._dir[:-1] or '.'):
            return
        if not matcher or matcher(self._dir[:-1]):
            yield self

        self._load()
        for d, subm in self._dirs.iteritems():
            for subtree in subm.walksubtrees(matcher=matcher):
                yield subtree
1130
1130
1131 class manifestrevlog(revlog.revlog):
1131 class manifestrevlog(revlog.revlog):
1132 '''A revlog that stores manifest texts. This is responsible for caching the
1132 '''A revlog that stores manifest texts. This is responsible for caching the
1133 full-text manifest contents.
1133 full-text manifest contents.
1134 '''
1134 '''
1135 def __init__(self, opener, dir='', dirlogcache=None, indexfile=None,
1135 def __init__(self, opener, dir='', dirlogcache=None, indexfile=None,
1136 treemanifest=False):
1136 treemanifest=False):
1137 """Constructs a new manifest revlog
1137 """Constructs a new manifest revlog
1138
1138
1139 `indexfile` - used by extensions to have two manifests at once, like
1139 `indexfile` - used by extensions to have two manifests at once, like
1140 when transitioning between flatmanifeset and treemanifests.
1140 when transitioning between flatmanifeset and treemanifests.
1141
1141
1142 `treemanifest` - used to indicate this is a tree manifest revlog. Opener
1142 `treemanifest` - used to indicate this is a tree manifest revlog. Opener
1143 options can also be used to make this a tree manifest revlog. The opener
1143 options can also be used to make this a tree manifest revlog. The opener
1144 option takes precedence, so if it is set to True, we ignore whatever
1144 option takes precedence, so if it is set to True, we ignore whatever
1145 value is passed in to the constructor.
1145 value is passed in to the constructor.
1146 """
1146 """
1147 # During normal operations, we expect to deal with not more than four
1147 # During normal operations, we expect to deal with not more than four
1148 # revs at a time (such as during commit --amend). When rebasing large
1148 # revs at a time (such as during commit --amend). When rebasing large
1149 # stacks of commits, the number can go up, hence the config knob below.
1149 # stacks of commits, the number can go up, hence the config knob below.
1150 cachesize = 4
1150 cachesize = 4
1151 optiontreemanifest = False
1151 optiontreemanifest = False
1152 opts = getattr(opener, 'options', None)
1152 opts = getattr(opener, 'options', None)
1153 if opts is not None:
1153 if opts is not None:
1154 cachesize = opts.get('manifestcachesize', cachesize)
1154 cachesize = opts.get('manifestcachesize', cachesize)
1155 optiontreemanifest = opts.get('treemanifest', False)
1155 optiontreemanifest = opts.get('treemanifest', False)
1156
1156
1157 self._treeondisk = optiontreemanifest or treemanifest
1157 self._treeondisk = optiontreemanifest or treemanifest
1158
1158
1159 self._fulltextcache = util.lrucachedict(cachesize)
1159 self._fulltextcache = util.lrucachedict(cachesize)
1160
1160
1161 if dir:
1161 if dir:
1162 assert self._treeondisk, 'opts is %r' % opts
1162 assert self._treeondisk, 'opts is %r' % opts
1163 if not dir.endswith('/'):
1164 dir = dir + '/'
1165
1163
1166 if indexfile is None:
1164 if indexfile is None:
1167 indexfile = '00manifest.i'
1165 indexfile = '00manifest.i'
1168 if dir:
1166 if dir:
1169 indexfile = "meta/" + dir + indexfile
1167 indexfile = "meta/" + dir + indexfile
1170
1168
1171 self._dir = dir
1169 self._dir = dir
1172 # The dirlogcache is kept on the root manifest log
1170 # The dirlogcache is kept on the root manifest log
1173 if dir:
1171 if dir:
1174 self._dirlogcache = dirlogcache
1172 self._dirlogcache = dirlogcache
1175 else:
1173 else:
1176 self._dirlogcache = {'': self}
1174 self._dirlogcache = {'': self}
1177
1175
1178 super(manifestrevlog, self).__init__(opener, indexfile,
1176 super(manifestrevlog, self).__init__(opener, indexfile,
1179 # only root indexfile is cached
1177 # only root indexfile is cached
1180 checkambig=not bool(dir),
1178 checkambig=not bool(dir),
1181 mmaplargeindex=True)
1179 mmaplargeindex=True)
1182
1180
1183 @property
1181 @property
1184 def fulltextcache(self):
1182 def fulltextcache(self):
1185 return self._fulltextcache
1183 return self._fulltextcache
1186
1184
1187 def clearcaches(self):
1185 def clearcaches(self):
1188 super(manifestrevlog, self).clearcaches()
1186 super(manifestrevlog, self).clearcaches()
1189 self._fulltextcache.clear()
1187 self._fulltextcache.clear()
1190 self._dirlogcache = {'': self}
1188 self._dirlogcache = {'': self}
1191
1189
1192 def dirlog(self, d):
1190 def dirlog(self, d):
1193 if d:
1191 if d:
1194 assert self._treeondisk
1192 assert self._treeondisk
1195 if d not in self._dirlogcache:
1193 if d not in self._dirlogcache:
1196 mfrevlog = manifestrevlog(self.opener, d,
1194 mfrevlog = manifestrevlog(self.opener, d,
1197 self._dirlogcache,
1195 self._dirlogcache,
1198 treemanifest=self._treeondisk)
1196 treemanifest=self._treeondisk)
1199 self._dirlogcache[d] = mfrevlog
1197 self._dirlogcache[d] = mfrevlog
1200 return self._dirlogcache[d]
1198 return self._dirlogcache[d]
1201
1199
1202 def add(self, m, transaction, link, p1, p2, added, removed, readtree=None):
1200 def add(self, m, transaction, link, p1, p2, added, removed, readtree=None):
1203 if p1 in self.fulltextcache and util.safehasattr(m, 'fastdelta'):
1201 if p1 in self.fulltextcache and util.safehasattr(m, 'fastdelta'):
1204 # If our first parent is in the manifest cache, we can
1202 # If our first parent is in the manifest cache, we can
1205 # compute a delta here using properties we know about the
1203 # compute a delta here using properties we know about the
1206 # manifest up-front, which may save time later for the
1204 # manifest up-front, which may save time later for the
1207 # revlog layer.
1205 # revlog layer.
1208
1206
1209 _checkforbidden(added)
1207 _checkforbidden(added)
1210 # combine the changed lists into one sorted iterator
1208 # combine the changed lists into one sorted iterator
1211 work = heapq.merge([(x, False) for x in added],
1209 work = heapq.merge([(x, False) for x in added],
1212 [(x, True) for x in removed])
1210 [(x, True) for x in removed])
1213
1211
1214 arraytext, deltatext = m.fastdelta(self.fulltextcache[p1], work)
1212 arraytext, deltatext = m.fastdelta(self.fulltextcache[p1], work)
1215 cachedelta = self.rev(p1), deltatext
1213 cachedelta = self.rev(p1), deltatext
1216 text = util.buffer(arraytext)
1214 text = util.buffer(arraytext)
1217 n = self.addrevision(text, transaction, link, p1, p2, cachedelta)
1215 n = self.addrevision(text, transaction, link, p1, p2, cachedelta)
1218 else:
1216 else:
1219 # The first parent manifest isn't already loaded, so we'll
1217 # The first parent manifest isn't already loaded, so we'll
1220 # just encode a fulltext of the manifest and pass that
1218 # just encode a fulltext of the manifest and pass that
1221 # through to the revlog layer, and let it handle the delta
1219 # through to the revlog layer, and let it handle the delta
1222 # process.
1220 # process.
1223 if self._treeondisk:
1221 if self._treeondisk:
1224 assert readtree, "readtree must be set for treemanifest writes"
1222 assert readtree, "readtree must be set for treemanifest writes"
1225 m1 = readtree(self._dir, p1)
1223 m1 = readtree(self._dir, p1)
1226 m2 = readtree(self._dir, p2)
1224 m2 = readtree(self._dir, p2)
1227 n = self._addtree(m, transaction, link, m1, m2, readtree)
1225 n = self._addtree(m, transaction, link, m1, m2, readtree)
1228 arraytext = None
1226 arraytext = None
1229 else:
1227 else:
1230 text = m.text()
1228 text = m.text()
1231 n = self.addrevision(text, transaction, link, p1, p2)
1229 n = self.addrevision(text, transaction, link, p1, p2)
1232 arraytext = bytearray(text)
1230 arraytext = bytearray(text)
1233
1231
1234 if arraytext is not None:
1232 if arraytext is not None:
1235 self.fulltextcache[n] = arraytext
1233 self.fulltextcache[n] = arraytext
1236
1234
1237 return n
1235 return n
1238
1236
1239 def _addtree(self, m, transaction, link, m1, m2, readtree):
1237 def _addtree(self, m, transaction, link, m1, m2, readtree):
1240 # If the manifest is unchanged compared to one parent,
1238 # If the manifest is unchanged compared to one parent,
1241 # don't write a new revision
1239 # don't write a new revision
1242 if self._dir != '' and (m.unmodifiedsince(m1) or m.unmodifiedsince(m2)):
1240 if self._dir != '' and (m.unmodifiedsince(m1) or m.unmodifiedsince(m2)):
1243 return m.node()
1241 return m.node()
1244 def writesubtree(subm, subp1, subp2):
1242 def writesubtree(subm, subp1, subp2):
1245 sublog = self.dirlog(subm.dir())
1243 sublog = self.dirlog(subm.dir())
1246 sublog.add(subm, transaction, link, subp1, subp2, None, None,
1244 sublog.add(subm, transaction, link, subp1, subp2, None, None,
1247 readtree=readtree)
1245 readtree=readtree)
1248 m.writesubtrees(m1, m2, writesubtree)
1246 m.writesubtrees(m1, m2, writesubtree)
1249 text = m.dirtext()
1247 text = m.dirtext()
1250 n = None
1248 n = None
1251 if self._dir != '':
1249 if self._dir != '':
1252 # Double-check whether contents are unchanged to one parent
1250 # Double-check whether contents are unchanged to one parent
1253 if text == m1.dirtext():
1251 if text == m1.dirtext():
1254 n = m1.node()
1252 n = m1.node()
1255 elif text == m2.dirtext():
1253 elif text == m2.dirtext():
1256 n = m2.node()
1254 n = m2.node()
1257
1255
1258 if not n:
1256 if not n:
1259 n = self.addrevision(text, transaction, link, m1.node(), m2.node())
1257 n = self.addrevision(text, transaction, link, m1.node(), m2.node())
1260
1258
1261 # Save nodeid so parent manifest can calculate its nodeid
1259 # Save nodeid so parent manifest can calculate its nodeid
1262 m.setnode(n)
1260 m.setnode(n)
1263 return n
1261 return n
1264
1262
class manifestlog(object):
    """A collection class representing the collection of manifest snapshots
    referenced by commits in the repository.

    In this situation, 'manifest' refers to the abstract concept of a snapshot
    of the list of files in the given commit. Consumers of the output of this
    class do not care about the implementation details of the actual manifests
    they receive (i.e. tree or flat or lazily loaded, etc)."""
    def __init__(self, opener, repo):
        usetreemanifest = False
        cachesize = 4

        opts = getattr(opener, 'options', None)
        if opts is not None:
            usetreemanifest = opts.get('treemanifest', usetreemanifest)
            cachesize = opts.get('manifestcachesize', cachesize)
        # Whether manifests are materialized as trees in memory.
        self._treeinmem = usetreemanifest

        self._revlog = repo._constructmanifest()

        # A cache of the manifestctx or treemanifestctx for each directory
        self._dirmancache = {}
        self._dirmancache[''] = util.lrucachedict(cachesize)

        self.cachesize = cachesize

    def __getitem__(self, node):
        """Retrieves the manifest instance for the given node. Throws a
        LookupError if not found.
        """
        return self.get('', node)

    def get(self, dir, node, verify=True):
        """Retrieves the manifest instance for the given node. Throws a
        LookupError if not found.

        `verify` - if True an exception will be thrown if the node is not in
        the revlog
        """
        if node in self._dirmancache.get(dir, ()):
            return self._dirmancache[dir][node]

        if dir:
            if self._revlog._treeondisk:
                if verify:
                    dirlog = self._revlog.dirlog(dir)
                    if node not in dirlog.nodemap:
                        raise LookupError(node, dirlog.indexfile,
                                          _('no node'))
                m = treemanifestctx(self, dir, node)
            else:
                raise error.Abort(
                        _("cannot ask for manifest directory '%s' in a flat "
                          "manifest") % dir)
        else:
            if verify:
                if node not in self._revlog.nodemap:
                    raise LookupError(node, self._revlog.indexfile,
                                      _('no node'))
            if self._treeinmem:
                m = treemanifestctx(self, '', node)
            else:
                m = manifestctx(self, node)

        # Never cache the null manifest; every directory gets its own LRU.
        if node != revlog.nullid:
            mancache = self._dirmancache.get(dir)
            if not mancache:
                mancache = util.lrucachedict(self.cachesize)
                self._dirmancache[dir] = mancache
            mancache[node] = m
        return m

    def clearcaches(self):
        """Drop all cached contexts and the underlying revlog caches."""
        self._dirmancache.clear()
        self._revlog.clearcaches()
1340
1338
class memmanifestctx(object):
    """A mutable, in-memory flat manifest revision that has not yet been
    written to the revlog."""
    def __init__(self, manifestlog):
        self._manifestlog = manifestlog
        self._manifestdict = manifestdict()

    def _revlog(self):
        return self._manifestlog._revlog

    def new(self):
        """Return a fresh, empty in-memory manifest context."""
        return memmanifestctx(self._manifestlog)

    def copy(self):
        """Return an in-memory copy of this manifest's contents."""
        memmf = memmanifestctx(self._manifestlog)
        memmf._manifestdict = self.read().copy()
        return memmf

    def read(self):
        return self._manifestdict

    def write(self, transaction, link, p1, p2, added, removed):
        """Persist this manifest to the revlog; returns the new node id."""
        return self._revlog().add(self._manifestdict, transaction, link, p1, p2,
                                  added, removed)
1363
1361
class manifestctx(object):
    """A class representing a single revision of a manifest, including its
    contents, its parent revs, and its linkrev.
    """
    def __init__(self, manifestlog, node):
        self._manifestlog = manifestlog
        # Lazily populated fulltext (manifestdict); see read().
        self._data = None

        self._node = node

        # TODO: We eventually want p1, p2, and linkrev exposed on this class,
        # but let's add it later when something needs it and we can load it
        # lazily.
        #self.p1, self.p2 = revlog.parents(node)
        #rev = revlog.rev(node)
        #self.linkrev = revlog.linkrev(rev)

    def _revlog(self):
        return self._manifestlog._revlog

    def node(self):
        return self._node

    def new(self):
        """Return a fresh, empty in-memory manifest context."""
        return memmanifestctx(self._manifestlog)

    def copy(self):
        """Return a mutable in-memory copy of this revision's contents."""
        memmf = memmanifestctx(self._manifestlog)
        memmf._manifestdict = self.read().copy()
        return memmf

    @propertycache
    def parents(self):
        return self._revlog().parents(self._node)

    def read(self):
        """Return the manifestdict for this revision, loading and caching
        the revlog fulltext on first access."""
        if self._data is None:
            if self._node == revlog.nullid:
                self._data = manifestdict()
            else:
                rl = self._revlog()
                text = rl.revision(self._node)
                # Keep the raw fulltext around for future delta computation.
                arraytext = bytearray(text)
                rl._fulltextcache[self._node] = arraytext
                self._data = manifestdict(text)
        return self._data

    def readfast(self, shallow=False):
        '''Calls either readdelta or read, based on which would be less work.
        readdelta is called if the delta is against the p1, and therefore can be
        read quickly.

        If `shallow` is True, nothing changes since this is a flat manifest.
        '''
        rl = self._revlog()
        r = rl.rev(self._node)
        deltaparent = rl.deltaparent(r)
        if deltaparent != revlog.nullrev and deltaparent in rl.parentrevs(r):
            return self.readdelta()
        return self.read()

    def readdelta(self, shallow=False):
        '''Returns a manifest containing just the entries that are present
        in this manifest, but not in its p1 manifest. This is efficient to read
        if the revlog delta is already p1.

        Changing the value of `shallow` has no effect on flat manifests.
        '''
        revlog = self._revlog()
        r = revlog.rev(self._node)
        d = mdiff.patchtext(revlog.revdiff(revlog.deltaparent(r), r))
        return manifestdict(d)

    def find(self, key):
        return self.read().find(key)
1439
1437
class memtreemanifestctx(object):
    """A mutable, in-memory tree manifest revision that has not yet been
    written to the revlog."""
    def __init__(self, manifestlog, dir=''):
        self._manifestlog = manifestlog
        self._dir = dir
        self._treemanifest = treemanifest()

    def _revlog(self):
        return self._manifestlog._revlog

    def new(self, dir=''):
        """Return a fresh, empty in-memory tree manifest context for `dir`."""
        return memtreemanifestctx(self._manifestlog, dir=dir)

    def copy(self):
        """Return an in-memory copy of this tree manifest's contents."""
        memmf = memtreemanifestctx(self._manifestlog, dir=self._dir)
        memmf._treemanifest = self._treemanifest.copy()
        return memmf

    def read(self):
        return self._treemanifest

    def write(self, transaction, link, p1, p2, added, removed):
        """Persist this tree manifest to the revlog; returns the node id.

        Subtree parents are resolved through the manifestlog via `readtree`.
        """
        def readtree(dir, node):
            return self._manifestlog.get(dir, node).read()
        return self._revlog().add(self._treemanifest, transaction, link, p1, p2,
                                  added, removed, readtree=readtree)
1465
1463
class treemanifestctx(object):
    """A single revision of a tree manifest for directory `dir`, loaded
    lazily from its per-directory revlog."""
    def __init__(self, manifestlog, dir, node):
        self._manifestlog = manifestlog
        self._dir = dir
        # Lazily populated treemanifest; see read().
        self._data = None

        self._node = node

        # TODO: Load p1/p2/linkrev lazily. They need to be lazily loaded so that
        # we can instantiate treemanifestctx objects for directories we don't
        # have on disk.
        #self.p1, self.p2 = revlog.parents(node)
        #rev = revlog.rev(node)
        #self.linkrev = revlog.linkrev(rev)

    def _revlog(self):
        return self._manifestlog._revlog.dirlog(self._dir)

    def read(self):
        """Return the treemanifest for this revision, loading it lazily.

        On-disk tree manifests are assembled by recursively reading
        subtrees; flat storage parses the fulltext directly.
        """
        if self._data is None:
            rl = self._revlog()
            if self._node == revlog.nullid:
                self._data = treemanifest()
            elif rl._treeondisk:
                m = treemanifest(dir=self._dir)
                def gettext():
                    return rl.revision(self._node)
                def readsubtree(dir, subm):
                    # Set verify to False since we need to be able to create
                    # subtrees for trees that don't exist on disk.
                    return self._manifestlog.get(dir, subm, verify=False).read()
                m.read(gettext, readsubtree)
                m.setnode(self._node)
                self._data = m
            else:
                text = rl.revision(self._node)
                # Keep the raw fulltext around for future delta computation.
                arraytext = bytearray(text)
                rl.fulltextcache[self._node] = arraytext
                self._data = treemanifest(dir=self._dir, text=text)

        return self._data

    def node(self):
        return self._node

    def new(self, dir=''):
        """Return a fresh, empty in-memory tree manifest context for `dir`."""
        return memtreemanifestctx(self._manifestlog, dir=dir)

    def copy(self):
        """Return a mutable in-memory copy of this revision's contents."""
        memmf = memtreemanifestctx(self._manifestlog, dir=self._dir)
        memmf._treemanifest = self.read().copy()
        return memmf

    @propertycache
    def parents(self):
        return self._revlog().parents(self._node)

    def readdelta(self, shallow=False):
        '''Returns a manifest containing just the entries that are present
        in this manifest, but not in its p1 manifest. This is efficient to read
        if the revlog delta is already p1.

        If `shallow` is True, this will read the delta for this directory,
        without recursively reading subdirectory manifests. Instead, any
        subdirectory entry will be reported as it appears in the manifest, i.e.
        the subdirectory will be reported among files and distinguished only by
        its 't' flag.
        '''
        revlog = self._revlog()
        if shallow:
            r = revlog.rev(self._node)
            d = mdiff.patchtext(revlog.revdiff(revlog.deltaparent(r), r))
            return manifestdict(d)
        else:
            # Need to perform a slow delta
            r0 = revlog.deltaparent(revlog.rev(self._node))
            m0 = self._manifestlog.get(self._dir, revlog.node(r0)).read()
            m1 = self.read()
            md = treemanifest(dir=self._dir)
            for f, ((n0, fl0), (n1, fl1)) in m0.diff(m1).iteritems():
                if n1:
                    md[f] = n1
                    if fl1:
                        md.setflag(f, fl1)
            return md

    def readfast(self, shallow=False):
        '''Calls either readdelta or read, based on which would be less work.
        readdelta is called if the delta is against the p1, and therefore can be
        read quickly.

        If `shallow` is True, it only returns the entries from this manifest,
        and not any submanifests.
        '''
        rl = self._revlog()
        r = rl.rev(self._node)
        deltaparent = rl.deltaparent(r)
        if (deltaparent != revlog.nullrev and
            deltaparent in rl.parentrevs(r)):
            return self.readdelta(shallow=shallow)

        if shallow:
            return manifestdict(rl.revision(self._node))
        else:
            return self.read()

    def find(self, key):
        return self.read().find(key)
General Comments 0
You need to be logged in to leave comments. Login now